BLKSerene/Wordless


Showing 206 of 206 total issues

Consider simplifying this complex logical expression.
Open

    if platform.system() == 'Windows':
        assert is_windows and not is_macos and not is_linux
    elif platform.system() == 'Darwin':
        assert not is_windows and is_macos and not is_linux
    elif platform.system() == 'Linux':
Severity: Major
Found in tests/tests_utils/test_misc.py - About 1 hr to fix
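
One way the chain of per-platform assertions could be collapsed, sketched under the assumption that is_windows, is_macos and is_linux are plain booleans (as the assertions suggest): compare the three flags against an expected-flags mapping in a single assertion.

    # Sketch only, not the test's actual code: the helper name and the tuple
    # order (is_windows, is_macos, is_linux) are illustrative.
    import platform

    def check_platform_flags(is_windows, is_macos, is_linux):
        flags_expected = {
            'Windows': (True, False, False),
            'Darwin': (False, True, False),
            'Linux': (False, False, True),
        }

        assert (is_windows, is_macos, is_linux) == flags_expected[platform.system()]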

Consider simplifying this complex logical expression.
Open

            if (
                self.tokenized
                and (css_para and css_sentence and css_word)
                and (soup.select_one(css_para) and soup.select_one(css_sentence) and soup.select_one(css_word))
            ):
Severity: Major
Found in wordless/wl_nlp/wl_texts.py - About 1 hr to fix
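
A hedged sketch of one possible simplification: pull the selector checks into a small named predicate so that the original condition keeps only two operands. The helper below is illustrative and not part of wl_texts; soup is assumed to be a bs4.BeautifulSoup object and the css_* arguments are the selector strings from the excerpt above.

    def selectors_present(soup, css_para, css_sentence, css_word):
        selectors = (css_para, css_sentence, css_word)

        # All three selectors are non-empty and each matches at least one element
        return (
            all(selectors)
            and all(soup.select_one(css) is not None for css in selectors)
        )

The condition above would then read: if self.tokenized and selectors_present(soup, css_para, css_sentence, css_word).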

Consider simplifying this complex logical expression.
Open

                if (
                    token[0] == '།'
                    and text
                    and text[-1] in ['།', 'ཀ', 'ག']
                ):
Severity: Major
Found in wordless/wl_nlp/wl_word_detokenization.py - About 1 hr to fix

Function add_header_hor has 8 arguments (exceeds 4 allowed). Consider refactoring.
Open

    def add_header_hor(
Severity: Major
Found in wordless/wl_widgets/wl_tables.py - About 1 hr to fix
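
The parameter list is truncated in this excerpt, so the fields below are purely hypothetical placeholders. The general pattern for trimming a signature like this is to bundle the related per-header options into a single settings object, for example a dataclass:

    # Hypothetical sketch: add_header_hor's real parameters are not shown above,
    # so these field names are placeholders for illustration only.
    import dataclasses

    @dataclasses.dataclass
    class Header_Settings:
        label: str
        is_int: bool = False
        is_float: bool = False
        is_pct: bool = False
        is_cum: bool = False

    def add_header_hor(table, header_settings):
        ...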

Function yatess_correction has 8 arguments (exceeds 4 allowed). Consider refactoring.
Open

def yatess_correction(o11s, o12s, o21s, o22s, e11s, e12s, e21s, e22s):
Severity: Major
Found in wordless/wl_measures/wl_measures_statistical_significance.py - About 1 hr to fix
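
Here the eight parameters are four pairs of observed and expected contingency-table frequencies, so one possible refactoring (a sketch, not the module's actual signature) is to pass two 4-column arrays and unpack them inside the function:

    # Sketch only: the column order (o11, o12, o21, o22) mirrors the original
    # parameter order; the array-based signature itself is an assumption.
    import numpy

    def yatess_correction(freqs_observed, freqs_expected):
        o11s, o12s, o21s, o22s = numpy.asarray(freqs_observed).T
        e11s, e12s, e21s, e22s = numpy.asarray(freqs_expected).T

        # The original calculation could then continue unchanged
        ...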

Consider simplifying this complex logical expression.
Open

                            if (
                                (
                                    (
                                        not settings['search_settings']['match_dependency_relations']
                                        and (token in search_terms or head in search_terms)
Severity: Major
Found in wordless/wl_dependency_parser.py - About 1 hr to fix

Function __init__ has 7 arguments (exceeds 4 allowed). Consider refactoring.
Open

    def __init__(
Severity: Major
Found in wordless/wl_profiler.py - About 50 mins to fix

Function ins_headers_vert has 7 arguments (exceeds 4 allowed). Consider refactoring.
Open

    def ins_headers_vert(
Severity: Major
Found in wordless/wl_widgets/wl_tables.py - About 50 mins to fix

Function wl_spin_boxes_min_max_no_limit has 7 arguments (exceeds 4 allowed). Consider refactoring.
Open

def wl_spin_boxes_min_max_no_limit(
Severity: Major
Found in wordless/wl_widgets/wl_boxes.py - About 50 mins to fix

Function ins_header_vert has 7 arguments (exceeds 4 allowed). Consider refactoring.
Open

    def ins_header_vert(
Severity: Major
Found in wordless/wl_widgets/wl_tables.py - About 50 mins to fix

Function widgets_filter has 7 arguments (exceeds 4 allowed). Consider refactoring.
Open

def widgets_filter(parent, label, val_min, val_max, settings, filter_name, double = False):
Severity: Major
Found in wordless/wl_results/wl_results_filter.py - About 50 mins to fix

Function __init__ has 7 arguments (exceeds 4 allowed). Consider refactoring.
Open

    def __init__(self, main, title, width = 0, height = 0, resizable = True, help_info = '', err_msg = ''):
Severity: Major
Found in wordless/wl_dialogs/wl_dialogs_errs.py - About 50 mins to fix

Function __init__ has 7 arguments (exceeds 4 allowed). Consider refactoring.
Open

    def __init__(self, main, title, width = 0, height = 0, resizable = True, icon = True, no_buttons = False):
Severity: Major
Found in wordless/wl_dialogs/wl_dialogs.py - About 50 mins to fix

Function wl_test_lemmatize_models has 6 arguments (exceeds 4 allowed). Consider refactoring.
Open

def wl_test_lemmatize_models(lang, lemmatizer, test_sentence, tokens, results, lang_exceptions = None):
Severity: Minor
Found in tests/tests_nlp/test_lemmatization.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                            for k, ngram in enumerate(wl_nlp_utils.ngrams(tokens, len_search_term)):
                                if ngram == search_term:
                                    points.append([x_start + k / text.num_tokens * len_tokens_total, y_start - j])
                                    # Total
                                    points.append([x_start_total + k, 0])
Severity: Major
Found in wordless/wl_concordancer.py - About 45 mins to fix
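
A hedged sketch of one way to peel off a level here: lift the innermost matching loop into a small generator and let the caller iterate over the match positions. The helper below is illustrative, not part of wl_concordancer; the import path mirrors the module referenced in the excerpt.

    from wordless.wl_nlp import wl_nlp_utils

    # Illustrative helper: yields the token offsets at which search_term occurs,
    # so the caller's loop body loses one level of nesting.
    def match_positions(tokens, search_term):
        len_search_term = len(search_term)

        for k, ngram in enumerate(wl_nlp_utils.ngrams(tokens, len_search_term)):
            if ngram == search_term:
                yield k

The block shown above would then iterate over match_positions(tokens, search_term) and append the two points per match.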

Avoid deeply nested control flow statements.
Open

                    for collocate in range(10):
                        collocate = wl_texts.Wl_Token(str(collocate))
                        stat_files_items[(node, collocate)] = [
                            random.uniform(0, val_max),
                            random.uniform(0, val_max),
Severity: Major
Found in tests/tests_figs/test_figs_stats.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                            if settings_limit_searching == _tr('Wl_Worker_Colligation_Extractor', 'Within sentence segments'):
                                offsets_unit = offsets_sentence_segs
                                len_unit = len_sentence_segs
                            elif settings_limit_searching == _tr('Wl_Worker_Colligation_Extractor', 'Within sentences'):
                                offsets_unit = offsets_sentences
Severity: Major
Found in wordless/wl_colligation_extractor.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                    for sentence_seg in sentence:
                        for i, token in enumerate(sentence_seg):
                            if token.isupper():
                                sentence_seg[i] = wl_texts.Wl_Token('')
        # Title Case
Severity: Major
Found in wordless/wl_nlp/wl_token_processing.py - About 45 mins to fix
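
One common way to shed nesting levels here is to move the whole per-sentence rewrite into a helper and replace the index loop with a slice assignment. The function below is an illustrative sketch, not wl_token_processing's API; the import mirrors the module used in the excerpt.

    from wordless.wl_nlp import wl_texts

    # Illustrative helper: blanks fully uppercase tokens in place, one sentence
    # segment at a time, so the caller drops the two innermost loops.
    def blank_uppercase_tokens(sentence):
        for sentence_seg in sentence:
            sentence_seg[:] = [
                wl_texts.Wl_Token('') if token.isupper() else token
                for token in sentence_seg
            ]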

Avoid deeply nested control flow statements.
Open

                    for collocate in range(10):
                        collocate = wl_texts.Wl_Token(str(collocate))
                        freq_1, freq_2 = random.sample(range(10000), 2)

                        freq_files_items[(node, collocate)] = [
Severity: Major
Found in tests/tests_figs/test_figs_freqs.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                    if i == 0 and j == 0 and k == 0:
                        tokens = []

                        for l, token in enumerate(sentence_seg):
                            # Do not remove the first token and set it to an empty token instead if it is a punctuation mark
Severity: Major
Found in wordless/wl_nlp/wl_token_processing.py - About 45 mins to fix