BLKSerene/Wordless

Showing 204 of 204 total issues

Avoid deeply nested control flow statements.
Open

                        if line and line not in items_cur:
                            items_to_imp.append(line)

Severity: Major
Found in wordless/wl_widgets/wl_lists.py - About 45 mins to fix
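
A common way to flatten this kind of check is to hoist the condition into a comprehension over the input lines. A minimal sketch, assuming the surrounding loop only collects non-empty lines that are not already listed (the function and variable names are hypothetical, not the actual Wordless code):

    def collect_items_to_import(lines, items_cur):
        # Keep non-empty lines that are not already present, replacing the
        # nested if inside the read loop
        items_cur = set(items_cur)

        return [line for line in lines if line and line not in items_cur]

Converting items_cur to a set also turns the per-line membership check from a linear scan into a hash lookup.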

Avoid deeply nested control flow statements.
Open

                    for i, token in enumerate(sentence_seg):
                        if wl_checks_tokens.is_num(token):
                            sentence_seg[i] = ''

    # Filter stop words
Severity: Major
Found in wordless/wl_nlp/wl_token_processing.py - About 45 mins to fix
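
One nesting level can usually be removed by pulling the token-blanking loop into a small helper that the caller invokes once per segment. A sketch assuming the check is a simple predicate over a token; the helper name is hypothetical, and wl_checks_tokens.is_num is only the predicate shown in the excerpt:

    def blank_tokens(sentence_seg, is_excluded):
        # Replace tokens matching the predicate (e.g. numerals) with empty
        # strings, in place
        for i, token in enumerate(sentence_seg):
            if is_excluded(token):
                sentence_seg[i] = ''

The caller then reads as a single line per segment, e.g. blank_tokens(sentence_seg, wl_checks_tokens.is_num).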

Avoid deeply nested control flow statements.
Open

                        if wl_matching.split_tag_embedded(opening_tag_text)[1] == '*':
                            opening_tag_text = opening_tag_text.replace('*', self.tr('TAG'))


Severity: Major
Found in wordless/wl_settings/wl_settings_files.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                            for j, collocate in enumerate(reversed(tags_left)):
                                if wl_matching.check_context(
                                    i, tokens,
                                    context_settings = settings['search_settings']['context_settings'],
                                    search_terms_incl = search_terms_incl,
Severity: Major
Found in wordless/wl_colligation_extractor.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                            if ngram == search_term:
                                colligations_freqs_file_filtered[(node, collocate)] = freqs


Severity: Major
Found in wordless/wl_colligation_extractor.py - About 45 mins to fix
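
Since the loop body only records entries whose n-gram matches a search term, the filtering can be expressed as a dictionary comprehension instead of an assignment buried several loops deep. The excerpt does not show how the tested n-gram relates to the node, so the sketch below only illustrates the shape of the refactor, with hypothetical names:

    def filter_freqs_by_search_terms(colligations_freqs, search_terms):
        # Keep only (node, collocate) entries whose node is among the search
        # terms, instead of testing equality deep inside nested loops
        search_terms = set(search_terms)

        return {
            (node, collocate): freqs
            for (node, collocate), freqs in colligations_freqs.items()
            if node in search_terms
        }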

Avoid deeply nested control flow statements.
Open

                            for k, ngram in enumerate(wl_nlp_utils.ngrams(text.get_tokens_flat(), len_search_term)):
                                if ngram == search_term:
                                    points.append([x_start + k, i])


Severity: Major
Found in wordless/wl_concordancer.py - About 45 mins to fix
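
The innermost loop is essentially an n-gram search, which can live in its own generator so the plotting code keeps only one loop. A sketch assuming n-grams are compared as tuples of tokens; the helper is hypothetical, not part of wl_nlp_utils:

    def ngram_match_offsets(tokens, search_term):
        # Yield every offset at which search_term occurs as a contiguous n-gram
        size = len(search_term)
        target = tuple(search_term)

        for k in range(len(tokens) - size + 1):
            if tuple(tokens[k:k + size]) == target:
                yield k

The caller can then extend its point list directly, e.g. points.extend([x_start + k, i] for k in ngram_match_offsets(tokens, search_term)).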

Avoid deeply nested control flow statements.
Open

                    for collocate in range(10):
                        collocate = wl_texts.Wl_Token(str(collocate))
                        freq_1, freq_2 = random.sample(range(10000), 2)

                        freq_files_items[(node, collocate)] = [
Severity: Major
Found in tests/tests_figs/test_figs_freqs.py - About 45 mins to fix
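
In the test fixtures, the random-frequency setup is a good candidate for a helper so the test body loses its deepest loop. A minimal sketch that simplifies the fixture to what is visible in the excerpt; the names are hypothetical, and the real code wraps collocates in wl_texts.Wl_Token:

    import random


    def build_random_freqs(node, num_collocates = 10, upper = 10000):
        # Map each (node, collocate) pair to two distinct random frequencies,
        # as the nested fixture loop does
        return {
            (node, str(collocate)): random.sample(range(upper), 2)
            for collocate in range(num_collocates)
        }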

Avoid deeply nested control flow statements.
Open

                        if self.settings_tags == 'body_tag_settings' and tag_name == '*':
                            opening_tag_text = opening_tag_text.replace('*', _tr('wl_settings_files', 'TAG'))
                            closing_tag_text = self.model().item(row, 3).text().replace('*', _tr('wl_settings_files', 'TAG'))
                            preview.setText(opening_tag_text + _tr('wl_settings_files', 'token') + closing_tag_text)
                        else:
Severity: Major
Found in wordless/wl_settings/wl_settings_files.py - About 45 mins to fix
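
The preview assembly could move into a small helper so this branch in the settings table loop shrinks to one call. A sketch with hypothetical names, assuming the '*' wildcard is swapped for a translated label before the preview is shown:

    def render_tag_preview(opening_tag_text, closing_tag_text, token_label, tag_label):
        # Build the preview string, substituting the '*' wildcard in both tags
        opening = opening_tag_text.replace('*', tag_label)
        closing = closing_tag_text.replace('*', tag_label)

        return opening + token_label + closing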

Avoid deeply nested control flow statements.
Open

                            if settings_limit_searching == _tr('wl_colligation_extractor', 'None'):
                                tags_right = text.tags[i + ngram_size + window_left - 1 : i + ngram_size + window_right]
                            else:
                                # Span positions (Right)
                                for position in range(i + ngram_size + window_left - 1, i + ngram_size + window_right):
Severity: Major
Found in wordless/wl_colligation_extractor.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                        if (para := para[tag_last_end:]):
                            tags_tokens = self.add_tags_splitting(para, tags_tokens)

            # Add empty tags for untagged files
            if not self.tagged:
Severity: Major
Found in wordless/wl_nlp/wl_texts.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                    for doc in nlp.pipe(docs):
                        for token in doc:
                            texts_tagged.append(token.text)

                            if tagset in ['default', 'raw']:
Severity: Major
Found in wordless/wl_nlp/wl_pos_tagging.py - About 45 mins to fix
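
The doc/token double loop over nlp.pipe can be flattened with a generator that yields tokens across all processed documents, leaving the tagset branch one level shallower. A sketch assuming a spaCy-style pipeline object; the helper name is hypothetical:

    def iter_piped_tokens(nlp, docs):
        # Yield tokens from every document produced by the pipeline, so the
        # caller loops over tokens only once
        for doc in nlp.pipe(docs):
            yield from doc

The tagging loop then becomes for token in iter_piped_tokens(nlp, docs):, with texts_tagged.append(token.text) and the tagset check at the top level of that single loop.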

Avoid deeply nested control flow statements.
Open

                    for item in range(100):
                        item = wl_texts.Wl_Token(str(item))
                        freq_1, freq_2 = random.sample(range(100), 2)

                        freq_files_items[item] = [
Severity: Major
Found in tests/tests_figs/test_figs_freqs.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                    if token_properties:
                        i_tag_start += len(doc)


Severity: Major
Found in wordless/wl_nlp/wl_dependency_parsing.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                        if not file_name:
                            wl_msg_boxes.Wl_Msg_Box_Warning(
                                self.main,
                                title = self.tr('Empty File Name'),
                                text = self.tr('''
Severity: Major
Found in wordless/wl_file_area.py - About 45 mins to fix
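
Validation branches like this tend to stay flat when the check is moved into a helper that returns early. A minimal sketch covering only the empty-name case shown in the excerpt; the real code shows a Wl_Msg_Box_Warning dialog rather than returning a message:

    def check_file_name(file_name):
        # Return an error message for an invalid file name, or None if it is fine
        if not file_name:
            return 'Empty File Name'

        return None

The caller can then show the warning and bail out as soon as check_file_name returns a message, without nesting the dialog construction inside the surrounding loop.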

Avoid deeply nested control flow statements.
Open

                    if token.lemma_:
                        lemmas.append(token.lemma_)
                    else:
                        lemmas.append(token.text)
    # Stanza
Severity: Major
Found in wordless/wl_nlp/wl_lemmatization.py - About 45 mins to fix
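
The lemma-or-surface fallback can be written without a branch, which removes the innermost if/else entirely. A sketch assuming spaCy-style token attributes, where an empty lemma string is falsy:

    def lemma_or_text(token):
        # Fall back to the surface form when the lemmatizer yields no lemma
        return token.lemma_ or token.text

Each iteration then reduces to lemmas.append(lemma_or_text(token)).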

Function add_header_vert has 6 arguments (exceeds 4 allowed). Consider refactoring.
Open

    def add_header_vert(
Severity: Minor
Found in wordless/wl_widgets/wl_tables.py - About 45 mins to fix
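
A common remedy for a growing argument list is to bundle related options into a parameter object. The sketch below is not the actual add_header_vert signature; the field names are hypothetical placeholders for whatever header options the function currently takes:

    import dataclasses


    @dataclasses.dataclass
    class HeaderSettings:
        # Options that previously travelled as separate positional arguments
        label: str
        orientation: str = 'vert'
        is_int: bool = False
        is_pct: bool = False
        is_cum: bool = False


    def add_header_vert(table, settings: HeaderSettings):
        # One table argument plus one settings object replaces the
        # six-parameter signature
        ...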

Avoid deeply nested control flow statements.
Open

                        for sentence in wl_sentence_tokenization.wl_sentence_split(self.main, text_no_tags):
                            self.tokens_multilevel[-1].append([])

                            for sentence_seg in wl_sentence_tokenization.wl_sentence_seg_tokenize_tokens(
                                self.main,
Severity: Major
Found in wordless/wl_nlp/wl_texts.py - About 45 mins to fix

Function wl_spin_boxes_min_max has 6 arguments (exceeds 4 allowed). Consider refactoring.
Open

def wl_spin_boxes_min_max(
Severity: Minor
Found in wordless/wl_widgets/wl_boxes.py - About 45 mins to fix
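
Where a full parameter object feels heavy, pairing the related bounds is often enough to bring the count back under the limit. A sketch, not the real wl_spin_boxes_min_max signature (the parameter names are hypothetical), assuming the six arguments are mostly min/max values:

    def wl_spin_boxes_min_max(parent, val_range, val_range_default, *, double = False):
        # val_range and val_range_default are (min, max) pairs, collapsing
        # four separate bound arguments into two
        (val_min, val_max) = val_range
        (val_min_default, val_max_default) = val_range_default

        # ... build the spin boxes from these bounds as before ...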

Avoid deeply nested control flow statements.
Open

                    for item in range(100):
                        item = wl_texts.Wl_Token(str(item))
                        freq_1, freq_2 = random.sample(range(100), 2)

                        freq_files_items[item] = [
Severity: Major
Found in tests/tests_figs/test_figs_freqs.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                    if tuple(tokens[i + j : i + j + len(search_term)]) == tuple(search_term):
                        excl_matched = False

                        break
        # Search terms to be excluded not found in texts
Severity: Major
Found in wordless/wl_nlp/wl_matching.py - About 45 mins to fix
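
The exclusion scan that flips a flag and breaks can usually collapse into a single any() over candidate token windows, removing two nesting levels. The excerpt does not show the range of j, so the sketch below just illustrates the pattern with hypothetical names:

    def window_matches(tokens, start, search_term):
        # True if search_term occurs as a contiguous window of tokens at or
        # after position start
        size = len(search_term)
        target = tuple(search_term)

        return any(
            tuple(tokens[j:j + size]) == target
            for j in range(start, len(tokens) - size + 1)
        )

Whether the result is negated when assigning excl_matched depends on how the flag is used afterwards, which the excerpt does not show.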