BLKSerene/Wordless

Showing 206 of 206 total issues

Avoid deeply nested control flow statements.
Open

                    for i, token in enumerate(sentence_seg):
                        if wl_checks_tokens.is_num(token):
                            sentence_seg[i] = wl_texts.Wl_Token('')

    # Replace token texts with lemmas
Severity: Major
Found in wordless/wl_nlp/wl_token_processing.py - About 45 mins to fix
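
One way to act on findings like this is to pull the innermost loop into a small module-level helper, so the enclosing function in wl_token_processing.py drops a level of nesting. A minimal sketch, with plain strings standing in for wl_texts.Wl_Token and a simplified is_num() in place of wl_checks_tokens.is_num():

    def is_num(token):
        # Simplified stand-in for wl_checks_tokens.is_num
        return token.replace('.', '', 1).replace(',', '').isdigit()

    def blank_out_numeric_tokens(sentence_seg):
        # Replace numeric tokens with empty tokens in place
        for i, token in enumerate(sentence_seg):
            if is_num(token):
                sentence_seg[i] = ''

    seg = ['The', 'answer', 'is', '42']
    blank_out_numeric_tokens(seg)
    print(seg)  # ['The', 'answer', 'is', '']

The enclosing loop over sentence segments then calls blank_out_numeric_tokens(sentence_seg) directly, and the flagged nesting moves into the helper.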

Avoid deeply nested control flow statements.
Open

                    for sentence in doc.sentences:
                        for token in sentence.words:
                            texts_tagged.append(token.text)

                            if tagset in ['default', 'raw']:
Severity: Major
Found in wordless/wl_nlp/wl_pos_tagging.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                            for k, ngram in enumerate(wl_nlp_utils.ngrams(text.get_tokens_flat(), len_search_term)):
                                if ngram == search_term:
                                    points.append([x_start + k, i])

Severity: Major
Found in wordless/wl_concordancer.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                if any((text in tr for text in [])):
                    # Flag translation as unfinished to be reviewed manually
                    unfinished = True

Severity: Major
Found in utils/wl_trs_translate.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                            for word in wl_word_tokenization.wl_word_tokenize_flat(main, tr, lang):
                                add_val_to_trs(trs_lexicon, word, vals)
                        else:
Severity: Major
Found in utils/wl_generate_vader_dicts.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                        if wl_matching.split_tag_embedded(opening_tag_text)[1] == '*':
                            opening_tag_text = opening_tag_text.replace('*', self.tr('TAG'))

Severity: Major
Found in wordless/wl_settings/wl_settings_files.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                        for tag in re.finditer(re_tags, para):
                            tags_tokens = self.add_tags_splitting(para[i_tag_end:tag.start()], tags_tokens)
                            tags_tokens[-1].append(tag.group())

                            i_tag_end = tag.end()
Severity: Major
Found in wordless/wl_nlp/wl_texts.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                        if (para := para[i_tag_end:]):
                            tags_tokens = self.add_tags_splitting(para, tags_tokens)

                # Insert tags at the start of the text
                if self.tags_text_start and tags_tokens:
Severity: Major
Found in wordless/wl_nlp/wl_texts.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                    for token, lemma_search in set(zip(tokens, lemmas_search)):
                        if re_match(lemma_matched, lemma_search, flags = re_flags):
                            search_results.add(token)

Severity: Major
Found in wordless/wl_nlp/wl_matching.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                    for token, lemma_search in set(zip(tokens, lemmas_search)):
                        if re_match(lemma_matched, lemma_search, flags = re_flags):
                            tokens_matched[search_term_token].add(token)

Severity: Major
Found in wordless/wl_nlp/wl_matching.py - About 45 mins to fix
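
The two wl_matching.py findings above quote nearly identical fragments: both iterate over (token, lemma) pairs and collect the tokens whose lemma matches a pattern. Extracting that inner loop into a shared helper would flatten both call sites at once. A hedged sketch that uses re.fullmatch directly in place of the module's re_match wrapper (the helper name and exact matching semantics are assumptions):

    import re

    def match_tokens_by_lemma(tokens, lemmas_search, lemma_matched, re_flags = 0):
        # Collect every token whose search lemma matches the given pattern
        matched = set()

        for token, lemma_search in set(zip(tokens, lemmas_search)):
            if re.fullmatch(lemma_matched, lemma_search, flags = re_flags):
                matched.add(token)

        return matched

One call site would then update search_results with the returned set and the other tokens_matched[search_term_token], e.g. search_results |= match_tokens_by_lemma(tokens, lemmas_search, lemma_matched, re_flags).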

Avoid deeply nested control flow statements.
Open

                    for sentence_seg in sentence:
                        for i, token in enumerate(sentence_seg):
                            if token.istitle():
                                sentence_seg[i] = wl_texts.Wl_Token('')
    else:
Severity: Major
Found in wordless/wl_nlp/wl_token_processing.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                        if i != item_row and item_text == text:
                            wl_msg_boxes.Wl_Msg_Box_Warning(
                                self.main,
                                title = _tr('wl_lists', 'Duplicates Found'),
                                text = _tr('wl_lists', '''
Severity: Major
Found in wordless/wl_widgets/wl_lists.py - About 45 mins to fix

Function add_headers_vert has 6 arguments (exceeds 4 allowed). Consider refactoring.
Open

    def add_headers_vert(
Severity: Minor
Found in wordless/wl_widgets/wl_tables.py - About 45 mins to fix
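
For argument-count findings like this one (and the __init__ flagged further down), a common refactoring is to make the less essential parameters keyword-only with defaults, or to group them into a small parameter object. A sketch under assumed parameter names; the actual signature of add_headers_vert is not shown in the excerpt:

    from dataclasses import dataclass

    @dataclass
    class HeaderSettings:
        # Hypothetical grouping of per-header flags that would otherwise
        # travel as separate positional arguments
        is_int: bool = False
        is_float: bool = False
        is_pct: bool = False
        is_cum: bool = False

    def add_headers_vert(table, headers, settings = None):
        # Three parameters instead of six; callers that only need the
        # defaults pass nothing extra
        settings = settings or HeaderSettings()

        for header in headers:
            table.append((header, settings))

    table = []
    add_headers_vert(table, ['Rank', 'Frequency'], HeaderSettings(is_int = True))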

Avoid deeply nested control flow statements.
Open

                        for file in glob.glob(os.path.join(
                            self.settings_custom['general']['imp']['temp_files']['default_path'], '*.*'
                        )):
                            os.remove(file)

Severity: Major
Found in wordless/wl_main.py - About 45 mins to fix

Function __init__ has 6 arguments (exceeds 4 allowed). Consider refactoring.
Open

    def __init__(
Severity: Minor
Found in wordless/wl_dialogs/wl_dialogs.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                            if (
                                (
                                    (
                                        not settings['search_settings']['match_dependency_relations']
                                        and (token in search_terms or head in search_terms)
Severity: Major
Found in wordless/wl_dependency_parser.py - About 45 mins to fix
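
When the nesting comes from one large compound condition, as in this finding, extracting the condition into a named predicate keeps the control flow flat and gives the logic a readable name. The sketch below covers only the clauses visible in the excerpt; the real condition in wl_dependency_parser.py continues beyond them, and is_search_hit is an illustrative name:

    def is_search_hit(token, head, search_terms, settings):
        # Only the clauses quoted in the excerpt; the original condition
        # has further alternatives
        return (
            not settings['search_settings']['match_dependency_relations']
            and (token in search_terms or head in search_terms)
        )

    settings = {'search_settings': {'match_dependency_relations': False}}

    if is_search_hit('taste', 'like', {'taste'}, settings):
        print('hit')

The deeply nested if (...) then collapses to a single call: if is_search_hit(token, head, search_terms, settings): ...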

Avoid deeply nested control flow statements.
Open

                            for j, collocate in enumerate(tokens_right):
                                if wl_matching.check_context(
                                    i, tokens,
                                    context_settings = settings['search_settings']['context_settings'],
                                    search_terms_incl = search_terms_incl,
Severity: Major
Found in wordless/wl_collocation_extractor.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                            for j, collocate in enumerate(reversed(tokens_left)):
                                if wl_matching.check_context(
                                    i, tokens,
                                    context_settings = settings['search_settings']['context_settings'],
                                    search_terms_incl = search_terms_incl,
Severity: Major
Found in wordless/wl_collocation_extractor.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                            if settings_limit_searching == _tr('Wl_Worker_Collocation_Extractor', 'None'):
                                tokens_right = tokens[i + ngram_size + window_left - 1 : i + ngram_size + window_right]
                            else:
                                # Span positions (Right)
                                for position in range(i + ngram_size + window_left - 1, i + ngram_size + window_right):
Severity: Major
Found in wordless/wl_collocation_extractor.py - About 45 mins to fix

Avoid deeply nested control flow statements.
Open

                        with open(file_settings_display_lang, 'wb') as f:
                            pickle.dump(action.lang, f)

                # Remove settings file
                if os.path.exists(file_settings):
Severity: Major
Found in wordless/wl_main.py - About 45 mins to fix