Showing 206 of 206 total issues
Avoid deeply nested control flow statements.
if token_syllabified == self.tr('No language support'):
    self.set_item_err(i, 2, token_syllabified, alignment_hor = 'left')
else:
    self.model().setItem(i, 2, wl_tables.Wl_Table_Item(token_syllabified))
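A common fix for this class of warning is to hoist the inner branch into a small helper, so the enclosing loops keep a single level of branching. A minimal, self-contained sketch of the pattern (none of these names are Wordless APIs):

# Hypothetical illustration of flattening a nested if/else via a helper
def format_cell(token_syllabified, no_support_msg = 'No language support'):
    # Return a (text, is_error) pair instead of branching at every call site
    if token_syllabified == no_support_msg:
        return token_syllabified, True
    return token_syllabified, False

for i, token in enumerate(['syl-la-ble', 'No language support']):
    text, is_error = format_cell(token)
    print(i, text, 'ERROR' if is_error else 'OK')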
Avoid deeply nested control flow statements.
# Same token found in more than one language
for lang, syllabified_form in syls_tokens[token].items():
    lang_text = wl_conversion.to_lang_text(self.main, lang)
    token_syllabified_forms.append(f"{syllabified_form} [{lang_text}]")
Function z_test_berry_rogghe has 6 arguments (exceeds 4 allowed). Consider refactoring.
def z_test_berry_rogghe(main, o11s, o12s, o21s, o22s, span):
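One way to get under the argument limit is to bundle the four contingency-table count lists into a single object. A minimal sketch (the ContingencyCounts class and its field names are assumptions, not part of the Wordless codebase):

from dataclasses import dataclass

# Hypothetical grouping of the four observed-frequency lists
@dataclass
class ContingencyCounts:
    o11s: list
    o12s: list
    o21s: list
    o22s: list

def z_test_berry_rogghe(main, counts, span):
    # The original test body would read counts.o11s, counts.o12s, etc.
    return len(counts.o11s), span

print(z_test_berry_rogghe(None, ContingencyCounts([1], [2], [3], [4]), span = 5))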
Avoid deeply nested control flow statements.
if ngram == tuple(search_term):
    self.dialog.items_found.append([table, row, col])
Avoid deeply nested control flow statements.
if settings_limit_searching == _tr('Wl_Worker_Collocation_Extractor', 'None'):
    tokens_left = tokens[max(0, i + window_left) : max(0, i + window_right + 1)]
else:
    # Span positions (Left)
    for position in range(max(0, i + window_left), max(0, i + window_right + 1)):
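The repeated slicing arithmetic can be pulled into a named helper so the search loop no longer nests it. A self-contained sketch (window_tokens is an illustrative name, not a Wordless function):

def window_tokens(tokens, i, window_left, window_right):
    # Clamp both ends at 0 so negative offsets never wrap around the list
    return tokens[max(0, i + window_left) : max(0, i + window_right + 1)]

tokens = ['a', 'b', 'c', 'd', 'e']
print(window_tokens(tokens, i = 2, window_left = -3, window_right = 1))
# ['a', 'b', 'c', 'd'], with the left edge clamped to the start of the list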
Avoid deeply nested control flow statements.
if ngram == search_term:
    collocations_freqs_file_filtered[(node, collocate)] = freqs
Avoid deeply nested control flow statements.
if self.tr('No language support') in tokens_syllabified:
    self.set_item_err(i, 2, tokens_syllabified, alignment_hor = 'left')
else:
    self.model().setItem(i, 2, wl_tables.Wl_Table_Item(tokens_syllabified))
Avoid deeply nested control flow statements.
if settings_limit_searching == _tr('Wl_Worker_Collocation_Extractor', 'None'):
    tokens_left = tokens[max(0, i + window_left) : i]
    tokens_right = tokens[i + ngram_size : i + ngram_size + window_right]
else:
    # Span positions (Left)
Avoid deeply nested control flow statements.
for j, collocate in enumerate(reversed(tokens_left)):
    if wl_matching.check_context(
        i, tokens,
        context_settings = settings['search_settings']['context_settings'],
        search_terms_incl = search_terms_incl,
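Moving the inner collocate scan into its own generator keeps the outer token loop one level deep. A minimal sketch of the pattern (passes_context stands in for the real wl_matching.check_context call and is an assumption here):

def passes_context(i, tokens, excl_terms):
    return tokens[i] not in excl_terms

def scan_left(tokens, i, excl_terms):
    # Walk the left context right-to-left, yielding collocates that pass
    for j, collocate in enumerate(reversed(tokens[:i])):
        if passes_context(i, tokens, excl_terms):
            yield j, collocate

print(list(scan_left(['x', 'y', 'z'], i = 2, excl_terms = set())))
# [(0, 'y'), (1, 'x')]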
Avoid deeply nested control flow statements.
if os.path.exists(file_settings):
    os.remove(file_settings)
# Remove file caches
for file in glob.glob(os.path.join(
Avoid deeply nested control flow statements.
if settings_limit_searching == _tr('Wl_Worker_Collocation_Extractor', 'Within sentence segments'):
    offsets_unit = offsets_sentence_segs
    len_unit = len_sentence_segs
elif settings_limit_searching == _tr('Wl_Worker_Collocation_Extractor', 'Within sentences'):
    offsets_unit = offsets_sentences
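An if/elif ladder keyed on a single setting can often become a dictionary lookup, which removes one nesting level and keeps the mapping in one place. A sketch with made-up values (keys and variables are illustrative only):

offsets_sentence_segs = [0, 3]
offsets_sentences = [0, 5]

# Map each setting value to its offsets list up front
unit_offsets = {
    'Within sentence segments': offsets_sentence_segs,
    'Within sentences': offsets_sentences,
}

settings_limit_searching = 'Within sentences'
offsets_unit = unit_offsets[settings_limit_searching]
print(offsets_unit)  # [0, 5]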
Avoid deeply nested control flow statements.
for j, collocate in enumerate(tokens_right):
    if wl_matching.check_context(
        i, tokens,
        context_settings = settings['search_settings']['context_settings'],
        search_terms_incl = search_terms_incl,
Consider simplifying this complex logical expression.
if (
    token_settings.get('apply_lemmatization', False)
    or search_settings['match_inflected_forms']
    or (
        search_settings['context_settings']['incl']['incl']
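Naming each sub-condition before combining them lets the final test read as a sentence. A self-contained sketch (the dictionaries mirror the keys in the snippet above, but their values are made up):

token_settings = {'apply_lemmatization': False}
search_settings = {
    'match_inflected_forms': True,
    'context_settings': {'incl': {'incl': False}},
}

# Each boolean gets a name that states what it checks
lemmatized = token_settings.get('apply_lemmatization', False)
inflected = search_settings['match_inflected_forms']
context_incl = search_settings['context_settings']['incl']['incl']

if lemmatized or inflected or context_incl:
    print('lemmatization needed')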
Function wl_pos_tag_universal has 5 arguments (exceeds 4 allowed). Consider refactoring.
def wl_pos_tag_universal(main, inputs, lang, pos_tagger = 'default', tagged = False):
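Folding the tagger choice and the tagged flag into a small options object cuts the signature to four parameters. A minimal sketch (TaggingOpts and its defaults are assumptions, not the actual Wordless API):

from dataclasses import dataclass

# Hypothetical options bundle for the two trailing parameters
@dataclass
class TaggingOpts:
    pos_tagger: str = 'default'
    tagged: bool = False

def wl_pos_tag_universal(main, inputs, lang, opts = None):
    opts = opts or TaggingOpts()
    # Placeholder body: tag each token with the chosen tagger's name
    return [f'{token}/{opts.pos_tagger}' for token in inputs]

print(wl_pos_tag_universal(None, ['hello'], 'eng', TaggingOpts(pos_tagger = 'nltk')))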
Function wl_syl_tokenize has 5 arguments (exceeds 4 allowed). Consider refactoring.
def wl_syl_tokenize(main, inputs, lang, syl_tokenizer = 'default', force = False):
Function wl_lemmatize has 5 arguments (exceeds 4 allowed). Consider refactoring.
def wl_lemmatize(main, inputs, lang, lemmatizer = 'default', force = False):
Function match_search_terms_context has 5 arguments (exceeds 4 allowed). Consider refactoring.
def match_search_terms_context(
Function set_item_err has 5 arguments (exceeds 4 allowed). Consider refactoring.
def set_item_err(self, row, col, text, alignment_hor = 'center'):
Function __init__ has 5 arguments (exceeds 4 allowed). Consider refactoring.
def __init__(self, parent, line_edit, caption, filters, initial_filter = -1):
Function wl_process_tokens_concordancer has 5 arguments (exceeds 4 allowed). Consider refactoring.
def wl_process_tokens_concordancer(main, text, token_settings, search_settings, preserve_blank_lines = False):