From 5a94f083fa843890c01ef5a9a49a025f2227def3 Mon Sep 17 00:00:00 2001
From: William Allen <16820599+williamjallen@users.noreply.github.com>
Date: Wed, 18 May 2022 18:28:48 -0400
Subject: [PATCH] [Bugfix:Plagiarism] Fix missing default tokenization args (#84)

---
 bin/tokenize_all.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bin/tokenize_all.py b/bin/tokenize_all.py
index ec1dd6f..a3a73c1 100644
--- a/bin/tokenize_all.py
+++ b/bin/tokenize_all.py
@@ -33,7 +33,7 @@ def tokenize(lichen_config_data, my_concatenated_file, my_tokenized_file):
             cli_args.append(language_token_data["command_args"][argument]["argument"])
         else:
             print(f"Error: Unknown tokenization argument {argument}")
-    else:  # Use the default arguments
+    elif "command_args" in language_token_data:  # Use the default arguments if they exist
         for argument in language_token_data["command_args"]:
             if "default" in language_token_data["command_args"][argument].keys() and\
                language_token_data["command_args"][argument]["default"]:
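
For context, here is a minimal, self-contained sketch of the branch logic this patch changes. The sample dictionaries and the helper name build_cli_args are illustrative stand-ins, not Lichen's actual config files or code: the point is that the old bare "else:" branch assumed every language entry has a "command_args" key and would raise a KeyError when it is absent, while the new guard simply skips default-argument selection in that case.

# Sketch of the patched default-argument selection (assumed data shapes,
# hypothetical helper name; not the real bin/tokenize_all.py).

def build_cli_args(lichen_config_data, language_token_data):
    cli_args = []
    if "command_args" in lichen_config_data:
        # Arguments were explicitly requested in the Lichen config.
        for argument in lichen_config_data["command_args"]:
            if argument in language_token_data["command_args"]:
                cli_args.append(language_token_data["command_args"][argument]["argument"])
            else:
                print(f"Error: Unknown tokenization argument {argument}")
    elif "command_args" in language_token_data:  # Use the default arguments if they exist
        for argument in language_token_data["command_args"]:
            if "default" in language_token_data["command_args"][argument].keys() and\
               language_token_data["command_args"][argument]["default"]:
                cli_args.append(language_token_data["command_args"][argument]["argument"])
    return cli_args


# A language whose tokenizer takes no optional arguments: before this patch,
# the bare "else:" branch failed with KeyError on language_token_data["command_args"].
print(build_cli_args({}, {"tokenizer": "example_tokenizer.out"}))  # -> []

# A language with a default-enabled argument still picks it up as before.
print(build_cli_args(
    {},
    {"command_args": {"ignore_newlines": {"argument": "--ignore_newlines",
                                          "default": True}}},
))  # -> ['--ignore_newlines']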