From 5d44d90047e7ae082c49187b09ce6f2a6b707987 Mon Sep 17 00:00:00 2001
From: Adam Wilson
Date: Wed, 23 Apr 2025 06:56:24 -0600
Subject: [PATCH] fix prompt ref

---
 tests/llm/phi3_language_model.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/tests/llm/phi3_language_model.py b/tests/llm/phi3_language_model.py
index 7c6ab73b0..a08357ec4 100644
--- a/tests/llm/phi3_language_model.py
+++ b/tests/llm/phi3_language_model.py
@@ -15,7 +15,7 @@ class Phi3LanguageModel:
 
         self.tokenizer_stream = self.tokenizer.create_stream()
 
-    def get_response(self, args):
+    def get_response(self, prompt_input):
         search_options = { 'max_length': 2048 }
 
         params = og.GeneratorParams(self.model)
@@ -23,7 +23,6 @@ class Phi3LanguageModel:
         generator = og.Generator(self.model, params)
 
         # process prompt input and generate tokens
-        prompt_input = args.prompt
         chat_template = '<|user|>\n{input} <|end|>\n<|assistant|>'
         prompt = f'{chat_template.format(input=prompt_input)}'
         input_tokens = self.tokenizer.encode(prompt)
@@ -54,4 +53,4 @@ if __name__ == "__main__":
     args = parser.parse_args()
 
     model = Phi3LanguageModel(args.model_path)
-    model.get_response()
+    model.get_response(args.prompt)