From edcff6a89ad31c4ec7a83aa49db398617eb9afe9 Mon Sep 17 00:00:00 2001
From: Sam Khoze <68170403+SamKhoze@users.noreply.github.com>
Date: Sat, 15 Jun 2024 23:32:24 +0530
Subject: [PATCH] Update llm_node.py

Fix the NameError in LLM_node.run_llm: the method returned the
undefined name `text` instead of the model's reply. Extract the
completion text from the API response, and wrap the request in a
try/except that re-raises as ValueError with the original exception
chained so the API error's traceback is preserved.

Keep RETURN_TYPES as the standard "STRING" type so existing workflow
connections to STRING inputs keep working; only the display label
(RETURN_NAMES) changes to "LLM_RESPONSE".
---
 llm_node.py | 32 +++++++++++++++++++-------------
 1 file changed, 19 insertions(+), 13 deletions(-)

diff --git a/llm_node.py b/llm_node.py
index 38af881..d855189 100644
--- a/llm_node.py
+++ b/llm_node.py
@@ -19,21 +19,27 @@ class LLM_node:
     }
 
     CATEGORY = "DeepFuze"
     RETURN_TYPES = ("STRING",)
-    RETURN_NAMES = ("text",)
+    RETURN_NAMES = ("LLM_RESPONSE",)
     FUNCTION = "run_llm"
 
     def run_llm(self, system_prompt, user_query, model_name,temperature,api_key,max_tokens,timeout):
         client = OpenAI(api_key=api_key)
-        response = client.chat.completions.create(
-            model=model_name,
-            messages=[
-                {"role": "system", "content": system_prompt},
-                {"role": "user", "content": user_query}
-            ],
-            # stop = stop.split(","),
-            temperature=temperature,
-            max_tokens=max_tokens,
-            timeout=timeout
-        )
-        return (text,)
+        try:
+            response = client.chat.completions.create(
+                model=model_name,
+                messages=[
+                    {"role": "system", "content": system_prompt},
+                    {"role": "user", "content": user_query}
+                ],
+                # stop = stop.split(","),
+                temperature=temperature,
+                max_tokens=max_tokens,
+                timeout=timeout
+            )
+            # The SDK returns a ChatCompletion object; take the first choice's text.
+            result = response.choices[0].message.content
+        except Exception as e:
+            # Surface API/parsing failures to ComfyUI with the original cause chained.
+            raise ValueError(f"LLM request failed: {e}") from e
+        return (result,)