diff --git a/src/text_generation/services/guidelines/guidelines_factory.py b/src/text_generation/services/guidelines/guidelines_factory.py
index 5e6900267..fb40eb612 100644
--- a/src/text_generation/services/guidelines/guidelines_factory.py
+++ b/src/text_generation/services/guidelines/guidelines_factory.py
@@ -27,7 +27,6 @@ class AbstractGuidelinesFactory(ABC):
     def create_rag_context_guidelines_service(
         self,
         foundation_model: AbstractFoundationModel,
-        prompt_template: StringPromptTemplate,
         response_processing_service: AbstractResponseProcessingService,
         prompt_template_service: AbstractPromptTemplateService,
         llm_configuration_introspection_service: AbstractLLMConfigurationIntrospectionService,
@@ -51,7 +50,6 @@ class GuidelinesFactory(AbstractGuidelinesFactory):
     def create_rag_context_guidelines_service(
         self,
         foundation_model: AbstractFoundationModel,
-        prompt_template: StringPromptTemplate,
         response_processing_service: AbstractResponseProcessingService,
         prompt_template_service: AbstractPromptTemplateService,
         llm_configuration_introspection_service: AbstractLLMConfigurationIntrospectionService,
@@ -62,8 +60,7 @@ class GuidelinesFactory(AbstractGuidelinesFactory):
             response_processing_service=response_processing_service,
             prompt_template_service=prompt_template_service,
             llm_configuration_introspection_service=llm_configuration_introspection_service,
-            config_builder=config_builder,
-            prompt_template=prompt_template
+            config_builder=config_builder
         )
 
     def create_cot_guidelines_service(
diff --git a/src/text_generation/services/nlp/text_generation_completion_service.py b/src/text_generation/services/nlp/text_generation_completion_service.py
index d405c679c..79ceac6b8 100644
--- a/src/text_generation/services/nlp/text_generation_completion_service.py
+++ b/src/text_generation/services/nlp/text_generation_completion_service.py
@@ -261,7 +261,6 @@ class TextGenerationCompletionService(AbstractTextGenerationCompletionService):
         """Factory method to create the appropriate guidelines service"""
         base_params = {
             'foundation_model': self._current_model,
-            'prompt_template': prompt_template,
             'response_processing_service': self.response_processing_service,
             'prompt_template_service': self.prompt_template_service,
             'llm_configuration_introspection_service': self.llm_configuration_introspection_service