From 1b241eed3036cda63e1f4b97f2810cbca1e5a418 Mon Sep 17 00:00:00 2001
From: Antonino Lorenzo <94693967+antoninoLorenzo@users.noreply.github.com>
Date: Tue, 16 Jul 2024 11:43:42 +0200
Subject: [PATCH] llm.py: removed stream parameter from query method

---
 src/agent/llm/llm.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/agent/llm/llm.py b/src/agent/llm/llm.py
index f8d095c..85b6f23 100644
--- a/src/agent/llm/llm.py
+++ b/src/agent/llm/llm.py
@@ -48,7 +48,7 @@ class Provider(ABC):
     api_key: str | None = None
 
     @abstractmethod
-    def query(self, messages: list, stream=True):
+    def query(self, messages: list):
         """Implement to makes query to the LLM provider"""
 
 
@@ -62,13 +62,13 @@ def __post_init__(self):
             raise ValueError(f'Model {self.model} is not available')
         self.client = Client(self.client_url)
 
-    def query(self, messages: list, stream=True):
+    def query(self, messages: list):
         """Generator that returns response chunks."""
         try:
             stream = self.client.chat(
                 model=self.model,
                 messages=messages,
-                stream=stream,
+                stream=True,
                 options=AVAILABLE_MODELS[self.model]['options']
             )
             for chunk in stream:
@@ -89,7 +89,7 @@ def __post_init__(self):
             'mistral': 'mistralai/mistral-7b-instruct:free'
         }
 
-    def query(self, messages: list, stream=True):
+    def query(self, messages: list):
         """Generator that returns response chunks."""
         response = self.session.post(
             url=self.client_url,
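
For reference, a minimal sketch of how a caller consumes query() after this
change: since the stream parameter is gone and streaming is hardcoded, callers
iterate the generator instead of toggling a flag. The OllamaProvider class
name, its model/client_url constructor fields, and the assumption that plain
text chunks are yielded are inferred from the hunk context, not shown in the
patch itself.

    # Minimal usage sketch (not part of the patch). OllamaProvider is an
    # assumed name for the Ollama-backed Provider subclass; 'mistral' is
    # assumed to be a key in AVAILABLE_MODELS.
    from src.agent.llm.llm import OllamaProvider  # hypothetical class name

    provider = OllamaProvider(
        model='mistral',                      # assumed model key
        client_url='http://localhost:11434',  # default Ollama endpoint
    )

    messages = [{'role': 'user', 'content': 'Hello'}]

    # After this patch, query() no longer accepts a stream argument: it
    # always streams, yielding response chunks as a generator.
    for chunk in provider.query(messages):
        print(chunk, end='', flush=True)  # assumes text chunks are yielded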