Skip to content

Commit

Permalink
llm.py: removed stream parameter from query method
Browse files · Browse the repository at this point in the history
  • Loading branch information
antoninoLorenzo committed Jul 16, 2024
1 parent c9385c8 commit 1b241ee
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions src/agent/llm/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ class Provider(ABC):
api_key: str | None = None

@abstractmethod
def query(self, messages: list, stream=True):
def query(self, messages: list):
"""Implement to makes query to the LLM provider"""


Expand All @@ -62,13 +62,13 @@ def __post_init__(self):
raise ValueError(f'Model {self.model} is not available')
self.client = Client(self.client_url)

def query(self, messages: list, stream=True):
def query(self, messages: list):
"""Generator that returns response chunks."""
try:
stream = self.client.chat(
model=self.model,
messages=messages,
stream=stream,
stream=True,
options=AVAILABLE_MODELS[self.model]['options']
)
for chunk in stream:
Expand All @@ -89,7 +89,7 @@ def __post_init__(self):
'mistral': 'mistralai/mistral-7b-instruct:free'
}

def query(self, messages: list, stream=True):
def query(self, messages: list):
"""Generator that returns response chunks."""
response = self.session.post(
url=self.client_url,
Expand Down

0 comments on commit 1b241ee

Please sign in to comment.