Added 'ProviderError' for error handling
Error handling, i.e. not showing a traceback on the ai-ops-cli client.
antoninoLorenzo committed Jul 23, 2024
1 parent fbb30b5 commit dd063df
Showing 3 changed files with 23 additions and 5 deletions.
10 changes: 9 additions & 1 deletion src/agent/llm/__init__.py
@@ -1,4 +1,12 @@
-from src.agent.llm.llm import LLM, Provider, Ollama, OpenRouter
+"""Exposes implemented LLM functionalities"""
+
+from src.agent.llm.llm import (
+    LLM,
+    Provider,
+    Ollama,
+    OpenRouter,
+    ProviderError
+)
 
 AVAILABLE_PROVIDERS = {
     'ollama': {'class': Ollama, 'key_required': False},
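Beyond re-exporting ProviderError, the package root also exposes the AVAILABLE_PROVIDERS registry, which maps a provider name to its class and whether it needs an API key. A minimal sketch of how such a registry might be consumed (make_provider is illustrative, and the constructor keyword is an assumption, since the provider dataclass fields are not shown in this diff):

from typing import Optional

from src.agent.llm import AVAILABLE_PROVIDERS


def make_provider(name: str, model: str, api_key: Optional[str] = None):
    """Look up a registered provider by name and instantiate its class."""
    entry = AVAILABLE_PROVIDERS.get(name)
    if entry is None:
        raise ValueError(f'unknown provider: {name}')
    if entry['key_required'] and api_key is None:
        raise ValueError(f'{name} requires an API key')
    # Assumed constructor signature: 'model' as a keyword argument is a
    # guess, as the Provider dataclasses are not shown in full here.
    return entry['class'](model=model)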
7 changes: 6 additions & 1 deletion src/agent/llm/llm.py
@@ -52,6 +52,11 @@ def query(self, messages: list):
         """Implement to make queries to the LLM provider"""
 
 
+class ProviderError(Exception):
+    """Wraps Exception for error handling
+    when the error is caused by the LLM provider"""
+
+
 @dataclass
 class Ollama(Provider):
     """Ollama Interface"""
@@ -74,7 +79,7 @@ def query(self, messages: list):
             for chunk in stream:
                 yield chunk['message']['content']
         except ResponseError as err:
-            raise RuntimeError(err)
+            raise ProviderError(err)
 
 
 @dataclass
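This hunk is the core of the commit: a provider-specific exception (ollama's ResponseError here) is caught at the provider boundary and re-raised as ProviderError, so callers handle a single exception type regardless of backend. A minimal self-contained sketch of that pattern, with FakeBackendError standing in for the real provider error:

class ProviderError(Exception):
    """Raised when the underlying LLM provider fails."""


class FakeBackendError(Exception):
    """Stand-in for a provider-specific error such as ollama's ResponseError."""


def stream_completion(prompt: str):
    """Yield response chunks, translating backend errors into ProviderError."""
    try:
        if not prompt:
            # Stand-in for a failure inside the real streaming call.
            raise FakeBackendError('empty prompt')
        yield from ('first chunk, ', 'second chunk')
    except FakeBackendError as err:
        # Callers only ever have to catch ProviderError.
        raise ProviderError(err) from err


for piece in stream_completion('hello'):
    print(piece, end='')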
11 changes: 8 additions & 3 deletions src/api.py
@@ -21,6 +21,7 @@
 - /collections/new: Creates a new Collection.
 """
 import os
+import json
 
 from dotenv import load_dotenv
 from fastapi import FastAPI, HTTPException, Body
@@ -33,6 +34,7 @@
 # from src.agent.knowledge import Store
 from src.agent.plan import TaskStatus
 from src.agent.tools import TOOLS
+from src.agent.llm import ProviderError
 
 load_dotenv()
 
@@ -170,9 +172,12 @@ def delete_session(sid: int):
 # --- AGENT RELATED
 
 def query_generator(sid: int, q: str):
-    stream = agent.query(sid, q, rag=False)
-    for chunk in stream:
-        yield chunk
+    try:
+        stream = agent.query(sid, q, rag=False)
+        for chunk in stream:
+            yield chunk
+    except ProviderError as err:
+        yield json.dumps({'error': str(err)})
 
 
 @app.post('/session/{sid}/query/')
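On the client side, this is what removes the traceback: when the provider fails, the stream now carries a single JSON object instead of aborting with a server error, so a consumer such as ai-ops-cli can detect it and print a plain message. A hedged sketch of such a consumer (the requests usage and request shape are assumptions, not taken from the ai-ops-cli source):

import json

import requests  # assumed HTTP client; the real ai-ops-cli may differ


def print_query(base_url: str, sid: int, q: str):
    """Stream a query response and surface provider errors cleanly."""
    # The request shape (query string parameter) is an assumption; only
    # the endpoint path appears in the diff above.
    url = f'{base_url}/session/{sid}/query/'
    with requests.post(url, params={'q': q}, stream=True) as resp:
        for raw in resp.iter_content(chunk_size=None):
            chunk = raw.decode('utf-8')
            try:
                payload = json.loads(chunk)
            except json.JSONDecodeError:
                print(chunk, end='')  # ordinary text chunk from the LLM
                continue
            if isinstance(payload, dict) and 'error' in payload:
                print(f"error: {payload['error']}")  # no traceback shown
                return
            print(chunk, end='')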
