Commit

cr
jerryjliu committed Dec 2, 2023
1 parent 3f81f7e commit 04cdaa1
Showing 6 changed files with 32 additions and 90 deletions.
52 changes: 22 additions & 30 deletions 1_🏠_Home.py
@@ -1,16 +1,7 @@
import streamlit as st
from streamlit_pills import pills
from typing import cast

from core.agent_builder import (
load_meta_agent_and_tools,
load_agent_ids_from_directory,
AgentCacheRegistry,
)
from st_utils import add_sidebar, get_current_state
from core.constants import (
AGENT_CACHE_DIR,
)


####################
@@ -77,24 +68,25 @@ def add_to_message_history(role: str, content: str) -> None:
with st.chat_message("user"):
st.write(prompt)

# If last message is not from assistant, generate a new response
if st.session_state.messages[-1]["role"] != "assistant":
with st.chat_message("assistant"):
with st.spinner("Thinking..."):
response = current_state.builder_agent.chat(prompt)
st.write(str(response))
add_to_message_history("assistant", str(response))

# check agent_ids again, if it doesn't match, add to directory and refresh
agent_ids = current_state.agent_registry.get_agent_ids()
# check diff between agent_ids and cur agent ids
diff_ids = list(set(agent_ids) - set(st.session_state.cur_agent_ids))
if len(diff_ids) > 0:
# clear streamlit cache, to allow you to generate a new agent
st.cache_resource.clear()

# trigger refresh
st.rerun()

else:
pass
# If last message is not from assistant, generate a new response
if st.session_state.messages[-1]["role"] != "assistant":
with st.chat_message("assistant"):
with st.spinner("Thinking..."):
response = current_state.builder_agent.chat(prompt)
st.write(str(response))
add_to_message_history("assistant", str(response))

# check agent_ids again
# if it doesn't match, add to directory and refresh
agent_ids = current_state.agent_registry.get_agent_ids()
# check diff between agent_ids and cur agent ids
diff_ids = list(set(agent_ids) - set(st.session_state.cur_agent_ids))
if len(diff_ids) > 0:
# clear streamlit cache, to allow you to generate a new agent
st.cache_resource.clear()

# trigger refresh
st.rerun()

else:
pass
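The block above is unchanged by this commit apart from the reflowed comment; it implements a simple refresh pattern: after the builder agent responds, compare the registry's agent IDs against the IDs the session already knows about, and if a new agent has appeared, clear Streamlit's resource cache and rerun the page. Below is a minimal, self-contained sketch of that pattern; the `AgentRegistry` stub and its hard-coded IDs are assumptions for illustration, not the repository's `current_state.agent_registry`.

```python
# Sketch of the cache-clear-and-rerun pattern; run with: streamlit run this_file.py
from typing import List

import streamlit as st


class AgentRegistry:
    """Hypothetical stand-in for the app's agent registry."""

    def __init__(self) -> None:
        self._ids = ["agent_1", "agent_2"]

    def get_agent_ids(self) -> List[str]:
        return self._ids


registry = AgentRegistry()

# Remember which agent ids this session has already seen.
if "cur_agent_ids" not in st.session_state:
    st.session_state.cur_agent_ids = registry.get_agent_ids()

# Detect agents created since the page last rendered.
agent_ids = registry.get_agent_ids()
diff_ids = list(set(agent_ids) - set(st.session_state.cur_agent_ids))
if len(diff_ids) > 0:
    # Drop cached resources so the new agent is picked up on the next run...
    st.cache_resource.clear()
    # ...and immediately rerun the script so the agent list refreshes.
    st.rerun()
```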
19 changes: 5 additions & 14 deletions core/agent_builder.py
@@ -1,19 +1,10 @@
"""Agent builder."""

from llama_index.llms import OpenAI, ChatMessage, Anthropic, Replicate
from llama_index.llms.base import LLM
from llama_index.llms.utils import resolve_llm
from pydantic import BaseModel, Field
from llama_index.llms import ChatMessage
from llama_index.prompts import ChatPromptTemplate
from typing import List, cast, Optional
from llama_index import SimpleDirectoryReader
from llama_index.embeddings.utils import resolve_embed_model
from llama_index.tools import QueryEngineTool, ToolMetadata, FunctionTool
from llama_index.tools import FunctionTool
from llama_index.agent.types import BaseAgent
from llama_index.chat_engine.types import BaseChatEngine
from llama_index.agent.react.formatter import ReActChatFormatter
from llama_index.llms.openai_utils import is_function_calling_model
from llama_index.chat_engine import CondensePlusContextChatEngine
from core.builder_config import BUILDER_LLM
from typing import Dict, Tuple, Any, Callable, Union
import streamlit as st
@@ -23,8 +14,6 @@
from core.constants import AGENT_CACHE_DIR
import shutil

from llama_index.callbacks import CallbackManager
from callback_manager import StreamlitFunctionsCallbackHandler
from core.param_cache import ParamCache, RAGParams
from core.utils import (
load_data,
@@ -152,7 +141,9 @@ def __init__(
) -> None:
"""Init params."""
self._cache = cache or ParamCache()
self._agent_registry = agent_registry or AgentCacheRegistry(str(AGENT_CACHE_DIR))
self._agent_registry = agent_registry or AgentCacheRegistry(
str(AGENT_CACHE_DIR)
)

@property
def cache(self) -> ParamCache:
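The reworked `__init__` above falls back to `AgentCacheRegistry(str(AGENT_CACHE_DIR))` when no registry is passed in. The diff only exposes two pieces of that class's interface: a constructor taking a cache-directory path and the `get_agent_ids()` accessor used in `1_🏠_Home.py`. The sketch below reproduces just that surface; the on-disk layout (one subdirectory per agent id) is an assumption for illustration, not the actual `core.agent_builder.AgentCacheRegistry`.

```python
from pathlib import Path
from typing import List


class AgentCacheRegistrySketch:
    """Minimal sketch of a directory-backed agent registry (interface implied by the diff)."""

    def __init__(self, dirpath: str) -> None:
        # The constructor in the diff receives str(AGENT_CACHE_DIR).
        self._dirpath = Path(dirpath)

    def get_agent_ids(self) -> List[str]:
        """List agent ids by scanning the cache directory (assumed layout: one dir per agent)."""
        if not self._dirpath.exists():
            return []
        return [p.name for p in self._dirpath.iterdir() if p.is_dir()]


# Usage mirroring the fallback in RAGAgentBuilder.__init__:
AGENT_CACHE_DIR = Path("cache/agents")  # stand-in for core.constants.AGENT_CACHE_DIR
registry = AgentCacheRegistrySketch(str(AGENT_CACHE_DIR))
print(registry.get_agent_ids())
```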
25 changes: 0 additions & 25 deletions core/param_cache.py
@@ -1,43 +1,18 @@
"""Param cache."""

from llama_index.llms import OpenAI, ChatMessage, Anthropic, Replicate
from llama_index.llms.base import LLM
from llama_index.llms.utils import resolve_llm
from pydantic import BaseModel, Field
import os
from llama_index.agent import OpenAIAgent, ReActAgent
from llama_index.agent.react.prompts import REACT_CHAT_SYSTEM_HEADER
from llama_index import (
VectorStoreIndex,
SummaryIndex,
ServiceContext,
StorageContext,
Document,
load_index_from_storage,
)
from llama_index.prompts import ChatPromptTemplate
from typing import List, cast, Optional
from llama_index import SimpleDirectoryReader
from llama_index.embeddings.utils import resolve_embed_model
from llama_index.tools import QueryEngineTool, ToolMetadata, FunctionTool
from llama_index.agent.types import BaseAgent
from llama_index.chat_engine.types import BaseChatEngine
from llama_index.agent.react.formatter import ReActChatFormatter
from llama_index.llms.openai_utils import is_function_calling_model
from llama_index.chat_engine import CondensePlusContextChatEngine
from core.builder_config import BUILDER_LLM
from typing import Dict, Tuple, Any, Callable
import streamlit as st
from pathlib import Path
import json
import uuid
from core.constants import AGENT_CACHE_DIR
import shutil
from core.utils import load_data, get_tool_objects, construct_agent, RAGParams

from llama_index.callbacks import CallbackManager
from callback_manager import StreamlitFunctionsCallbackHandler


class ParamCache(BaseModel):
"""Cache for RAG agent builder.
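After the import purge, `core/param_cache.py` keeps only what `ParamCache` itself needs: pydantic, `Path`, `json`, `uuid`, `AGENT_CACHE_DIR`, `shutil`, and the helpers from `core.utils`. Those retained imports suggest a pydantic model persisted as JSON under a per-agent directory. The sketch below illustrates that pattern only; the field names and method names are hypothetical, not the repository's actual `ParamCache`.

```python
import json
import uuid
from pathlib import Path
from typing import List, Optional

from pydantic import BaseModel, Field


class RAGParamsSketch(BaseModel):
    """Hypothetical stand-in for core.utils.RAGParams."""

    top_k: int = 2
    chunk_size: int = 1024


class ParamCacheSketch(BaseModel):
    """Hedged sketch of a persistable builder cache; field names are assumptions."""

    agent_id: str = Field(default_factory=lambda: f"agent_{uuid.uuid4()}")
    system_prompt: Optional[str] = None
    file_names: List[str] = Field(default_factory=list)
    rag_params: RAGParamsSketch = Field(default_factory=RAGParamsSketch)

    def save_to_disk(self, cache_dir: str) -> None:
        """Persist the lightweight fields as JSON under cache_dir/<agent_id>/."""
        dirpath = Path(cache_dir) / self.agent_id
        dirpath.mkdir(parents=True, exist_ok=True)
        (dirpath / "cache.json").write_text(json.dumps(self.dict()))

    @classmethod
    def load_from_disk(cls, dirpath: str) -> "ParamCacheSketch":
        """Rebuild the cache from a previously saved directory."""
        data = json.loads((Path(dirpath) / "cache.json").read_text())
        return cls(**data)
```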
14 changes: 3 additions & 11 deletions core/utils.py
@@ -1,6 +1,6 @@
"""Utils."""

from llama_index.llms import OpenAI, ChatMessage, Anthropic, Replicate
from llama_index.llms import OpenAI, Anthropic, Replicate
from llama_index.llms.base import LLM
from llama_index.llms.utils import resolve_llm
from pydantic import BaseModel, Field
@@ -11,28 +11,20 @@
VectorStoreIndex,
SummaryIndex,
ServiceContext,
StorageContext,
Document,
load_index_from_storage,
)
from llama_index.prompts import ChatPromptTemplate
from typing import List, cast, Optional
from llama_index import SimpleDirectoryReader
from llama_index.embeddings.utils import resolve_embed_model
from llama_index.tools import QueryEngineTool, ToolMetadata, FunctionTool
from llama_index.tools import QueryEngineTool, ToolMetadata
from llama_index.agent.types import BaseAgent
from llama_index.chat_engine.types import BaseChatEngine
from llama_index.agent.react.formatter import ReActChatFormatter
from llama_index.llms.openai_utils import is_function_calling_model
from llama_index.chat_engine import CondensePlusContextChatEngine
from core.builder_config import BUILDER_LLM
from typing import Dict, Tuple, Any, Callable
from typing import Dict, Tuple, Any
import streamlit as st
from pathlib import Path
import json
import uuid
from core.constants import AGENT_CACHE_DIR
import shutil

from llama_index.callbacks import CallbackManager
from callback_manager import StreamlitFunctionsCallbackHandler
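Even after the cleanup, `core/utils.py` keeps the imports needed to turn loaded documents into an agent: `resolve_llm`/`resolve_embed_model`, `ServiceContext`, `VectorStoreIndex`/`SummaryIndex`, `QueryEngineTool`, and the OpenAI/ReAct agents. The sketch below shows how those pieces typically fit together in llama_index 0.9-era code; it is a generic pattern under assumed inputs (a local `./data` folder, an OpenAI key, a chosen model name), not the repository's `load_data`/`construct_agent` implementation.

```python
from llama_index import ServiceContext, SimpleDirectoryReader, VectorStoreIndex
from llama_index.agent import OpenAIAgent
from llama_index.llms import OpenAI
from llama_index.tools import QueryEngineTool, ToolMetadata

# Assumed inputs for illustration: a local ./data folder and an OpenAI model.
docs = SimpleDirectoryReader(input_dir="data").load_data()
llm = OpenAI(model="gpt-4")

# Bundle the LLM (and default embeddings) into a service context, then build the index.
service_context = ServiceContext.from_defaults(llm=llm)
index = VectorStoreIndex.from_documents(docs, service_context=service_context)

# Wrap the index's query engine as a tool the agent can call.
tool = QueryEngineTool(
    query_engine=index.as_query_engine(similarity_top_k=2),
    metadata=ToolMetadata(
        name="docs_query_tool",
        description="Answers questions over the loaded documents.",
    ),
)

# Function-calling agent over the single RAG tool.
agent = OpenAIAgent.from_tools([tool], llm=llm, verbose=True)
print(agent.chat("What do these documents cover?"))
```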
8 changes: 2 additions & 6 deletions pages/2_⚙️_RAG_Config.py
@@ -1,19 +1,15 @@
"""Streamlit page showing builder config."""
import streamlit as st
from typing import cast, Optional

from core.param_cache import (
RAGParams,
ParamCache,
)
from core.agent_builder import (
RAGAgentBuilder,
remove_agent_from_directory,
AgentCacheRegistry,
)
from st_utils import update_selected_agent_with_id, get_current_state
from core.constants import AGENT_CACHE_DIR
from st_utils import add_sidebar
from st_utils import update_selected_agent_with_id, get_current_state, add_sidebar
from typing import cast


####################
4 changes: 0 additions & 4 deletions pages/3_🤖_Generated_RAG_Agent.py
@@ -1,9 +1,5 @@
"""Streamlit page showing builder config."""
import streamlit as st
from typing import cast, Optional
from core.agent_builder import RAGAgentBuilder, AgentCacheRegistry
from core.param_cache import ParamCache
from core.constants import AGENT_CACHE_DIR
from st_utils import add_sidebar, get_current_state


