[add] Better error handling for plugin responses
All checks were successful
continuous-integration/drone/push Build is passing
All checks were successful
continuous-integration/drone/push Build is passing
This commit is contained in:
parent
3168bfffd1
commit
ca8c306534
@ -19,36 +19,17 @@ def get_response():
|
||||
|
||||
resp = minyma.oai.query(message)
|
||||
|
||||
# Derive LLM Data
|
||||
# llm_resp = resp.get("llm", {})
|
||||
# llm_choices = llm_resp.get("choices", [])
|
||||
|
||||
# Derive VDB Data
|
||||
# vdb_resp = resp.get("vdb", {})
|
||||
# combined_context = [{
|
||||
# "id": vdb_resp.get("ids")[i],
|
||||
# "distance": vdb_resp.get("distances")[i],
|
||||
# "doc": vdb_resp.get("docs")[i],
|
||||
# "metadata": vdb_resp.get("metadatas")[i],
|
||||
# } for i, _ in enumerate(vdb_resp.get("docs", []))]
|
||||
|
||||
# Return Data
|
||||
return resp
|
||||
|
||||
|
||||
|
||||
"""
|
||||
Return the raw vector db related response
|
||||
TODO - Embed and load data into the local ChromaDB.
|
||||
|
||||
{
|
||||
"input": "string",
|
||||
"normalizer": "string",
|
||||
}
|
||||
"""
|
||||
@bp.route("/related", methods=["POST"])
def get_related():
    """Return the raw vector-DB "related documents" response.

    Expects a JSON body of the form ``{"message": "<query>"}`` and returns
    the result of ``minyma.vdb.get_related`` unchanged, or an error dict
    when the request body is missing or empty.
    """
    data = request.get_json()
    if not data:
        return {"error": "Missing Message"}

    # Guard against an absent key: str(None) would be the non-empty
    # string "None" and silently bypass the empty-message check below.
    message = str(data.get("message") or "")
    if message == "":
        return {"error": "Empty Message"}

    related_documents = minyma.vdb.get_related(message)
    return related_documents
|
||||
@bp.route("/embed", methods=["POST"])
def post_embeddings():
    """Embed and load posted data into the local vector DB (unimplemented).

    NOTE: the original code called ``bp.route(...)`` without the ``@``,
    so the decorator's return value was discarded and the /embed endpoint
    was never actually registered with the blueprint.
    """
    # TODO: implement embedding ingestion.
    pass
|
||||
|
@ -1,11 +1,12 @@
|
||||
import os
|
||||
|
||||
|
||||
def get_env(key, default=None, required=False) -> str:
|
||||
def get_env(key, default=None, required=False) -> str | None:
|
||||
"""Wrapper for gathering env vars."""
|
||||
if required:
|
||||
assert key in os.environ, "Missing Environment Variable: %s" % key
|
||||
return str(os.environ.get(key, default))
|
||||
env = os.environ.get(key, default)
|
||||
return str(env) if env is not None else None
|
||||
|
||||
|
||||
class Config:
|
||||
@ -19,7 +20,7 @@ class Config:
|
||||
OpenAI API Key - Required
|
||||
"""
|
||||
|
||||
CHROMA_DATA_PATH: str = get_env("CHROMA_DATA_PATH", required=False)
|
||||
HOME_ASSISTANT_API_KEY: str = get_env("HOME_ASSISTANT_API_KEY", required=False)
|
||||
HOME_ASSISTANT_URL: str = get_env("HOME_ASSISTANT_URL", required=False)
|
||||
OPENAI_API_KEY: str = get_env("OPENAI_API_KEY", required=True)
|
||||
CHROMA_DATA_PATH: str | None = get_env("CHROMA_DATA_PATH", required=False)
|
||||
HOME_ASSISTANT_API_KEY: str | None = get_env("HOME_ASSISTANT_API_KEY", required=False)
|
||||
HOME_ASSISTANT_URL: str | None = get_env("HOME_ASSISTANT_URL", required=False)
|
||||
OPENAI_API_KEY: str | None = get_env("OPENAI_API_KEY", required=True)
|
||||
|
@ -79,20 +79,33 @@ class OpenAIConnector:
|
||||
|
||||
print("[OpenAIConnector] Completed Initial OAI Query:\n", indent(json.dumps({ "usage": response.usage, "function_calls": all_funcs }, indent=2), ' ' * 2))
|
||||
|
||||
# Execute Requested Functions
|
||||
func_responses = {}
|
||||
for func in all_funcs:
|
||||
func_responses[func] = minyma.plugins.execute(func)
|
||||
# Build Response Text & Metadata
|
||||
func_metadata = {}
|
||||
func_response = []
|
||||
|
||||
# Build Response Text
|
||||
response_content_arr = []
|
||||
for key, val in func_responses.items():
|
||||
indented_val = indent(val, ' ' * 2)
|
||||
response_content_arr.append("- %s\n%s" % (key, indented_val))
|
||||
response_content = "\n".join(response_content_arr)
|
||||
for func in all_funcs:
|
||||
# Execute Requested Function
|
||||
resp = minyma.plugins.execute(func)
|
||||
|
||||
# Unknown Response
|
||||
if resp is None:
|
||||
print("[OpenAIConnector] Invalid Function Response: %s" % func)
|
||||
continue
|
||||
|
||||
# Get Response
|
||||
content = resp.get("content")
|
||||
metadata = resp.get("metadata")
|
||||
error = resp.get("error")
|
||||
|
||||
# Append Responses & Metadata
|
||||
indented_val = indent(content or error or "Unknown Error", ' ' * 2)
|
||||
func_response.append("- %s\n%s" % (func, indented_val))
|
||||
func_metadata[func] = { "metadata": metadata, "error": error }
|
||||
|
||||
func_response = "\n".join(func_response)
|
||||
|
||||
# Create Follow Up Prompt
|
||||
prompt = FOLLOW_UP_PROMPT_TEMPLATE.format(question = question, response = response_content)
|
||||
prompt = FOLLOW_UP_PROMPT_TEMPLATE.format(question = question, response = func_response)
|
||||
messages = [{"role": "user", "content": prompt}]
|
||||
|
||||
print("[OpenAIConnector] Running Follup Up OAI Query")
|
||||
@ -116,7 +129,7 @@ class OpenAIConnector:
|
||||
# Return Response
|
||||
return {
|
||||
"response": content,
|
||||
"functions": func_responses,
|
||||
"functions": func_metadata,
|
||||
"usage": {
|
||||
"prompt_tokens": prompt_tokens,
|
||||
"completion_tokens": completion_tokens,
|
||||
|
@ -13,8 +13,9 @@ class ChromaDBPlugin(MinymaPlugin):
|
||||
def __init__(self, config):
|
||||
self.name = "chroma_db"
|
||||
self.config = config
|
||||
self.word_cap = 1000
|
||||
|
||||
if not config.CHROMA_DATA_PATH:
|
||||
if config.CHROMA_DATA_PATH is None:
|
||||
self.functions = []
|
||||
else:
|
||||
self.vdb = ChromaDB(config.CHROMA_DATA_PATH)
|
||||
@ -25,17 +26,28 @@ class ChromaDBPlugin(MinymaPlugin):
|
||||
# Get Related
|
||||
related = self.vdb.get_related(collection_name, query)
|
||||
|
||||
# Get Metadata
|
||||
metadata = [{
|
||||
"id": related.get("ids")[i],
|
||||
"distance": related.get("distances")[i],
|
||||
"metadata": related.get("metadatas")[i],
|
||||
} for i, _ in enumerate(related.get("docs", []))]
|
||||
|
||||
# Normalize Data
|
||||
return list(
|
||||
map(
|
||||
lambda x: " ".join(x.split()[:self.vdb.word_cap]),
|
||||
lambda x: " ".join(x.split()[:self.word_cap]),
|
||||
related.get("docs", [])
|
||||
)
|
||||
)
|
||||
), metadata
|
||||
|
||||
|
||||
def lookup_pubmed_data(self, query: str):
    """Look up PubMed documents related to *query* in the vector DB.

    Returns the standard plugin response dict: the matched documents
    joined into a single ``content`` string, per-document ``metadata``
    from the vector DB, and ``error`` set to None (this path cannot fail
    locally; errors surface from the underlying lookup).
    """
    COLLECTION_NAME = "pubmed"

    # __lookup_data returns the capped document texts plus their
    # id/distance/metadata records (see the commit's updated helper).
    documents, metadata = self.__lookup_data(COLLECTION_NAME, query)
    context = '\n'.join(documents)

    return {
        "content": context,
        "metadata": metadata,
        "error": None,
    }
|
||||
|
@ -21,6 +21,7 @@ class DuckDuckGoPlugin(MinymaPlugin):
|
||||
resp = requests.get("https://html.duckduckgo.com/html/?q=%s" % query, headers=HEADERS)
|
||||
soup = BeautifulSoup(resp.text, features="html.parser")
|
||||
|
||||
# Get Results
|
||||
results = []
|
||||
for item in soup.select(".result > div"):
|
||||
title_el = item.select_one(".result__title > a")
|
||||
@ -31,4 +32,18 @@ class DuckDuckGoPlugin(MinymaPlugin):
|
||||
|
||||
results.append({"title": title, "description": description})
|
||||
|
||||
return json.dumps(results[:5])
|
||||
# Derive Metadata (Title)
|
||||
metadata = {
|
||||
"titles": list(
|
||||
map(
|
||||
lambda x: x.get("title"),
|
||||
results[:5]
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
"content": json.dumps(results[:5]),
|
||||
"metadata": metadata,
|
||||
"error": None
|
||||
}
|
||||
|
@ -10,18 +10,15 @@ class HomeAssistantPlugin(MinymaPlugin):
|
||||
def __init__(self, config):
    """Initialize the Home Assistant plugin.

    Registers ``home_automation_command`` only when both the API key and
    the base URL are configured; otherwise prints a warning for each
    missing setting and exposes no functions.
    """
    self.config = config
    self.name = "home_assistant"
    self.functions = []

    # Warn about each missing setting individually so the operator can
    # see exactly what to fix.
    if not config.HOME_ASSISTANT_API_KEY:
        print("[HomeAssistantPlugin] Missing HOME_ASSISTANT_API_KEY")
    if not config.HOME_ASSISTANT_URL:
        print("[HomeAssistantPlugin] Missing HOME_ASSISTANT_URL")

    # Positive gate: only enable the plugin when fully configured.
    if config.HOME_ASSISTANT_API_KEY and config.HOME_ASSISTANT_URL:
        self.functions = [self.home_automation_command]
|
||||
|
||||
def home_automation_command(self, natural_language_command: str):
|
||||
url = urllib.parse.urljoin(self.config.HOME_ASSISTANT_URL, "/api/conversation/process")
|
||||
headers = {
|
||||
@ -34,6 +31,17 @@ class HomeAssistantPlugin(MinymaPlugin):
|
||||
|
||||
# Parse JSON
|
||||
try:
|
||||
return json.dumps(resp.json())
|
||||
r = resp.json()
|
||||
text = r["response"]["speech"]["plain"]["speech"]
|
||||
|
||||
return {
|
||||
"content": text,
|
||||
"metadata": r,
|
||||
"error": None
|
||||
}
|
||||
except requests.JSONDecodeError:
|
||||
return json.dumps({ "error": "Command Failed" })
|
||||
return {
|
||||
"content": None,
|
||||
"metadata": None,
|
||||
"error": "Command Failed"
|
||||
}
|
||||
|
@ -50,10 +50,11 @@ class VehicleLookupPlugin(MinymaPlugin):
|
||||
|
||||
# Invalid JSON
|
||||
if json_resp is None:
|
||||
return json.dumps({
|
||||
return{
|
||||
"content": None,
|
||||
"metadata": text_resp,
|
||||
"error": error,
|
||||
"response": text_resp,
|
||||
})
|
||||
}
|
||||
|
||||
try:
|
||||
# Check Result
|
||||
@ -63,7 +64,11 @@ class VehicleLookupPlugin(MinymaPlugin):
|
||||
error = "No Results"
|
||||
else:
|
||||
error = "API Error: %s" % status_resp
|
||||
return {"error": error, "response": text_resp}
|
||||
return {
|
||||
"content": None,
|
||||
"metadata": json_resp,
|
||||
"error": error,
|
||||
}
|
||||
|
||||
# Parse Result
|
||||
vehicle_info = json_resp.get("content")
|
||||
@ -74,17 +79,20 @@ class VehicleLookupPlugin(MinymaPlugin):
|
||||
trim = vehicle_info.get("vehicles")[0].get("trim")
|
||||
|
||||
except Exception as e:
|
||||
return json.dumps({
|
||||
return {
|
||||
"content": None,
|
||||
"metadata": text_resp,
|
||||
"error": "Unknown Error: %s" % e,
|
||||
"response": text_resp,
|
||||
})
|
||||
}
|
||||
|
||||
return json.dumps({
|
||||
"result": {
|
||||
return {
|
||||
"content": json.dumps({
|
||||
"vin": vin,
|
||||
"year": year,
|
||||
"make": make,
|
||||
"model": model,
|
||||
"trim": trim,
|
||||
},
|
||||
})
|
||||
}),
|
||||
"metadata": json_resp,
|
||||
"error": None
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user