add-assistant #31

Open
Ari wants to merge 8 commits from Jam-Master/Altium-archimajor-3d-printer-driver-demo:add-assistant into main
3 changed files with 63 additions and 20 deletions
Showing only changes of commit 4e961198ba

View File

@@ -2,6 +2,7 @@ import os
 import io
 from openai import OpenAI
 
+
 class ConfigureAssistant:
     """
     A class to configure an OpenAI assistant for aiding designers using the ArchiMajor project.
@@ -9,9 +10,32 @@ class ConfigureAssistant:
     """
     SUPPORTED_FORMATS = {
-        "c", "cpp", "css", "docx", "gif", "html", "java", "jpeg", "jpg", "js",
-        "json", "md", "pdf", "php", "png", "pptx", "py", "rb", "tar", "tex", "ts", "txt",
-        "webp", "xlsx", "xml", "zip",
+        "c",
+        "cpp",
+        "css",
+        "docx",
+        "gif",
+        "html",
+        "java",
+        "jpeg",
+        "jpg",
+        "js",
+        "json",
+        "md",
+        "pdf",
+        "php",
+        "png",
+        "pptx",
+        "py",
+        "rb",
+        "tar",
+        "tex",
+        "ts",
+        "txt",
+        "webp",
+        "xlsx",
+        "xml",
+        "zip",
         # "csv", # CSV is supported but not actually parsed so we're going to treat it as text
     }
@@ -76,7 +100,7 @@ class ConfigureAssistant:
         # TO DO: Preprocess Outjob and PcbDoc files into something OpenAI (or future vector DB) can understand
         excluded_extensions = ["schdoc", "exe", "so", "dll", "outjob", "pcbdoc", "png"]
         if extension in self.SUPPORTED_FORMATS and extension not in excluded_extensions:
             return os.path.basename(file_path), True
         else:
@@ -108,8 +132,10 @@
             spoofed_file = io.BytesIO(file_content)
             spoofed_file.name = new_filename  # Spoof the filename
             # Upload the file to the vector store
-            file_batch = self.client.beta.vector_stores.file_batches.upload_and_poll(
-                vector_store_id=self.vector_store.id, files=[spoofed_file]
+            file_batch = (
+                self.client.beta.vector_stores.file_batches.upload_and_poll(
+                    vector_store_id=self.vector_store.id, files=[spoofed_file]
+                )
             )
             print(f"Successfully uploaded: {new_filename}")
         except Exception as e:
@@ -144,16 +170,16 @@ if __name__ == "__main__":
     root_path = os.path.dirname(os.path.dirname(__file__))
 
     # Create an instance of ConfigureAssistant with the root path
     configurator = ConfigureAssistant(root_path=root_path)
 
     # Retrieve file paths
     file_paths = configurator.get_file_paths(excluded_folders=[".git"])
 
     # Preprocess and observe the files that will be uploaded
     print("Files to be uploaded:")
     for path in file_paths:
         new_filename, should_upload = configurator.preprocess_file(path)
         if should_upload:
             print(f"Original: {path}, New: {new_filename}")
     # Configure the assistant
     configurator.configure()
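
For reviewers unfamiliar with the filename-spoofing trick in the upload hunk above, here is a minimal standalone sketch of the same pattern, assuming the openai v1 Python SDK with the beta vector-store endpoints; the store name and file bytes are hypothetical stand-ins for what the script derives from the repository tree:

import io
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

# Hypothetical content with an extension the Files API would reject;
# aliasing it to .txt lets it be indexed as plain text.
file_content = b"Net1 R1-1 U1-3\nNet2 R1-2 GND\n"
spoofed_file = io.BytesIO(file_content)
spoofed_file.name = "board.netlist.txt"  # the SDK infers the filename from .name

vector_store = client.beta.vector_stores.create(name="scratch-store")  # hypothetical store
file_batch = client.beta.vector_stores.file_batches.upload_and_poll(
    vector_store_id=vector_store.id, files=[spoofed_file]
)
print(file_batch.status, file_batch.file_counts)

upload_and_poll blocks until the batch finishes indexing, which is why the script can print a success message immediately after the call returns.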

View File

@@ -3,6 +3,7 @@ import logging
 import pandas as pd
 from openai import OpenAI
 
+
 class OpenAIResourceManager:
     """
     A class to manage OpenAI resources such as assistants, vector stores, and files.
@@ -112,14 +113,20 @@ class OpenAIResourceManager:
         :param max_length: The maximum length of the string.
         :return: The truncated string.
         """
-        return (s[:max_length] + '...') if len(s) > max_length else s
+        return (s[:max_length] + "...") if len(s) > max_length else s
 
     def show_all_assistants(self):
         """
         Display all assistants in a table.
         """
         assistants = self.get_all_assistants()
-        assistant_data = [{k: self.truncate_string(str(v), max_length=25) for k, v in assistant.dict().items()} for assistant in assistants]
+        assistant_data = [
+            {
+                k: self.truncate_string(str(v), max_length=25)
+                for k, v in assistant.dict().items()
+            }
+            for assistant in assistants
+        ]
         df = pd.DataFrame(assistant_data)
         print("Assistants:")
         print(df.to_markdown(index=False))
@@ -129,7 +136,10 @@ class OpenAIResourceManager:
         Display all vector stores in a table.
         """
         vector_stores = self.get_all_vector_stores()
-        vector_store_data = [{k: self.truncate_string(str(v)) for k, v in vector_store.dict().items()} for vector_store in vector_stores]
+        vector_store_data = [
+            {k: self.truncate_string(str(v)) for k, v in vector_store.dict().items()}
+            for vector_store in vector_stores
+        ]
         df = pd.DataFrame(vector_store_data)
         print("Vector Stores:")
         print(df.to_markdown(index=False))
@@ -139,7 +149,10 @@
         Display all files in a table.
         """
         files = self.get_all_files()
-        file_data = [{k: self.truncate_string(str(v)) for k, v in file.dict().items()} for file in files]
+        file_data = [
+            {k: self.truncate_string(str(v)) for k, v in file.dict().items()}
+            for file in files
+        ]
         df = pd.DataFrame(file_data)
         print("Files:")
         print(df.to_markdown(index=False))
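
The three show_* methods reformatted above all follow the same display pattern: flatten each API object to a dict, truncate long values, and print a pandas DataFrame as a markdown table. A minimal sketch of that pattern with hypothetical records (df.to_markdown requires the tabulate package):

import pandas as pd

def truncate_string(s: str, max_length: int = 25) -> str:
    # Keep wide cells such as instructions from blowing out the table
    return (s[:max_length] + "...") if len(s) > max_length else s

# Hypothetical payloads standing in for assistant.dict() output
records = [
    {"id": "asst_demo1", "instructions": "You aid designers working on the ArchiMajor project."},
    {"id": "asst_demo2", "instructions": "Answer BOM questions."},
]
rows = [{k: truncate_string(str(v)) for k, v in record.items()} for record in records]
print(pd.DataFrame(rows).to_markdown(index=False))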

View File

@@ -8,6 +8,7 @@ from typing_extensions import override
 # Configure logging
 logging.basicConfig(level=logging.INFO)
 
+
 class QueryAssistant:
     """
     A class to manage querying an OpenAI assistant.
@@ -72,22 +73,23 @@
         """
         logging.info(f"Fetching response for thread {thread_id}...")
         run = self.client.beta.threads.runs.create_and_poll(
-            thread_id=thread_id,
-            assistant_id=self.assistant_id
+            thread_id=thread_id, assistant_id=self.assistant_id
         )
 
         # Poll the run status with a delay to reduce the number of GET requests
-        while run.status != 'completed' and run.status != 'failed':
+        while run.status != "completed" and run.status != "failed":
             time.sleep(2)  # Add a 2-second delay between checks
-            run = self.client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run.id)
+            run = self.client.beta.threads.runs.retrieve(
+                thread_id=thread_id, run_id=run.id
+            )
             logging.info(f"Run status: {run.status}")
 
-        if run.status == 'completed':
+        if run.status == "completed":
             messages = self.client.beta.threads.messages.list(thread_id=thread_id).data
             for message in messages:
-                if message.role == 'assistant':
+                if message.role == "assistant":
                     for content in message.content:
-                        if content.type == 'text':
+                        if content.type == "text":
                             print(content.text.value)
         else:
             logging.error(f"Run failed with status: {run.status}")
@@ -144,6 +146,7 @@ class QueryAssistant:
                 if output.type == "logs":
                     print(f"\n{output.logs}", flush=True)
 
+
 def main(query: str, assistant_id: str, context: str, use_streaming: bool):
     """
     The main function to run the assistant query.
@@ -169,6 +172,7 @@ def main(query: str, assistant_id: str, context: str, use_streaming: bool):
         assistant.fetch_response(thread_id=thread.id)
     print("\n")
 
+
 if __name__ == "__main__":
     # Default query and context
     DEFAULT_QUERY = "What are you capable of as an assistant?"
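
For context on the polling code reformatted in fetch_response above, a trimmed sketch of the run lifecycle against hypothetical IDs. create_and_poll already blocks until the run reaches a terminal state, so the explicit loop acts as a guard that also throttles status checks to one GET every two seconds:

import time
from openai import OpenAI

client = OpenAI()
thread_id = "thread_abc123"    # hypothetical; the script creates its own thread
assistant_id = "asst_abc123"   # hypothetical; the script passes this through main()

run = client.beta.threads.runs.create_and_poll(
    thread_id=thread_id, assistant_id=assistant_id
)
while run.status not in ("completed", "failed"):
    time.sleep(2)  # throttle the GET requests
    run = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run.id)

if run.status == "completed":
    # Messages are returned newest first; print every assistant text part
    for message in client.beta.threads.messages.list(thread_id=thread_id).data:
        if message.role == "assistant":
            for content in message.content:
                if content.type == "text":
                    print(content.text.value)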