add-assistant #31
@@ -2,6 +2,7 @@ import os
+import io
 from openai import OpenAI


 class ConfigureAssistant:
     """
     A class to configure an OpenAI assistant for aiding designers using the ArchiMajor project.
@@ -9,9 +10,32 @@ class ConfigureAssistant:
     """

     SUPPORTED_FORMATS = {
-        "c", "cpp", "css", "docx", "gif", "html", "java", "jpeg", "jpg", "js",
-        "json", "md", "pdf", "php", "png", "pptx", "py", "rb", "tar", "tex", "ts", "txt",
-        "webp", "xlsx", "xml", "zip",
+        "c",
+        "cpp",
+        "css",
+        "docx",
+        "gif",
+        "html",
+        "java",
+        "jpeg",
+        "jpg",
+        "js",
+        "json",
+        "md",
+        "pdf",
+        "php",
+        "png",
+        "pptx",
+        "py",
+        "rb",
+        "tar",
+        "tex",
+        "ts",
+        "txt",
+        "webp",
+        "xlsx",
+        "xml",
+        "zip",
         # "csv", # CSV is supported but not actually parsed so we're going to treat it as text
     }

@@ -76,7 +100,7 @@ class ConfigureAssistant:

        # TO DO: Preprocess Outjob and PcbDoc files into something OpenAI (or future vector DB) can understand
        excluded_extensions = ["schdoc", "exe", "so", "dll", "outjob", "pcbdoc", "png"]
-
+
        if extension in self.SUPPORTED_FORMATS and extension not in excluded_extensions:
            return os.path.basename(file_path), True
        else:
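For anyone reviewing the gating rule above, here is a standalone sketch that mirrors it outside the class. The `should_upload` helper name and the `splitext`-based extension parsing are assumptions for illustration (the hunk doesn't show how `extension` is derived); the two sets are taken from the diff, with `SUPPORTED_FORMATS` abbreviated to a subset:

```python
import os

# Values lifted from the diff above; SUPPORTED_FORMATS abbreviated for brevity.
SUPPORTED_FORMATS = {"c", "cpp", "md", "pdf", "py", "txt", "zip"}
EXCLUDED_EXTENSIONS = {"schdoc", "exe", "so", "dll", "outjob", "pcbdoc", "png"}


def should_upload(file_path: str) -> tuple[str, bool]:
    """Mirror of preprocess_file's gating: keep a file only if its
    extension is supported and not explicitly excluded."""
    extension = os.path.splitext(file_path)[1].lstrip(".").lower()
    keep = extension in SUPPORTED_FORMATS and extension not in EXCLUDED_EXTENSIONS
    return os.path.basename(file_path), keep


print(should_upload("docs/readme.md"))     # ('readme.md', True)
print(should_upload("board/main.PcbDoc"))  # ('main.PcbDoc', False)
```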
@@ -108,8 +132,10 @@ class ConfigureAssistant:
             spoofed_file = io.BytesIO(file_content)
             spoofed_file.name = new_filename  # Spoof the filename
             # Upload the file to the vector store
-            file_batch = self.client.beta.vector_stores.file_batches.upload_and_poll(
-                vector_store_id=self.vector_store.id, files=[spoofed_file]
+            file_batch = (
+                self.client.beta.vector_stores.file_batches.upload_and_poll(
+                    vector_store_id=self.vector_store.id, files=[spoofed_file]
+                )
             )
             print(f"Successfully uploaded: {new_filename}")
         except Exception as e:
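The hunk above only reflows this call, but the filename-spoofing trick is worth seeing on its own: an in-memory `io.BytesIO` buffer with its `.name` attribute set is accepted in place of a file handle, and the API infers the file format from that name. A minimal sketch, assuming `OPENAI_API_KEY` is set in the environment and using a placeholder vector store ID:

```python
import io

from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

# Build an in-memory "file"; the SDK uses .name as the uploaded filename,
# which is how unsupported extensions can be renamed to supported ones.
file_content = b"# ArchiMajor notes\nSome renamed content."
spoofed_file = io.BytesIO(file_content)
spoofed_file.name = "notes_renamed.md"  # spoofed filename

# "vs_placeholder" stands in for a real vector store ID.
file_batch = client.beta.vector_stores.file_batches.upload_and_poll(
    vector_store_id="vs_placeholder", files=[spoofed_file]
)
print(file_batch.status)
```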
@@ -144,16 +170,16 @@ if __name__ == "__main__":
     root_path = os.path.dirname(os.path.dirname(__file__))
     # Create an instance of ConfigureAssistant with the root path
     configurator = ConfigureAssistant(root_path=root_path)
-
+
     # Retrieve file paths
     file_paths = configurator.get_file_paths(excluded_folders=[".git"])
-
+
     # Preprocess and observe the files that will be uploaded
     print("Files to be uploaded:")
     for path in file_paths:
         new_filename, should_upload = configurator.preprocess_file(path)
         if should_upload:
             print(f"Original: {path}, New: {new_filename}")
-
+
     # Configure the assistant
     configurator.configure()
@@ -3,6 +3,7 @@ import logging
 import pandas as pd
 from openai import OpenAI

+
 class OpenAIResourceManager:
     """
     A class to manage OpenAI resources such as assistants, vector stores, and files.
@@ -112,14 +113,20 @@ class OpenAIResourceManager:
         :param max_length: The maximum length of the string.
         :return: The truncated string.
         """
-        return (s[:max_length] + '...') if len(s) > max_length else s
+        return (s[:max_length] + "...") if len(s) > max_length else s

     def show_all_assistants(self):
         """
         Display all assistants in a table.
         """
         assistants = self.get_all_assistants()
-        assistant_data = [{k: self.truncate_string(str(v), max_length=25) for k, v in assistant.dict().items()} for assistant in assistants]
+        assistant_data = [
+            {
+                k: self.truncate_string(str(v), max_length=25)
+                for k, v in assistant.dict().items()
+            }
+            for assistant in assistants
+        ]
         df = pd.DataFrame(assistant_data)
         print("Assistants:")
         print(df.to_markdown(index=False))
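A quick, runnable check of `truncate_string`'s behavior (same logic as the method above, pulled out as a plain function): note the result can exceed `max_length` by the three appended dots.

```python
def truncate_string(s: str, max_length: int = 25) -> str:
    # Cap the string at max_length characters and mark the cut with "...".
    return (s[:max_length] + "...") if len(s) > max_length else s


print(truncate_string("short"))        # short (unchanged, under the cap)
print(truncate_string("a" * 30))       # aaaaaaaaaaaaaaaaaaaaaaaaa...
print(len(truncate_string("a" * 30)))  # 28 -- 25 kept chars + "..."
```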
@@ -129,7 +136,10 @@ class OpenAIResourceManager:
         Display all vector stores in a table.
         """
         vector_stores = self.get_all_vector_stores()
-        vector_store_data = [{k: self.truncate_string(str(v)) for k, v in vector_store.dict().items()} for vector_store in vector_stores]
+        vector_store_data = [
+            {k: self.truncate_string(str(v)) for k, v in vector_store.dict().items()}
+            for vector_store in vector_stores
+        ]
         df = pd.DataFrame(vector_store_data)
         print("Vector Stores:")
         print(df.to_markdown(index=False))
@@ -139,7 +149,10 @@ class OpenAIResourceManager:
         Display all files in a table.
         """
         files = self.get_all_files()
-        file_data = [{k: self.truncate_string(str(v)) for k, v in file.dict().items()} for file in files]
+        file_data = [
+            {k: self.truncate_string(str(v)) for k, v in file.dict().items()}
+            for file in files
+        ]
         df = pd.DataFrame(file_data)
         print("Files:")
         print(df.to_markdown(index=False))
@@ -8,6 +8,7 @@ from typing_extensions import override
 # Configure logging
 logging.basicConfig(level=logging.INFO)

+
 class QueryAssistant:
     """
     A class to manage querying an OpenAI assistant.
@@ -72,22 +73,23 @@ class QueryAssistant:
         """
         logging.info(f"Fetching response for thread {thread_id}...")
         run = self.client.beta.threads.runs.create_and_poll(
-            thread_id=thread_id,
-            assistant_id=self.assistant_id
+            thread_id=thread_id, assistant_id=self.assistant_id
         )

         # Poll the run status with a delay to reduce the number of GET requests
-        while run.status != 'completed' and run.status != 'failed':
+        while run.status != "completed" and run.status != "failed":
             time.sleep(2)  # Add a 2-second delay between checks
-            run = self.client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run.id)
+            run = self.client.beta.threads.runs.retrieve(
+                thread_id=thread_id, run_id=run.id
+            )
             logging.info(f"Run status: {run.status}")

-        if run.status == 'completed':
+        if run.status == "completed":
             messages = self.client.beta.threads.messages.list(thread_id=thread_id).data
             for message in messages:
-                if message.role == 'assistant':
+                if message.role == "assistant":
                     for content in message.content:
-                        if content.type == 'text':
+                        if content.type == "text":
                             print(content.text.value)
         else:
             logging.error(f"Run failed with status: {run.status}")
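One reviewer note on the polling loop above: it exits only on "completed" or "failed", so a run that ends in another terminal state would spin forever. A minimal sketch of a set-based exit condition; the extra status names ("cancelled", "expired") come from the Assistants API documentation rather than this diff, and the IDs are placeholders:

```python
import time

from openai import OpenAI

client = OpenAI()
thread_id = "thread_placeholder"  # placeholder thread ID
run_id = "run_placeholder"        # placeholder run ID

# Terminal states; only "completed" and "failed" are checked in the diff.
TERMINAL_STATUSES = {"completed", "failed", "cancelled", "expired"}

run = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run_id)
while run.status not in TERMINAL_STATUSES:
    time.sleep(2)  # throttle GET requests, as in the diff
    run = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run_id)
print(run.status)
```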
@@ -144,6 +146,7 @@ class QueryAssistant:
                     if output.type == "logs":
                         print(f"\n{output.logs}", flush=True)

+
 def main(query: str, assistant_id: str, context: str, use_streaming: bool):
     """
     The main function to run the assistant query.
@@ -169,6 +172,7 @@ def main(query: str, assistant_id: str, context: str, use_streaming: bool):
         assistant.fetch_response(thread_id=thread.id)
     print("\n")

+
 if __name__ == "__main__":
     # Default query and context
     DEFAULT_QUERY = "What are you capable of as an assistant?"