llm-review/main.py

import argparse
import asyncio
import json
import logging
import os
import sys
import tempfile

import final_agent
import step_agent
from lib import filter_schematic_page, render_svg, split_multipage_svg
VERSION = (0, 5, 2)

RETRY_DELAY = 60
"""
Time in seconds to sleep between pages when hitting a rate limit.
"""

MAX_ATTEMPTS = 3
"""
Number of attempts to make when hitting a rate limit.
"""

DEFAULT_MODEL = "anthropic:claude-3-7-sonnet-latest"

logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
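

# Overview of the pipeline implemented in main():
#   1. Split the multi-page schematic SVG into one PNG render per page and
#      write a filtered JSON description of each page to a temp directory.
#   2. Run the step agent over each page, threading a running memory string
#      and an accumulating comment list from page to page.
#   3. Run the final agent to consolidate the per-page results into the
#      review JSON written to --output-path.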
async def main():
    arg_parser = argparse.ArgumentParser(description="LLM Review")
    arg_parser.add_argument(
        "--json-path", help="Path to the JSON of the schematic", required=True
    )
    arg_parser.add_argument(
        "--svg-path", help="Path to the SVG of the schematic", required=True
    )
    arg_parser.add_argument("--netlist-path", help="Path to the netlist", required=True)
    arg_parser.add_argument(
        "--output-path", help="Path to the output file", required=True
    )
    arg_parser.add_argument(
        "--model",
        help="Model to use for the agent",
        default=DEFAULT_MODEL,
    )
    arg_parser.add_argument("--log-level", help="Log level", default="INFO")
    args = arg_parser.parse_args()
    logger.setLevel(args.log_level)
logger.info("LLM Review version %s" % ".".join(map(str, VERSION)))
with open(args.json_path, "r") as f:
json_data = json.load(f)
assert "type" in json_data and json_data["type"] == "Schematic"
with open(args.svg_path, "r") as f:
multi_page_svg = f.read()
temp_dir = tempfile.TemporaryDirectory()
logger.debug(f"Created temporary directory at {temp_dir.name}")
svg_pages = split_multipage_svg(multi_page_svg)
logger.info(f"Split SVG into {len(svg_pages)} pages")
json_pages = json_data.get("pages", [])
assert len(json_pages) == len(svg_pages)
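
    # For each schematic page, write its SVG to the temp directory, render it
    # to a PNG for the vision model, and dump a filtered JSON description of
    # the page alongside it.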
    page_paths = []
    for i, (json_page, svg_text) in enumerate(zip(json_pages, svg_pages)):
        page_svg_path = os.path.join(temp_dir.name, f"page_{i + 1}.svg")
        page_png_path = os.path.join(temp_dir.name, f"page_{i + 1}.png")
        page_json_path = os.path.join(temp_dir.name, f"page_{i + 1}.json")
        with open(page_svg_path, "w") as f:
            f.write(svg_text)
        await render_svg(page_svg_path, page_png_path)
        with open(page_json_path, "w") as f:
            page_json = filter_schematic_page(json_page)
            json.dump(page_json, f)
        page_paths.append((page_json_path, page_png_path))
        logger.info(f"Wrote page {i + 1} to {page_json_path}")

    with open(args.netlist_path, "r") as f:
        netlist = f.read()
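
    # A literal "__default__" value for --model falls back to DEFAULT_MODEL;
    # both the step agent and the final agent are pointed at the chosen model.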
    if args.model == "__default__":
        args.model = DEFAULT_MODEL
    step_agent.step_agent.model = args.model
    final_agent.final_agent.model = args.model

    current_memory = ""
    current_comments = []
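
    # Review each page in order with the step agent, threading the running
    # memory string and the accumulated comment list forward so later pages
    # can build on findings from earlier ones.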
    for i, (page_json_path, page_png_path) in enumerate(page_paths):
        logger.info(f"Reviewing page {i + 1}/{len(page_paths)}")
        with open(page_json_path, "r") as f:
            page_json = f.read()
        with open(page_png_path, "rb") as f:
            image = f.read()
        try:
            result = await step_agent.call(
                current_memory,
                current_comments,
                page_json,
                image,
                netlist,
                i + 1,
                len(page_paths),
                MAX_ATTEMPTS,
                RETRY_DELAY,
                logger,
            )
        except Exception as e:
            logger.error(f"Error running agent: {e}")
            sys.exit(1)
        if result is None:
            logger.error("Could not get a response from the API. Exiting")
            sys.exit(1)
        current_memory = result.memory
        page_comments = result.comments
        current_comments.extend(page_comments)
        logger.info(f"Completed review of page {i + 1}")
        logger.debug(f"Memory: {current_memory}")
        logger.debug(f"Comments: {current_comments}")

    logger.info("Completed review of all pages, preparing final comment.")
    try:
        final_result = await final_agent.call(
            current_memory,
            current_comments,
            netlist,
            final_agent.FinalAgentDeps(logger=logger),
            MAX_ATTEMPTS,
            RETRY_DELAY,
            logger,
        )
    except Exception as e:
        logger.error(f"Error running agent: {e}")
        sys.exit(1)
    if final_result is None:
        logger.error("Rate limited too many times. Exiting.")
        sys.exit(1)
    out_json = final_result.model_dump(mode="json")
    with open(args.output_path, "w") as f:
        json.dump(out_json, f, indent=4)

    temp_dir.cleanup()


if __name__ == "__main__":
    asyncio.run(main())