Format codebase with uv run pre-commit run --all-files

James Murdza
2025-10-22 11:11:02 -07:00
parent 759ff4703e
commit ddc5a5de91
234 changed files with 10127 additions and 8467 deletions


@@ -9,58 +9,61 @@ from agent import ComputerAgent
 from computer import Computer
 from computer.helpers import sandboxed
 @sandboxed()
 def read_file(location: str) -> str:
     """Read contents of a file
     Parameters
     ----------
     location : str
         Path to the file to read
     Returns
     -------
     str
         Contents of the file or error message
     """
     try:
-        with open(location, 'r') as f:
+        with open(location, "r") as f:
             return f.read()
     except Exception as e:
         return f"Error reading file: {str(e)}"
 def save_note(content: str, filename: str = "note.txt") -> str:
     """Save content to a note file
     Parameters
     ----------
     content : str
         Content to save to the file
     filename : str, optional
         Name of the file to save to (default is "note.txt")
     Returns
     -------
     str
         Success or error message
     """
     try:
-        with open(filename, 'w') as f:
+        with open(filename, "w") as f:
             f.write(content)
         return f"Saved note to {filename}"
     except Exception as e:
         return f"Error saving note: {str(e)}"
 def calculate(a: int, b: int) -> int:
     """Calculate the sum of two integers
     Parameters
     ----------
     a : int
         First integer
     b : int
         Second integer
     Returns
     -------
     int
@@ -68,15 +71,18 @@ def calculate(a: int, b: int) -> int:
"""
return a + b
async def main():
"""Example usage of ComputerAgent with different models"""
# Example 1: Using Claude with computer and custom tools
print("=== Example 1: Claude with Computer ===")
import os
import dotenv
import json
import os
import dotenv
dotenv.load_dotenv()
assert os.getenv("CUA_CONTAINER_NAME") is not None, "CUA_CONTAINER_NAME is not set"
@@ -86,38 +92,37 @@ async def main():
os_type="linux",
provider_type="cloud",
name=os.getenv("CUA_CONTAINER_NAME") or "",
api_key=os.getenv("CUA_API_KEY") or ""
api_key=os.getenv("CUA_API_KEY") or "",
) as computer:
agent = ComputerAgent(
# Supported models:
# == OpenAI CUA (computer-use-preview) ==
model="openai/computer-use-preview",
# == Anthropic CUA (Claude > 3.5) ==
# model="anthropic/claude-opus-4-20250514",
# model="anthropic/claude-opus-4-20250514",
# model="anthropic/claude-sonnet-4-20250514",
# model="anthropic/claude-3-7-sonnet-20250219",
# model="anthropic/claude-3-5-sonnet-20241022",
# == UI-TARS ==
# model="huggingface-local/ByteDance-Seed/UI-TARS-1.5-7B",
# TODO: add local mlx provider
# model="mlx-community/UI-TARS-1.5-7B-6bit",
# model="ollama_chat/0000/ui-tars-1.5-7b",
# == Omniparser + Any LLM ==
# model="omniparser+..."
# model="omniparser+anthropic/claude-opus-4-20250514",
tools=[computer],
only_n_most_recent_images=3,
verbosity=logging.INFO,
trajectory_dir="trajectories",
use_prompt_caching=True,
max_trajectory_budget={ "max_budget": 1.0, "raise_error": True, "reset_after_each_run": False },
max_trajectory_budget={
"max_budget": 1.0,
"raise_error": True,
"reset_after_each_run": False,
},
)
history = []
while True:
user_input = input("> ")
@@ -143,5 +148,6 @@ async def main():
             # elif item["type"] == "function_call_output":
             #     print("===>", item["output"])
 if __name__ == "__main__":
-    asyncio.run(main())
+    asyncio.run(main())