Mirror of https://github.com/trycua/computer.git (synced 2025-12-30 18:09:55 -06:00)
Add dev container, fix lints
.dockerignore (new file, 37 lines)
@@ -0,0 +1,37 @@
+# Version control
+.git
+.github
+.gitignore
+
+# Environment and cache
+.venv
+.env
+.env.local
+__pycache__
+*.pyc
+*.pyo
+*.pyd
+.Python
+.pytest_cache
+.pdm-build
+
+# Distribution / packaging
+dist
+build
+*.egg-info
+
+# Development
+.vscode
+.idea
+*.swp
+*.swo
+
+# Docs
+docs/site
+
+# Notebooks
+notebooks/.ipynb_checkpoints
+
+# Docker
+Dockerfile
+.dockerignore
Dockerfile (new file, 55 lines)
@@ -0,0 +1,55 @@
+FROM python:3.11-slim
+
+# Set environment variables
+ENV PYTHONUNBUFFERED=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    PIP_NO_CACHE_DIR=1 \
+    PIP_DISABLE_PIP_VERSION_CHECK=1 \
+    PYTHONPATH="/app/libs/core:/app/libs/computer:/app/libs/agent:/app/libs/som:/app/libs/pylume:/app/libs/computer-server"
+
+# Install system dependencies for ARM architecture
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    git \
+    build-essential \
+    libgl1-mesa-glx \
+    libglib2.0-0 \
+    libxcb-xinerama0 \
+    libxkbcommon-x11-0 \
+    cmake \
+    pkg-config \
+    curl \
+    iputils-ping \
+    net-tools \
+    sed \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+# Set working directory
+WORKDIR /app
+
+# Copy the entire project temporarily
+# We'll mount the real source code over this at runtime
+COPY . /app/
+
+# Create a simple .env.local file for build.sh
+RUN echo "PYTHON_BIN=python" > /app/.env.local
+
+# Modify build.sh to skip virtual environment creation
+RUN sed -i 's/python -m venv .venv/echo "Skipping venv creation in Docker"/' /app/scripts/build.sh && \
+    sed -i 's/source .venv\/bin\/activate/echo "Skipping venv activation in Docker"/' /app/scripts/build.sh && \
+    sed -i 's/find . -type d -name ".venv" -exec rm -rf {} +/echo "Skipping .venv removal in Docker"/' /app/scripts/build.sh && \
+    chmod +x /app/scripts/build.sh
+
+# Run the build script to install dependencies
+RUN cd /app && ./scripts/build.sh
+
+# Clean up the source files now that dependencies are installed
+# When we run the container, we'll mount the actual source code
+RUN rm -rf /app/* /app/.??*
+
+# Note: This Docker image doesn't contain the lume executable (macOS-specific)
+# Instead, it relies on connecting to a lume server running on the host machine
+# via host.docker.internal:3000
+
+# Default command
+CMD ["bash"]
@@ -4,24 +4,29 @@

 The project is organized as a monorepo with these main packages:
 - `libs/core/` - Base package with telemetry support
-- `libs/pylume/` - Python bindings for Lume
-- `libs/computer/` - Core computer interaction library
+- `libs/computer/` - Computer-use interface (CUI) library
 - `libs/agent/` - AI agent library with multi-provider support
-- `libs/som/` - Computer vision and NLP processing library (formerly omniparser)
-- `libs/computer-server/` - Server implementation for computer control
-- `libs/lume/` - Swift implementation for enhanced macOS integration
+- `libs/som/` - Set-of-Mark parser
+- `libs/computer-server/` - Server component for the VM
+- `libs/lume/` - Lume CLI
+- `libs/pylume/` - Python bindings for Lume

 Each package has its own virtual environment and dependencies, managed through PDM.

 ### Local Development Setup

-1. Clone the repository:
+1. Install the Lume CLI:
+
+```bash
+/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/trycua/cua/main/libs/lume/scripts/install.sh)"
+```
+
+2. Clone the repository:

 ```bash
 git clone https://github.com/trycua/cua.git
 cd cua
 ```

-2. Create a `.env.local` file in the root directory with your API keys:
+3. Create a `.env.local` file in the root directory with your API keys:

 ```bash
 # Required for Anthropic provider
 ANTHROPIC_API_KEY=your_anthropic_key_here
@@ -30,7 +35,7 @@ ANTHROPIC_API_KEY=your_anthropic_key_here
 OPENAI_API_KEY=your_openai_key_here
 ```

-3. Run the build script to set up all packages:
+4. Run the build script to set up all packages:

 ```bash
 ./scripts/build.sh
 ```
@@ -41,9 +46,9 @@ This will:
 - Set up the correct Python path
 - Install development tools

-4. Open the workspace in VSCode or Cursor:
+5. Open the workspace in VSCode or Cursor:

 ```bash
 # Using VSCode or Cursor
 # For Cua Python development
 code .vscode/py.code-workspace

 # For Lume (Swift) development
@@ -56,9 +61,55 @@ Using the workspace file is strongly recommended as it:
 - Enables debugging configurations
 - Maintains consistent settings across packages

+### Docker Development Environment
+
+As an alternative to running directly on your host machine, you can use Docker for development. This approach has several advantages:
+
+- Ensures a consistent development environment across different machines
+- Isolates dependencies from your host system
+- Works well for cross-platform development
+- Avoids conflicts with existing Python installations
+
+#### Prerequisites
+
+- Docker installed on your machine
+- Lume server running on your host (port 3000): `lume serve`
+
+#### Setup and Usage
+
+1. Build the development Docker image:
+
+```bash
+./scripts/run-docker-dev.sh build
+```
+
+2. Run an example in the container:
+
+```bash
+./scripts/run-docker-dev.sh run computer_examples.py
+```
+
+3. Get an interactive shell in the container:
+
+```bash
+./scripts/run-docker-dev.sh run --interactive
+```
+
+4. Stop any running containers:
+
+```bash
+./scripts/run-docker-dev.sh stop
+```
+
+#### How it Works
+
+The Docker development environment:
+
+- Installs all required Python dependencies in the container
+- Mounts your source code from the host at runtime
+- Automatically configures the connection to use host.docker.internal:3000 for reaching the Lume server on your host machine
+- Preserves your code changes without requiring rebuilds (source code is mounted as a volume)
+
+> **Note**: The Docker container doesn't include the macOS-specific Lume executable. Instead, it connects to the Lume server running on your host machine via host.docker.internal:3000. Make sure to start the Lume server on your host before running examples in the container.
+
 ### Cleanup and Reset

-If you need to clean up the environment and start fresh:
+If you need to clean up the environment (non-Docker) and start fresh:

 ```bash
 ./scripts/cleanup.sh
@@ -5,13 +5,13 @@ import asyncio
 import logging
 import traceback
 from pathlib import Path
 from datetime import datetime
 import signal

 from computer import Computer

 # Import the unified agent class and types
-from agent import ComputerAgent, AgentLoop, LLMProvider, LLM
+from agent import AgentLoop, LLMProvider, LLM
+from agent.core.computer_agent import ComputerAgent

 # Import utility functions
 from utils import load_dotenv_files, handle_sigint
@@ -23,7 +23,8 @@ logger = logging.getLogger(__name__)

 async def run_omni_agent_example():
     """Run example of using the ComputerAgent with OpenAI and Omni provider."""
-    print(f"\n=== Example: ComputerAgent with OpenAI and Omni provider ===")
+    print("\n=== Example: ComputerAgent with OpenAI and Omni provider ===")

     try:
         # Create Computer instance with default parameters
         computer = Computer(verbosity=logging.DEBUG)
@@ -31,10 +32,10 @@ async def run_omni_agent_example():
         # Create agent with loop and provider
         agent = ComputerAgent(
             computer=computer,
-            # loop=AgentLoop.OMNI,
-            loop=AgentLoop.ANTHROPIC,
-            # model=LLM(provider=LLMProvider.OPENAI, name="gpt-4.5-preview"),
-            model=LLM(provider=LLMProvider.ANTHROPIC, name="claude-3-7-sonnet-20250219"),
+            # loop=AgentLoop.ANTHROPIC,
+            loop=AgentLoop.OMNI,
+            model=LLM(provider=LLMProvider.OPENAI, name="gpt-4.5-preview"),
+            # model=LLM(provider=LLMProvider.ANTHROPIC, name="claude-3-7-sonnet-20250219"),
             save_trajectory=True,
             trajectory_dir=str(Path("trajectories")),
             only_n_most_recent_images=3,
@@ -69,14 +70,15 @@ async def run_omni_agent_example():
             print(f"Task {i} completed")

     except Exception as e:
-        logger.error(f"Error in run_anthropic_agent_example: {e}")
+        logger.error(f"Error in run_omni_agent_example: {e}")
         traceback.print_exc()
         raise
     finally:
         # Clean up resources
         if computer and computer._initialized:
             try:
-                await computer.stop()
+                # await computer.stop()
+                pass
             except Exception as e:
                 logger.warning(f"Error stopping computer: {e}")
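For orientation, the updated construction pattern in this example file reduces to the following minimal sketch. It uses only names visible in the hunks above (the task string and the surrounding `main` scaffold are illustrative placeholders, and running it assumes the cua packages are installed and a Lume server is reachable):

```python
import asyncio
import logging

from computer import Computer
from agent import AgentLoop, LLMProvider, LLM
from agent.core.computer_agent import ComputerAgent


async def main() -> None:
    # Build the VM-backed computer and an agent wired to the OMNI loop,
    # mirroring the arguments used in run_omni_agent_example() above.
    computer = Computer(verbosity=logging.INFO)
    agent = ComputerAgent(
        computer=computer,
        loop=AgentLoop.OMNI,
        model=LLM(provider=LLMProvider.OPENAI, name="gpt-4.5-preview"),
        save_trajectory=True,
        only_n_most_recent_images=3,
    )
    # agent.run() is an async generator; the examples above treat each
    # yielded update as a dict with "role"/"content"/"metadata" keys.
    async for update in agent.run("Open a browser and search for 'trycua'"):  # placeholder task
        print(update.get("content", ""))


if __name__ == "__main__":
    asyncio.run(main())
```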
@@ -28,6 +28,8 @@ from computer.utils import get_image_size
 async def main():
     try:
         print("\n=== Using direct initialization ===")
+
+        # Create computer with configured host
         computer = Computer(
             display="1024x768",  # Higher resolution
             memory="8GB",  # More memory
@@ -48,10 +50,10 @@ async def main():
         print(f"Accessibility tree: {accessibility_tree}")

         # Screen Actions Examples
-        print("\n=== Screen Actions ===")
-        screenshot = await computer.interface.screenshot()
-        with open("screenshot_direct.png", "wb") as f:
-            f.write(screenshot)
+        # print("\n=== Screen Actions ===")
+        # screenshot = await computer.interface.screenshot()
+        # with open("screenshot_direct.png", "wb") as f:
+        #     f.write(screenshot)

         screen_size = await computer.interface.get_screen_size()
         print(f"Screen size: {screen_size}")
@@ -48,9 +48,7 @@ except Exception as e:
         # Other issues with telemetry
         logger.warning(f"Error initializing telemetry: {e}")

-from .core.factory import AgentFactory
-from .core.agent import ComputerAgent
 from .providers.omni.types import LLMProvider, LLM
-from .types.base import Provider, AgentLoop
+from .types.base import AgentLoop

-__all__ = ["AgentFactory", "Provider", "ComputerAgent", "AgentLoop", "LLMProvider", "LLM"]
+__all__ = ["AgentLoop", "LLMProvider", "LLM"]
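After this change the package's top-level surface is limited to the loop and model types. A sketch of what downstream imports now look like (assuming the package is installed under the name `agent`, as the examples in this commit do):

```python
from agent import AgentLoop, LLMProvider, LLM

# ComputerAgent and AgentFactory are no longer re-exported at the top level;
# the updated examples import ComputerAgent from its defining module instead.
config = LLM(provider=LLMProvider.ANTHROPIC, name="claude-3-7-sonnet-20250219")
loop_choice = AgentLoop.ANTHROPIC
```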
@@ -1,6 +1,5 @@
 """Core agent components."""

-from .base_agent import BaseComputerAgent
 from .loop import BaseLoop
 from .messages import (
     create_user_message,
@@ -12,7 +11,7 @@ from .messages import (
     ImageRetentionConfig,
 )
 from .callbacks import (
-    CallbackManager,
     CallbackManager,
     CallbackHandler,
     BaseCallbackManager,
     ContentCallback,
@@ -21,9 +20,8 @@ from .callbacks import (
 )

 __all__ = [
-    "BaseComputerAgent",
-    "BaseLoop",
-    "CallbackManager",
+    "BaseLoop",
+    "CallbackManager",
     "CallbackHandler",
     "BaseMessageManager",
     "ImageRetentionConfig",
@@ -1,252 +0,0 @@
-"""Unified computer agent implementation that supports multiple loops."""
-
-import os
-import logging
-import asyncio
-import time
-import uuid
-from typing import Any, AsyncGenerator, Dict, List, Optional, TYPE_CHECKING, Union, cast
-from datetime import datetime
-from enum import Enum
-
-from computer import Computer
-
-from ..types.base import Provider, AgentLoop
-from .base_agent import BaseComputerAgent
-from ..core.telemetry import record_agent_initialization
-
-# Only import types for type checking to avoid circular imports
-if TYPE_CHECKING:
-    from ..providers.anthropic.loop import AnthropicLoop
-    from ..providers.omni.loop import OmniLoop
-    from ..providers.omni.parser import OmniParser
-
-# Import the provider types
-from ..providers.omni.types import LLMProvider, LLM, Model, LLMModel
-
-logger = logging.getLogger(__name__)
-
-# Default models for different providers
-DEFAULT_MODELS = {
-    LLMProvider.OPENAI: "gpt-4o",
-    LLMProvider.ANTHROPIC: "claude-3-7-sonnet-20250219",
-}
-
-# Map providers to their environment variable names
-ENV_VARS = {
-    LLMProvider.OPENAI: "OPENAI_API_KEY",
-    LLMProvider.ANTHROPIC: "ANTHROPIC_API_KEY",
-}
-
-
-class ComputerAgent(BaseComputerAgent):
-    """Unified implementation of the computer agent supporting multiple loop types.
-
-    This class consolidates the previous AnthropicComputerAgent and OmniComputerAgent
-    into a single implementation with configurable loop type.
-    """
-
-    def __init__(
-        self,
-        computer: Computer,
-        loop: AgentLoop = AgentLoop.OMNI,
-        model: Optional[Union[LLM, Dict[str, str], str]] = None,
-        api_key: Optional[str] = None,
-        save_trajectory: bool = True,
-        trajectory_dir: Optional[str] = "trajectories",
-        only_n_most_recent_images: Optional[int] = None,
-        max_retries: int = 3,
-        verbosity: int = logging.INFO,
-        telemetry_enabled: bool = True,
-        **kwargs,
-    ):
-        """Initialize a ComputerAgent instance.
-
-        Args:
-            computer: The Computer instance to control
-            loop: The agent loop to use: ANTHROPIC or OMNI
-            model: The model to use. Can be a string, dict or LLM object.
-                Defaults to LLM for the loop type.
-            api_key: The API key to use. If None, will use environment variables.
-            save_trajectory: Whether to save the trajectory.
-            trajectory_dir: The directory to save trajectories to.
-            only_n_most_recent_images: Only keep this many most recent images.
-            max_retries: Maximum number of retries for failed requests.
-            verbosity: Logging level (standard Python logging levels).
-            telemetry_enabled: Whether to enable telemetry tracking. Defaults to True.
-            **kwargs: Additional keyword arguments to pass to the loop.
-        """
-        super().__init__(computer)
-        self._configure_logging(verbosity)
-        logger.info(f"Initializing ComputerAgent with {loop} loop")
-
-        # Store telemetry preference
-        self.telemetry_enabled = telemetry_enabled
-
-        # Process the model configuration
-        self.model = self._process_model_config(model, loop)
-        self.loop_type = loop
-        self.api_key = api_key
-
-        # Store computer
-        self.computer = computer
-
-        # Save trajectory settings
-        self.save_trajectory = save_trajectory
-        self.trajectory_dir = trajectory_dir
-        self.only_n_most_recent_images = only_n_most_recent_images
-
-        # Store the max retries setting
-        self.max_retries = max_retries
-
-        # Initialize message history
-        self.messages = []
-
-        # Extra kwargs for the loop
-        self.loop_kwargs = kwargs
-
-        # Initialize the actual loop implementation
-        self.loop = self._init_loop()
-
-        # Record initialization in telemetry if enabled
-        if telemetry_enabled:
-            record_agent_initialization()
-
-    def _process_model_config(
-        self, model_input: Optional[Union[LLM, Dict[str, str], str]], loop: AgentLoop
-    ) -> LLM:
-        """Process and normalize model configuration.
-
-        Args:
-            model_input: Input model configuration (LLM, dict, string, or None)
-            loop: The loop type being used
-
-        Returns:
-            Normalized LLM instance
-        """
-        # Handle case where model_input is None
-        if model_input is None:
-            # Use Anthropic for Anthropic loop, OpenAI for Omni loop
-            default_provider = (
-                LLMProvider.ANTHROPIC if loop == AgentLoop.ANTHROPIC else LLMProvider.OPENAI
-            )
-            return LLM(provider=default_provider)
-
-        # Handle case where model_input is already a LLM or one of its aliases
-        if isinstance(model_input, (LLM, Model, LLMModel)):
-            return model_input
-
-        # Handle case where model_input is a dict
-        if isinstance(model_input, dict):
-            provider = model_input.get("provider", LLMProvider.OPENAI)
-            if isinstance(provider, str):
-                provider = LLMProvider(provider)
-            return LLM(provider=provider, name=model_input.get("name"))
-
-        # Handle case where model_input is a string (model name)
-        if isinstance(model_input, str):
-            default_provider = (
-                LLMProvider.ANTHROPIC if loop == AgentLoop.ANTHROPIC else LLMProvider.OPENAI
-            )
-            return LLM(provider=default_provider, name=model_input)
-
-        raise ValueError(f"Unsupported model configuration: {model_input}")
-
-    def _configure_logging(self, verbosity: int):
-        """Configure logging based on verbosity level."""
-        # Use the logging level directly without mapping
-        logger.setLevel(verbosity)
-        logging.getLogger("agent").setLevel(verbosity)
-
-        # Log the verbosity level that was set
-        if verbosity <= logging.DEBUG:
-            logger.info("Agent logging set to DEBUG level (full debug information)")
-        elif verbosity <= logging.INFO:
-            logger.info("Agent logging set to INFO level (standard output)")
-        elif verbosity <= logging.WARNING:
-            logger.warning("Agent logging set to WARNING level (warnings and errors only)")
-        elif verbosity <= logging.ERROR:
-            logger.warning("Agent logging set to ERROR level (errors only)")
-        elif verbosity <= logging.CRITICAL:
-            logger.warning("Agent logging set to CRITICAL level (critical errors only)")
-
-    def _init_loop(self) -> Any:
-        """Initialize the loop based on the loop_type.
-
-        Returns:
-            Initialized loop instance
-        """
-        # Lazy import OmniLoop and OmniParser to avoid circular imports
-        from ..providers.omni.loop import OmniLoop
-        from ..providers.omni.parser import OmniParser
-
-        if self.loop_type == AgentLoop.ANTHROPIC:
-            from ..providers.anthropic.loop import AnthropicLoop
-
-            # Ensure we always have a valid model name
-            model_name = self.model.name or DEFAULT_MODELS[LLMProvider.ANTHROPIC]
-
-            return AnthropicLoop(
-                api_key=self.api_key,
-                model=model_name,
-                computer=self.computer,
-                save_trajectory=self.save_trajectory,
-                base_dir=self.trajectory_dir,
-                only_n_most_recent_images=self.only_n_most_recent_images,
-                **self.loop_kwargs,
-            )
-
-        # Initialize parser for OmniLoop with appropriate device
-        if "parser" not in self.loop_kwargs:
-            self.loop_kwargs["parser"] = OmniParser()
-
-        # Ensure we always have a valid model name
-        model_name = self.model.name or DEFAULT_MODELS[self.model.provider]
-
-        return OmniLoop(
-            provider=self.model.provider,
-            api_key=self.api_key,
-            model=model_name,
-            computer=self.computer,
-            save_trajectory=self.save_trajectory,
-            base_dir=self.trajectory_dir,
-            only_n_most_recent_images=self.only_n_most_recent_images,
-            **self.loop_kwargs,
-        )
-
-    async def _execute_task(self, task: str) -> AsyncGenerator[Dict[str, Any], None]:
-        """Execute a task using the appropriate agent loop.
-
-        Args:
-            task: The task to execute
-
-        Returns:
-            AsyncGenerator yielding task outputs
-        """
-        logger.info(f"Executing task: {task}")
-
-        try:
-            # Create a message from the task
-            task_message = {"role": "user", "content": task}
-            messages_with_task = self.messages + [task_message]
-
-            # Use the run method of the loop
-            async for output in self.loop.run(messages_with_task):
-                yield output
-        except Exception as e:
-            logger.error(f"Error executing task: {e}")
-            raise
-        finally:
-            pass
-
-    async def _execute_action(self, action_type: str, **action_params) -> Any:
-        """Execute an action with telemetry tracking."""
-        try:
-            # Execute the action
-            result = await super()._execute_action(action_type, **action_params)
-            return result
-        except Exception as e:
-            logger.exception(f"Error executing action {action_type}: {e}")
-            raise
-        finally:
-            pass
@@ -1,164 +0,0 @@
-"""Base computer agent implementation."""
-
-import asyncio
-import logging
-import os
-from abc import ABC, abstractmethod
-from typing import Any, AsyncGenerator, Dict, Optional
-
-from computer import Computer
-
-from ..types.base import Provider
-
-logger = logging.getLogger(__name__)
-
-
-class BaseComputerAgent(ABC):
-    """Base class for computer agents."""
-
-    def __init__(
-        self,
-        max_retries: int = 3,
-        computer: Optional[Computer] = None,
-        screenshot_dir: Optional[str] = None,
-        log_dir: Optional[str] = None,
-        **kwargs,
-    ):
-        """Initialize the base computer agent.
-
-        Args:
-            max_retries: Maximum number of retry attempts
-            computer: Optional Computer instance
-            screenshot_dir: Directory to save screenshots
-            log_dir: Directory to save logs (set to None to disable logging to files)
-            **kwargs: Additional provider-specific arguments
-        """
-        self.max_retries = max_retries
-        self.computer = computer or Computer()
-        self.queue = asyncio.Queue()
-        self.screenshot_dir = screenshot_dir
-        self.log_dir = log_dir
-        self._retry_count = 0
-        self.provider = Provider.UNKNOWN
-
-        # Setup logging
-        if self.log_dir:
-            os.makedirs(self.log_dir, exist_ok=True)
-            logger.info(f"Created logs directory: {self.log_dir}")
-
-        # Setup screenshots directory
-        if self.screenshot_dir:
-            os.makedirs(self.screenshot_dir, exist_ok=True)
-            logger.info(f"Created screenshots directory: {self.screenshot_dir}")
-
-        logger.info("BaseComputerAgent initialized")
-
-    async def run(self, task: str) -> AsyncGenerator[Dict[str, Any], None]:
-        """Run a task using the computer agent.
-
-        Args:
-            task: Task description
-
-        Yields:
-            Task execution updates
-        """
-        try:
-            logger.info(f"Running task: {task}")
-
-            # Initialize the computer if needed
-            await self._init_if_needed()
-
-            # Execute the task and yield results
-            # The _execute_task method should be implemented to yield results
-            async for result in self._execute_task(task):
-                yield result
-
-        except Exception as e:
-            logger.error(f"Error in agent run method: {str(e)}")
-            yield {
-                "role": "assistant",
-                "content": f"Error: {str(e)}",
-                "metadata": {"title": "❌ Error"},
-            }
-
-    async def _init_if_needed(self):
-        """Initialize the computer interface if it hasn't been initialized yet."""
-        if not self.computer._initialized:
-            logger.info("Computer not initialized, initializing now...")
-            try:
-                # Call run directly without setting the flag first
-                await self.computer.run()
-                logger.info("Computer interface initialized successfully")
-            except Exception as e:
-                logger.error(f"Error initializing computer interface: {str(e)}")
-                raise
-
-    async def __aenter__(self):
-        """Initialize the agent when used as a context manager."""
-        logger.info("Entering BaseComputerAgent context")
-
-        # In case the computer wasn't initialized
-        try:
-            # Initialize the computer only if not already initialized
-            logger.info("Checking if computer is already initialized...")
-            if not self.computer._initialized:
-                logger.info("Initializing computer in __aenter__...")
-                # Use the computer's __aenter__ directly instead of calling run()
-                # This avoids the circular dependency
-                await self.computer.__aenter__()
-                logger.info("Computer initialized in __aenter__")
-            else:
-                logger.info("Computer already initialized, skipping initialization")
-
-            # Take a test screenshot to verify the computer is working
-            logger.info("Testing computer with a screenshot...")
-            try:
-                test_screenshot = await self.computer.interface.screenshot()
-                # Determine the screenshot size based on its type
-                if isinstance(test_screenshot, bytes):
-                    size = len(test_screenshot)
-                else:
-                    # Assume it's an object with base64_image attribute
-                    try:
-                        size = len(test_screenshot.base64_image)
-                    except AttributeError:
-                        size = "unknown"
-                logger.info(f"Screenshot test successful, size: {size}")
-            except Exception as e:
-                logger.error(f"Screenshot test failed: {str(e)}")
-                # Even though screenshot failed, we continue since some tests might not need it
-        except Exception as e:
-            logger.error(f"Error initializing computer in __aenter__: {str(e)}")
-            raise
-
-        return self
-
-    async def __aexit__(self, exc_type, exc_val, exc_tb):
-        """Cleanup computer resources if needed."""
-        logger.info("Cleaning up agent resources")
-
-        # Do any necessary cleanup
-        # We're not shutting down the computer here as it might be shared
-        # Just log that we're exiting
-        if exc_type:
-            logger.error(f"Exiting agent context with error: {exc_type.__name__}: {exc_val}")
-        else:
-            logger.info("Exiting agent context normally")
-
-        # If we have a queue, make sure to signal it's done
-        if hasattr(self, "queue") and self.queue:
-            await self.queue.put(None)  # Signal that we're done
-
-    @abstractmethod
-    async def _execute_task(self, task: str) -> AsyncGenerator[Dict[str, Any], None]:
-        """Execute a task. Must be implemented by subclasses.
-
-        This is an async method that returns an AsyncGenerator. Implementations
-        should use 'yield' statements to produce results asynchronously.
-        """
-        yield {
-            "role": "assistant",
-            "content": "Base class method called",
-            "metadata": {"title": "Error"},
-        }
-        raise NotImplementedError("Subclasses must implement _execute_task")
@@ -1,69 +1,251 @@
 """Main entry point for computer agents."""

 import asyncio
 import logging
-from typing import Any, AsyncGenerator, Dict, Optional
+import os
+from typing import Any, AsyncGenerator, Dict, Optional, cast
+from dataclasses import dataclass

 from computer import Computer
-from ..types.base import Provider
-from .factory import AgentFactory
+from ..providers.anthropic.loop import AnthropicLoop
+from ..providers.omni.loop import OmniLoop
+from ..providers.omni.parser import OmniParser
+from ..providers.omni.types import LLMProvider, LLM
+from .. import AgentLoop

 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)

+# Default models for different providers
+DEFAULT_MODELS = {
+    LLMProvider.OPENAI: "gpt-4o",
+    LLMProvider.ANTHROPIC: "claude-3-7-sonnet-20250219",
+}
+
+# Map providers to their environment variable names
+ENV_VARS = {
+    LLMProvider.OPENAI: "OPENAI_API_KEY",
+    LLMProvider.ANTHROPIC: "ANTHROPIC_API_KEY",
+}
+

 class ComputerAgent:
     """A computer agent that can perform automated tasks using natural language instructions."""

-    def __init__(self, provider: Provider, computer: Optional[Computer] = None, **kwargs):
+    def __init__(
+        self,
+        computer: Computer,
+        model: LLM,
+        loop: AgentLoop,
+        max_retries: int = 3,
+        screenshot_dir: Optional[str] = None,
+        log_dir: Optional[str] = None,
+        api_key: Optional[str] = None,
+        save_trajectory: bool = True,
+        trajectory_dir: str = "trajectories",
+        only_n_most_recent_images: Optional[int] = None,
+        parser: Optional[OmniParser] = None,
+        verbosity: int = logging.INFO,
+    ):
         """Initialize the ComputerAgent.

         Args:
-            provider: The AI provider to use (e.g., Provider.ANTHROPIC)
-            computer: Optional Computer instance. If not provided, one will be created with default settings.
-            **kwargs: Additional provider-specific arguments
+            computer: Computer instance. If not provided, one will be created with default settings.
+            max_retries: Maximum number of retry attempts.
+            screenshot_dir: Directory to save screenshots.
+            log_dir: Directory to save logs (set to None to disable logging to files).
+            model: LLM object containing provider and model name. Takes precedence over provider/model_name.
+            provider: The AI provider to use (e.g., LLMProvider.ANTHROPIC). Only used if model is None.
+            api_key: The API key for the provider. If not provided, will look for environment variable.
+            model_name: The model name to use. Only used if model is None.
+            save_trajectory: Whether to save the trajectory.
+            trajectory_dir: Directory to save the trajectory.
+            only_n_most_recent_images: Maximum number of recent screenshots to include in API requests.
+            parser: Parser instance for the OmniLoop. Only used if provider is not ANTHROPIC.
+            verbosity: Logging level.
         """
-        self.provider = provider
-        self._computer = computer
-        self._kwargs = kwargs
-        self._agent = None
+        # Basic agent configuration
+        self.max_retries = max_retries
+        self.computer = computer or Computer()
+        self.queue = asyncio.Queue()
+        self.screenshot_dir = screenshot_dir
+        self.log_dir = log_dir
+        self._retry_count = 0
+        self._initialized = False
+        self._in_context = False

-        # Create provider-specific agent using factory
-        self._agent = AgentFactory.create(provider=provider, computer=computer, **kwargs)
+        # Set logging level
+        logger.setLevel(verbosity)

+        # Setup logging
+        if self.log_dir:
+            os.makedirs(self.log_dir, exist_ok=True)
+            logger.info(f"Created logs directory: {self.log_dir}")
+
+        # Setup screenshots directory
+        if self.screenshot_dir:
+            os.makedirs(self.screenshot_dir, exist_ok=True)
+            logger.info(f"Created screenshots directory: {self.screenshot_dir}")
+
+        # Use the provided LLM object
+        self.provider = model.provider
+        actual_model_name = model.name or DEFAULT_MODELS.get(self.provider, "")
+
+        # Ensure we have a valid model name
+        if not actual_model_name:
+            actual_model_name = DEFAULT_MODELS.get(self.provider, "")
+            if not actual_model_name:
+                raise ValueError(
+                    f"No model specified for provider {self.provider} and no default found"
+                )
+
+        # Ensure computer is properly cast for typing purposes
+        computer_instance = cast(Computer, self.computer)
+
+        # Get API key from environment if not provided
+        actual_api_key = api_key or os.environ.get(ENV_VARS[self.provider], "")
+        if not actual_api_key:
+            raise ValueError(f"No API key provided for {self.provider}")
+
+        # Initialize the appropriate loop based on the loop parameter
+        if loop == AgentLoop.ANTHROPIC:
+            self._loop = AnthropicLoop(
+                api_key=actual_api_key,
+                model=actual_model_name,
+                computer=computer_instance,
+                save_trajectory=save_trajectory,
+                base_dir=trajectory_dir,
+                only_n_most_recent_images=only_n_most_recent_images,
+            )
+        else:
+            # Default to OmniLoop for other loop types
+            # Initialize parser if not provided
+            actual_parser = parser or OmniParser()
+
+            self._loop = OmniLoop(
+                provider=self.provider,
+                api_key=actual_api_key,
+                model=actual_model_name,
+                computer=computer_instance,
+                save_trajectory=save_trajectory,
+                base_dir=trajectory_dir,
+                only_n_most_recent_images=only_n_most_recent_images,
+                parser=actual_parser,
+            )
+
+        logger.info(
+            f"ComputerAgent initialized with provider: {self.provider}, model: {actual_model_name}"
+        )
+
     async def __aenter__(self):
-        """Enter the async context manager."""
+        """Initialize the agent when used as a context manager."""
+        logger.info("Entering ComputerAgent context")
+        self._in_context = True
+
+        # In case the computer wasn't initialized
+        try:
+            # Initialize the computer only if not already initialized
+            logger.info("Checking if computer is already initialized...")
+            if not self.computer._initialized:
+                logger.info("Initializing computer in __aenter__...")
+                # Use the computer's __aenter__ directly instead of calling run()
+                await self.computer.__aenter__()
+                logger.info("Computer initialized in __aenter__")
+            else:
+                logger.info("Computer already initialized, skipping initialization")
+
+            # Take a test screenshot to verify the computer is working
+            logger.info("Testing computer with a screenshot...")
+            try:
+                test_screenshot = await self.computer.interface.screenshot()
+                # Determine the screenshot size based on its type
+                if isinstance(test_screenshot, (bytes, bytearray, memoryview)):
+                    size = len(test_screenshot)
+                elif hasattr(test_screenshot, "base64_image"):
+                    size = len(test_screenshot.base64_image)
+                else:
+                    size = "unknown"
+                logger.info(f"Screenshot test successful, size: {size}")
+            except Exception as e:
+                logger.error(f"Screenshot test failed: {str(e)}")
+                # Even though screenshot failed, we continue since some tests might not need it
+        except Exception as e:
+            logger.error(f"Error initializing computer in __aenter__: {str(e)}")
+            raise
+
         await self.initialize()
         return self

     async def __aexit__(self, exc_type, exc_val, exc_tb):
-        """Exit the async context manager."""
+        """Cleanup agent resources if needed."""
+        logger.info("Cleaning up agent resources")
         self._in_context = False

+        # Do any necessary cleanup
+        # We're not shutting down the computer here as it might be shared
+        # Just log that we're exiting
+        if exc_type:
+            logger.error(f"Exiting agent context with error: {exc_type.__name__}: {exc_val}")
+        else:
+            logger.info("Exiting agent context normally")
+
+        # If we have a queue, make sure to signal it's done
+        if hasattr(self, "queue") and self.queue:
+            await self.queue.put(None)  # Signal that we're done
+
     async def initialize(self) -> None:
         """Initialize the agent and its components."""
         if not self._initialized:
-            if not self._in_context and self._computer:
-                # If not in context manager but have a computer, initialize it
-                await self._computer.run()
+            # Always initialize the computer if available
+            if self.computer and not self.computer._initialized:
+                await self.computer.run()
             self._initialized = True

+    async def _init_if_needed(self):
+        """Initialize the computer interface if it hasn't been initialized yet."""
+        if not self.computer._initialized:
+            logger.info("Computer not initialized, initializing now...")
+            try:
+                # Call run directly
+                await self.computer.run()
+                logger.info("Computer interface initialized successfully")
+            except Exception as e:
+                logger.error(f"Error initializing computer interface: {str(e)}")
+                raise
+
     async def run(self, task: str) -> AsyncGenerator[Dict[str, Any], None]:
-        """Run the agent with a given task."""
-        if not self._initialized:
-            await self.initialize()
-
-        if self._agent is None:
-            logger.error("Agent not initialized properly")
-            yield {"error": "Agent not initialized properly"}
-            return
-
-        async for result in self._agent.run(task):
-            yield result
-
-    @property
-    def computer(self) -> Optional[Computer]:
-        """Get the underlying computer instance."""
-        return self._agent.computer if self._agent else None
+        """Run a task using the computer agent.
+
+        Args:
+            task: Task description
+
+        Yields:
+            Task execution updates
+        """
+        try:
+            logger.info(f"Running task: {task}")
+
+            # Initialize the computer if needed
+            if not self._initialized:
+                await self.initialize()
+
+            # Format task as a message
+            messages = [{"role": "user", "content": task}]
+
+            # Pass properly formatted messages to the loop
+            if self._loop is None:
+                logger.error("Loop not initialized properly")
+                yield {"error": "Loop not initialized properly"}
+                return
+
+            # Execute the task and yield results
+            async for result in self._loop.run(messages):
+                yield result
+
+        except Exception as e:
+            logger.error(f"Error in agent run method: {str(e)}")
+            yield {
+                "role": "assistant",
+                "content": f"Error: {str(e)}",
+                "metadata": {"title": "❌ Error"},
+            }
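The rewritten ComputerAgent drops the factory indirection: the model and loop are now required constructor arguments, and the class selects AnthropicLoop or OmniLoop itself, reading the API key from ANTHROPIC_API_KEY or OPENAI_API_KEY when none is passed. A minimal sketch of the new call shape, assuming the import path used by this commit's updated examples (the task string is a placeholder):

```python
import asyncio

from computer import Computer
from agent import AgentLoop
from agent.providers.omni.types import LLMProvider, LLM
from agent.core.computer_agent import ComputerAgent  # import path used by the updated examples


async def main() -> None:
    computer = Computer()
    # model and loop are now required; api_key falls back to the provider's env var.
    agent = ComputerAgent(
        computer=computer,
        model=LLM(provider=LLMProvider.ANTHROPIC, name="claude-3-7-sonnet-20250219"),
        loop=AgentLoop.ANTHROPIC,
    )
    # __aenter__ initializes the computer and takes a verification screenshot.
    async with agent:
        async for update in agent.run("Take a screenshot and describe the desktop"):  # placeholder
            print(update)


asyncio.run(main())
```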
@@ -84,7 +84,21 @@ class ExperimentManager:
         if isinstance(data, dict):
             result = {}
             for k, v in data.items():
-                result[k] = self.sanitize_log_data(v)
+                # Special handling for 'data' field in Anthropic message source
+                if k == "data" and isinstance(v, str) and len(v) > 1000:
+                    result[k] = f"[BASE64_DATA_LENGTH_{len(v)}]"
+                # Special handling for the 'media_type' key which indicates we're in an image block
+                elif k == "media_type" and "image" in str(v):
+                    result[k] = v
+                    # If we're in an image block, look for a sibling 'data' field with base64 content
+                    if (
+                        "data" in result
+                        and isinstance(result["data"], str)
+                        and len(result["data"]) > 1000
+                    ):
+                        result["data"] = f"[BASE64_DATA_LENGTH_{len(result['data'])}]"
+                else:
+                    result[k] = self.sanitize_log_data(v)
             return result
         elif isinstance(data, list):
             return [self.sanitize_log_data(item) for item in data]
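The effect of the new branches is that long base64 payloads are replaced by a length marker while everything else is recursed into unchanged. A simplified standalone mirror of that logic (it omits the media_type sibling handling and is not the class method itself):

```python
from typing import Any


def sanitize(data: Any) -> Any:
    """Simplified mirror of ExperimentManager.sanitize_log_data's new branches."""
    if isinstance(data, dict):
        result: dict = {}
        for k, v in data.items():
            # Long base64 payloads are replaced by a compact length marker.
            if k == "data" and isinstance(v, str) and len(v) > 1000:
                result[k] = f"[BASE64_DATA_LENGTH_{len(v)}]"
            else:
                result[k] = sanitize(v)
        return result
    if isinstance(data, list):
        return [sanitize(item) for item in data]
    return data


block = {"source": {"media_type": "image/png", "data": "iVBORw0KGgo" * 200}}
print(sanitize(block)["source"]["data"])  # -> [BASE64_DATA_LENGTH_2200]
```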
@@ -93,15 +107,18 @@ class ExperimentManager:
         else:
             return data

-    def save_screenshot(self, img_base64: str, action_type: str = "") -> None:
+    def save_screenshot(self, img_base64: str, action_type: str = "") -> Optional[str]:
         """Save a screenshot to the experiment directory.

         Args:
             img_base64: Base64 encoded screenshot
             action_type: Type of action that triggered the screenshot
+
+        Returns:
+            Path to the saved screenshot or None if there was an error
         """
         if not self.current_turn_dir:
-            return
+            return None

         try:
             # Increment screenshot counter
@@ -1,102 +0,0 @@
-"""Factory for creating provider-specific agents."""
-
-from typing import Optional, Dict, Any, List
-
-from computer import Computer
-from ..types.base import Provider
-from .base_agent import BaseComputerAgent
-
-# Import provider-specific implementations
-_ANTHROPIC_AVAILABLE = False
-_OPENAI_AVAILABLE = False
-_OLLAMA_AVAILABLE = False
-_OMNI_AVAILABLE = False
-
-# Try importing providers
-try:
-    import anthropic
-    from ..providers.anthropic.agent import AnthropicComputerAgent
-
-    _ANTHROPIC_AVAILABLE = True
-except ImportError:
-    pass
-
-try:
-    import openai
-
-    _OPENAI_AVAILABLE = True
-except ImportError:
-    pass
-
-try:
-    from ..providers.omni.agent import OmniComputerAgent
-
-    _OMNI_AVAILABLE = True
-except ImportError:
-    pass
-
-
-class AgentFactory:
-    """Factory for creating provider-specific agent implementations."""
-
-    @staticmethod
-    def create(
-        provider: Provider, computer: Optional[Computer] = None, **kwargs: Any
-    ) -> BaseComputerAgent:
-        """Create an agent based on the specified provider.
-
-        Args:
-            provider: The AI provider to use
-            computer: Optional Computer instance
-            **kwargs: Additional provider-specific arguments
-
-        Returns:
-            A provider-specific agent implementation
-
-        Raises:
-            ImportError: If provider dependencies are not installed
-            ValueError: If provider is not supported
-        """
-        # Create a Computer instance if none is provided
-        if computer is None:
-            computer = Computer()
-
-        if provider == Provider.ANTHROPIC:
-            if not _ANTHROPIC_AVAILABLE:
-                raise ImportError(
-                    "Anthropic provider requires additional dependencies. "
-                    "Install them with: pip install cua-agent[anthropic]"
-                )
-            return AnthropicComputerAgent(max_retries=3, computer=computer, **kwargs)
-        elif provider == Provider.OPENAI:
-            if not _OPENAI_AVAILABLE:
-                raise ImportError(
-                    "OpenAI provider requires additional dependencies. "
-                    "Install them with: pip install cua-agent[openai]"
-                )
-            raise NotImplementedError("OpenAI provider not yet implemented")
-        elif provider == Provider.OLLAMA:
-            if not _OLLAMA_AVAILABLE:
-                raise ImportError(
-                    "Ollama provider requires additional dependencies. "
-                    "Install them with: pip install cua-agent[ollama]"
-                )
-            # Only import ollama when actually creating an Ollama agent
-            try:
-                import ollama
-                from ..providers.ollama.agent import OllamaComputerAgent
-
-                return OllamaComputerAgent(max_retries=3, computer=computer, **kwargs)
-            except ImportError:
-                raise ImportError(
-                    "Failed to import ollama package. Install it with: pip install ollama"
-                )
-        elif provider == Provider.OMNI:
-            if not _OMNI_AVAILABLE:
-                raise ImportError(
-                    "Omni provider requires additional dependencies. "
-                    "Install them with: pip install cua-agent[omni]"
-                )
-            return OmniComputerAgent(max_retries=3, computer=computer, **kwargs)
-        else:
-            raise ValueError(f"Unsupported provider: {provider}")
@@ -141,9 +141,6 @@ class BaseLoop(ABC):
                 # Initialize API client
                 await self.initialize_client()

-                # Initialize computer
-                await self.computer.initialize()
-
                 logger.info("Initialization complete.")
                 return
             except Exception as e:
@@ -173,15 +170,22 @@ class BaseLoop(ABC):
             base64_image = ""

             # Handle different types of screenshot returns
-            if isinstance(screenshot, bytes):
+            if isinstance(screenshot, (bytes, bytearray, memoryview)):
                 # Raw bytes screenshot
                 base64_image = base64.b64encode(screenshot).decode("utf-8")
             elif hasattr(screenshot, "base64_image"):
                 # Object-style screenshot with attributes
-                base64_image = screenshot.base64_image
-                if hasattr(screenshot, "width") and hasattr(screenshot, "height"):
-                    width = screenshot.width
-                    height = screenshot.height
+                # Type checking can't infer these attributes, but they exist at runtime
+                # on certain screenshot return types
+                base64_image = getattr(screenshot, "base64_image")
+                width = (
+                    getattr(screenshot, "width", width) if hasattr(screenshot, "width") else width
+                )
+                height = (
+                    getattr(screenshot, "height", height)
+                    if hasattr(screenshot, "height")
+                    else height
+                )

             # Create parsed screen data
             parsed_screen = {
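The widened isinstance check covers the common buffer types a screenshot call may return. A standalone sketch of the same normalization, detached from BaseLoop:

```python
import base64
from typing import Any


def to_base64_image(screenshot: Any) -> str:
    """Normalize raw buffers or object-style screenshots to a base64 string."""
    if isinstance(screenshot, (bytes, bytearray, memoryview)):
        # bytes(...) makes memoryview/bytearray safe to encode uniformly.
        return base64.b64encode(bytes(screenshot)).decode("utf-8")
    if hasattr(screenshot, "base64_image"):
        return screenshot.base64_image
    raise TypeError(f"Unsupported screenshot type: {type(screenshot)!r}")


print(to_base64_image(b"\x89PNG..."))  # raw bytes are encoded directly
```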
@@ -4,39 +4,11 @@ import logging
 import os
 import platform
 import sys
-from typing import Dict, Any
+from typing import Dict, Any, Callable

 # Import the core telemetry module
 TELEMETRY_AVAILABLE = False

-try:
-    from core.telemetry import (
-        record_event,
-        increment,
-        get_telemetry_client,
-        flush,
-        is_telemetry_enabled,
-        is_telemetry_globally_disabled,
-    )
-
-    def increment_counter(counter_name: str, value: int = 1) -> None:
-        """Wrapper for increment to maintain backward compatibility."""
-        if is_telemetry_enabled():
-            increment(counter_name, value)
-
-    def set_dimension(name: str, value: Any) -> None:
-        """Set a dimension that will be attached to all events."""
-        logger = logging.getLogger("cua.agent.telemetry")
-        logger.debug(f"Setting dimension {name}={value}")
-
-    TELEMETRY_AVAILABLE = True
-    logger = logging.getLogger("cua.agent.telemetry")
-    logger.info("Successfully imported telemetry")
-except ImportError as e:
-    logger = logging.getLogger("cua.agent.telemetry")
-    logger.warning(f"Could not import telemetry: {e}")
-    TELEMETRY_AVAILABLE = False
-

 # Local fallbacks in case core telemetry isn't available
 def _noop(*args: Any, **kwargs: Any) -> None:
@@ -44,18 +16,58 @@ def _noop(*args: Any, **kwargs: Any) -> None:
     pass


+# Define default functions with unique names to avoid shadowing
+_default_record_event = _noop
+_default_increment_counter = _noop
+_default_set_dimension = _noop
+_default_get_telemetry_client = lambda: None
+_default_flush = _noop
+_default_is_telemetry_enabled = lambda: False
+_default_is_telemetry_globally_disabled = lambda: True
+
+# Set the actual functions to the defaults initially
+record_event = _default_record_event
+increment_counter = _default_increment_counter
+set_dimension = _default_set_dimension
+get_telemetry_client = _default_get_telemetry_client
+flush = _default_flush
+is_telemetry_enabled = _default_is_telemetry_enabled
+is_telemetry_globally_disabled = _default_is_telemetry_globally_disabled
+
+logger = logging.getLogger("cua.agent.telemetry")
+
 # If telemetry isn't available, use no-op functions
 if not TELEMETRY_AVAILABLE:
+    try:
+        # Import from core telemetry
+        from core.telemetry import (
+            record_event as core_record_event,
+            increment as core_increment,
+            get_telemetry_client as core_get_telemetry_client,
+            flush as core_flush,
+            is_telemetry_enabled as core_is_telemetry_enabled,
+            is_telemetry_globally_disabled as core_is_telemetry_globally_disabled,
+        )
+
+        # Override the default functions with actual implementations
+        record_event = core_record_event
+        get_telemetry_client = core_get_telemetry_client
+        flush = core_flush
+        is_telemetry_enabled = core_is_telemetry_enabled
+        is_telemetry_globally_disabled = core_is_telemetry_globally_disabled
+
+        def increment_counter(counter_name: str, value: int = 1) -> None:
+            """Wrapper for increment to maintain backward compatibility."""
+            if is_telemetry_enabled():
+                core_increment(counter_name, value)
+
+        def set_dimension(name: str, value: Any) -> None:
+            """Set a dimension that will be attached to all events."""
+            logger.debug(f"Setting dimension {name}={value}")
+
+        TELEMETRY_AVAILABLE = True
+        logger.info("Successfully imported telemetry")
+    except ImportError as e:
+        logger.warning(f"Could not import telemetry: {e}")
+        logger.debug("Telemetry not available, using no-op functions")
+        record_event = _noop  # type: ignore
+        increment_counter = _noop  # type: ignore
+        set_dimension = _noop  # type: ignore
+        get_telemetry_client = lambda: None  # type: ignore
+        flush = _noop  # type: ignore
+        is_telemetry_enabled = lambda: False  # type: ignore
+        is_telemetry_globally_disabled = lambda: True  # type: ignore

 # Get system info once to use in telemetry
 SYSTEM_INFO = {
@@ -71,7 +83,7 @@ def enable_telemetry() -> bool:

     Returns:
         bool: True if telemetry was successfully enabled, False otherwise
     """
-    global TELEMETRY_AVAILABLE
+    global TELEMETRY_AVAILABLE, record_event, increment_counter, get_telemetry_client, flush, is_telemetry_enabled, is_telemetry_globally_disabled

     # Check if globally disabled using core function
     if TELEMETRY_AVAILABLE and is_telemetry_globally_disabled():
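The widened `global` statement matters because enable_telemetry() rebinds module-level function names; without listing each name, Python would create function-local bindings and leave the module-level ones untouched. A minimal, self-contained illustration of that pitfall (names here are invented for the demo):

```python
def handler() -> str:
    return "noop"


def swap_handler() -> None:
    # Without the global statement, the assignment below would create a
    # local named `handler` and the module-level binding would not change.
    global handler

    def real() -> str:
        return "real"

    handler = real


swap_handler()
print(handler())  # prints "real"
```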
@@ -17,6 +17,7 @@ from anthropic.types.beta import (
     BetaTextBlock,
     BetaTextBlockParam,
     BetaToolUseBlockParam,
+    BetaContentBlockParam,
 )

 # Computer
@@ -24,12 +25,12 @@ from computer import Computer

 # Base imports
 from ...core.loop import BaseLoop
-from ...core.messages import ImageRetentionConfig
+from ...core.messages import ImageRetentionConfig as CoreImageRetentionConfig

 # Anthropic provider-specific imports
 from .api.client import AnthropicClientFactory, BaseAnthropicClient
 from .tools.manager import ToolManager
-from .messages.manager import MessageManager
+from .messages.manager import MessageManager, ImageRetentionConfig
 from .callbacks.manager import CallbackManager
 from .prompts import SYSTEM_PROMPT
 from .types import LLMProvider
@@ -48,8 +49,8 @@ class AnthropicLoop(BaseLoop):
     def __init__(
         self,
         api_key: str,
+        computer: Computer,
         model: str = "claude-3-7-sonnet-20250219",  # Fixed model
-        computer: Optional[Computer] = None,
         only_n_most_recent_images: Optional[int] = 2,
         base_dir: Optional[str] = "trajectories",
         max_retries: int = 3,
@@ -69,7 +70,7 @@ class AnthropicLoop(BaseLoop):
             retry_delay: Delay between retries in seconds
             save_trajectory: Whether to save trajectory data
         """
-        # Initialize base class
+        # Initialize base class with core config
         super().__init__(
             computer=computer,
             model=model,
@@ -93,8 +94,8 @@ class AnthropicLoop(BaseLoop):
         self.message_manager = None
         self.callback_manager = None

-        # Configure image retention
-        self.image_retention_config = ImageRetentionConfig(
+        # Configure image retention with core config
+        self.image_retention_config = CoreImageRetentionConfig(
             num_images_to_keep=only_n_most_recent_images
         )
@@ -113,7 +114,7 @@ class AnthropicLoop(BaseLoop):

         # Initialize message manager
         self.message_manager = MessageManager(
-            ImageRetentionConfig(
+            image_retention_config=ImageRetentionConfig(
                 num_images_to_keep=self.only_n_most_recent_images, enable_caching=True
             )
         )
@@ -250,6 +251,10 @@
                 await self._process_screen(parsed_screen, self.message_history)

                 # Prepare messages and make API call
+                if self.message_manager is None:
+                    raise RuntimeError(
+                        "Message manager not initialized. Call initialize_client() first."
+                    )
                 prepared_messages = self.message_manager.prepare_messages(
                     cast(List[BetaMessageParam], self.message_history.copy())
                 )
@@ -257,7 +262,7 @@
                 # Create new turn directory for this API call
                 self._create_turn_dir()

-                # Make API call
+                # Use _make_api_call instead of direct client call to ensure logging
                 response = await self._make_api_call(prepared_messages)

                 # Handle the response
@@ -287,6 +292,11 @@
         Returns:
             API response
         """
+        if self.client is None:
+            raise RuntimeError("Client not initialized. Call initialize_client() first.")
+        if self.tool_manager is None:
+            raise RuntimeError("Tool manager not initialized. Call initialize_client() first.")
+
         last_error = None

         for attempt in range(self.max_retries):
@@ -297,6 +307,7 @@
                     "max_tokens": self.max_tokens,
                     "system": SYSTEM_PROMPT,
                 }
+                # Let ExperimentManager handle sanitization
                 self._log_api_call("request", request_data)

                 # Setup betas and system
@@ -320,7 +331,7 @@
                     betas=betas,
                 )

-                # Log success response
+                # Let ExperimentManager handle sanitization
                 self._log_api_call("response", request_data, response)

                 return response
@@ -365,25 +376,38 @@
                     }
                 )

+        if self.callback_manager is None:
+            raise RuntimeError(
+                "Callback manager not initialized. Call initialize_client() first."
+            )
+
         # Handle tool use blocks and collect results
         tool_result_content = []
         for content_block in response_params:
             # Notify callback of content
-            self.callback_manager.on_content(content_block)
+            self.callback_manager.on_content(cast(BetaContentBlockParam, content_block))

             # Handle tool use
             if content_block.get("type") == "tool_use":
+                if self.tool_manager is None:
+                    raise RuntimeError(
+                        "Tool manager not initialized. Call initialize_client() first."
+                    )
                 result = await self.tool_manager.execute_tool(
                     name=content_block["name"],
                     tool_input=cast(Dict[str, Any], content_block["input"]),
                 )

                 # Create tool result and add to content
-                tool_result = self._make_tool_result(result, content_block["id"])
+                tool_result = self._make_tool_result(
+                    cast(ToolResult, result), content_block["id"]
+                )
                 tool_result_content.append(tool_result)

                 # Notify callback of tool result
-                self.callback_manager.on_tool_result(result, content_block["id"])
+                self.callback_manager.on_tool_result(
+                    cast(ToolResult, result), content_block["id"]
+                )

         # If no tool results, we're done
         if not tool_result_content:
@@ -495,13 +519,13 @@
             result_text = f"<s>{result.system}</s>\n{result_text}"
         return result_text

-    def _handle_content(self, content: Dict[str, Any]) -> None:
+    def _handle_content(self, content: BetaContentBlockParam) -> None:
         """Handle content updates from the assistant."""
         if content.get("type") == "text":
-            text = content.get("text", "")
+            text_content = cast(BetaTextBlockParam, content)
+            text = text_content["text"]
             if text == "<DONE>":
                 return

             logger.info(f"Assistant: {text}")

     def _handle_tool_result(self, result: ToolResult, tool_id: str) -> None:
@@ -517,5 +541,10 @@
         """Handle API interactions."""
         if error:
             logger.error(f"API error: {error}")
+            self._log_api_call("error", request, error=error)
         else:
             logger.debug(f"API request: {request}")
+            if response:
+                self._log_api_call("response", request, response)
+            else:
+                self._log_api_call("request", request)
@@ -90,7 +90,9 @@ class MessageManager:
|
||||
blocks_with_cache_control += 1
|
||||
# Add cache control to the last content block only
|
||||
if content and len(content) > 0:
|
||||
content[-1]["cache_control"] = {"type": "ephemeral"}
|
||||
content[-1]["cache_control"] = BetaCacheControlEphemeralParam(
|
||||
type="ephemeral"
|
||||
)
|
||||
else:
|
||||
# Remove any existing cache control
|
||||
if content and len(content) > 0:
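The swap from a raw dict to BetaCacheControlEphemeralParam is type-level only; both spellings serialize to the same JSON. A minimal sketch, assuming the anthropic SDK's beta types:

from anthropic.types.beta import BetaCacheControlEphemeralParam

content = [{"type": "text", "text": "long, stable system context..."}]
# Mark only the final block; prompt caching covers the prefix up to the marker.
content[-1]["cache_control"] = BetaCacheControlEphemeralParam(type="ephemeral")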
@@ -6,7 +6,7 @@ from typing import Any, Dict

from anthropic.types.beta import BetaToolUnionParam

from ....core.tools.base import BaseTool, ToolError, ToolResult, ToolFailure, CLIResult
from ....core.tools.base import BaseTool


class BaseAnthropicTool(BaseTool, metaclass=ABCMeta):

@@ -1,6 +1,6 @@
"""Collection classes for managing multiple tools."""

from typing import Any
from typing import Any, cast

from anthropic.types.beta import BetaToolUnionParam

@@ -22,7 +22,7 @@ class ToolCollection:
    def to_params(
        self,
    ) -> list[BetaToolUnionParam]:
        return [tool.to_params() for tool in self.tools]
        return cast(list[BetaToolUnionParam], [tool.to_params() for tool in self.tools])

    async def run(self, *, name: str, tool_input: dict[str, Any]) -> ToolResult:
        tool = self.tool_map.get(name)
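Note that typing.cast only asserts a type to the checker; it returns its argument unchanged at runtime, so the casts added throughout this commit cannot alter behavior:

from typing import cast

values: list[object] = [1, 2, 3]
ints = cast(list[int], values)  # no runtime check, no copy
assert ints is values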
@@ -61,9 +61,9 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):

    name: Literal["computer"] = "computer"
    api_type: Literal["computer_20250124"] = "computer_20250124"
    width: int | None
    height: int | None
    display_num: int | None
    width: int | None = None
    height: int | None = None
    display_num: int | None = None
    computer: Computer  # The CUA Computer instance
    logger = logging.getLogger(__name__)

@@ -106,6 +106,7 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
        display_size = await self.computer.interface.get_screen_size()
        self.width = display_size["width"]
        self.height = display_size["height"]
        assert isinstance(self.width, int) and isinstance(self.height, int)
        self.logger.info(f"Initialized screen dimensions to {self.width}x{self.height}")

    async def __call__(

@@ -120,6 +121,8 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
            # Ensure dimensions are initialized
            if self.width is None or self.height is None:
                await self.initialize_dimensions()
                if self.width is None or self.height is None:
                    raise ToolError("Failed to initialize screen dimensions")
        except Exception as e:
            raise ToolError(f"Failed to initialize dimensions: {e}")

@@ -147,7 +150,10 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
                    self.logger.info(
                        f"Scaling image from {pre_img.size} to {self.width}x{self.height} to match screen dimensions"
                    )
                    pre_img = pre_img.resize((self.width, self.height), Image.Resampling.LANCZOS)
                    if not isinstance(self.width, int) or not isinstance(self.height, int):
                        raise ToolError("Screen dimensions must be integers")
                    size = (int(self.width), int(self.height))
                    pre_img = pre_img.resize(size, Image.Resampling.LANCZOS)

                self.logger.info(f" Current dimensions: {pre_img.width}x{pre_img.height}")

@@ -160,15 +166,7 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
                await self.computer.interface.move_cursor(x, y)
                # Then perform drag operation - check if drag_to exists or we need to use other methods
                try:
                    if hasattr(self.computer.interface, "drag_to"):
                        await self.computer.interface.drag_to(x, y)
                    else:
                        # Alternative approach: press mouse down, move, release
                        await self.computer.interface.mouse_down()
                        await asyncio.sleep(0.2)
                        await self.computer.interface.move_cursor(x, y)
                        await asyncio.sleep(0.2)
                        await self.computer.interface.mouse_up()
                    await self.computer.interface.drag_to(x, y)
                except Exception as e:
                    self.logger.error(f"Error during drag operation: {str(e)}")
                    raise ToolError(f"Failed to perform drag: {str(e)}")
@@ -214,9 +212,10 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
                    self.logger.info(
                        f"Scaling image from {pre_img.size} to {self.width}x{self.height} to match screen dimensions"
                    )
                    pre_img = pre_img.resize(
                        (self.width, self.height), Image.Resampling.LANCZOS
                    )
                    if not isinstance(self.width, int) or not isinstance(self.height, int):
                        raise ToolError("Screen dimensions must be integers")
                    size = (int(self.width), int(self.height))
                    pre_img = pre_img.resize(size, Image.Resampling.LANCZOS)
                # Save the scaled image back to bytes
                buffer = io.BytesIO()
                pre_img.save(buffer, format="PNG")

@@ -275,9 +274,10 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
                    self.logger.info(
                        f"Scaling image from {pre_img.size} to {self.width}x{self.height}"
                    )
                    pre_img = pre_img.resize(
                        (self.width, self.height), Image.Resampling.LANCZOS
                    )
                    if not isinstance(self.width, int) or not isinstance(self.height, int):
                        raise ToolError("Screen dimensions must be integers")
                    size = (int(self.width), int(self.height))
                    pre_img = pre_img.resize(size, Image.Resampling.LANCZOS)

                # Perform the click action
                if action == "left_click":

@@ -335,7 +335,10 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
                    self.logger.info(
                        f"Scaling image from {pre_img.size} to {self.width}x{self.height}"
                    )
                    pre_img = pre_img.resize((self.width, self.height), Image.Resampling.LANCZOS)
                    if not isinstance(self.width, int) or not isinstance(self.height, int):
                        raise ToolError("Screen dimensions must be integers")
                    size = (int(self.width), int(self.height))
                    pre_img = pre_img.resize(size, Image.Resampling.LANCZOS)

            if action == "key":
                # Special handling for page up/down on macOS

@@ -365,7 +368,7 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
                    # Handle single key press
                    self.logger.info(f"Pressing key: {text}")
                    try:
                        await self.computer.interface.press(text)
                        await self.computer.interface.press_key(text)
                        output_text = text
                    except ValueError as e:
                        raise ToolError(f"Invalid key: {text}. {str(e)}")
@@ -442,7 +445,10 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
                    self.logger.info(
                        f"Scaling image from {img.size} to {self.width}x{self.height}"
                    )
                    img = img.resize((self.width, self.height), Image.Resampling.LANCZOS)
                    if not isinstance(self.width, int) or not isinstance(self.height, int):
                        raise ToolError("Screen dimensions must be integers")
                    size = (int(self.width), int(self.height))
                    img = img.resize(size, Image.Resampling.LANCZOS)
                buffer = io.BytesIO()
                img.save(buffer, format="PNG")
                screenshot = buffer.getvalue()

@@ -451,7 +457,8 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):

            elif action == "cursor_position":
                pos = await self.computer.interface.get_cursor_position()
                return ToolResult(output=f"X={int(pos[0])},Y={int(pos[1])}")
                x, y = pos  # Unpack the tuple
                return ToolResult(output=f"X={int(x)},Y={int(y)}")

        except Exception as e:
            self.logger.error(f"Error during {action} action: {str(e)}")

@@ -517,7 +524,10 @@ class ComputerTool(BaseComputerTool, BaseAnthropicTool):
            # Scale image if needed
            if img.size != (self.width, self.height):
                self.logger.info(f"Scaling image from {img.size} to {self.width}x{self.height}")
                img = img.resize((self.width, self.height), Image.Resampling.LANCZOS)
                if not isinstance(self.width, int) or not isinstance(self.height, int):
                    raise ToolError("Screen dimensions must be integers")
                size = (int(self.width), int(self.height))
                img = img.resize(size, Image.Resampling.LANCZOS)
            buffer = io.BytesIO()
            img.save(buffer, format="PNG")
            screenshot = buffer.getvalue()

@@ -1,4 +1,4 @@
from typing import Any, Dict, List
from typing import Any, Dict, List, cast
from anthropic.types.beta import BetaToolUnionParam
from computer.computer import Computer

@@ -37,7 +37,7 @@ class ToolManager(BaseToolManager):
        """Get tool parameters for Anthropic API calls."""
        if self.tools is None:
            raise RuntimeError("Tools not initialized. Call initialize() first.")
        return self.tools.to_params()
        return cast(List[BetaToolUnionParam], self.tools.to_params())

    async def execute_tool(self, name: str, tool_input: dict[str, Any]) -> ToolResult:
        """Execute a tool with the given input.

@@ -126,15 +126,18 @@ class ExperimentManager:
        # Since we no longer want to use the images/ folder, we'll skip this functionality
        return

    def save_screenshot(self, img_base64: str, action_type: str = "") -> None:
    def save_screenshot(self, img_base64: str, action_type: str = "") -> Optional[str]:
        """Save a screenshot to the experiment directory.

        Args:
            img_base64: Base64 encoded screenshot
            action_type: Type of action that triggered the screenshot

        Returns:
            Optional[str]: Path to the saved screenshot, or None if saving failed
        """
        if not self.current_turn_dir:
            return
            return None

        try:
            # Increment screenshot counter
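With the return type widened to Optional[str], callers can tell whether the screenshot was actually written. A hypothetical caller illustrating the new contract:

path = experiment_manager.save_screenshot(img_base64, action_type="left_click")
if path is None:
    logger.warning("Screenshot not saved: no current turn directory or an error occurred")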
@@ -13,6 +13,7 @@ import asyncio
from httpx import ConnectError, ReadTimeout
import shutil
import copy
from typing import cast

from .parser import OmniParser, ParseResult, ParserMetadata, UIElement
from ...core.loop import BaseLoop

@@ -182,8 +183,6 @@ class OmniLoop(BaseLoop):

        if self.provider == LLMProvider.OPENAI:
            self.client = OpenAIClient(api_key=self.api_key, model=self.model)
        elif self.provider == LLMProvider.GROQ:
            self.client = GroqClient(api_key=self.api_key, model=self.model)
        elif self.provider == LLMProvider.ANTHROPIC:
            self.client = AnthropicClient(
                api_key=self.api_key,

@@ -329,10 +328,15 @@ class OmniLoop(BaseLoop):
            raise RuntimeError(error_message)

    async def _handle_response(
        self, response: Any, messages: List[Dict[str, Any]], parsed_screen: Dict[str, Any]
        self, response: Any, messages: List[Dict[str, Any]], parsed_screen: ParseResult
    ) -> Tuple[bool, bool]:
        """Handle API response.

        Args:
            response: API response
            messages: List of messages to update
            parsed_screen: Current parsed screen information

        Returns:
            Tuple of (should_continue, action_screenshot_saved)
        """

@@ -394,7 +398,9 @@ class OmniLoop(BaseLoop):

                    try:
                        # Execute action with current parsed screen info
                        await self._execute_action(parsed_content, parsed_screen)
                        await self._execute_action(
                            parsed_content, cast(ParseResult, parsed_screen)
                        )
                        action_screenshot_saved = True
                    except Exception as e:
                        logger.error(f"Error executing action: {str(e)}")

@@ -463,7 +469,7 @@ class OmniLoop(BaseLoop):

                    try:
                        # Execute action with current parsed screen info
                        await self._execute_action(parsed_content, parsed_screen)
                        await self._execute_action(parsed_content, cast(ParseResult, parsed_screen))
                        action_screenshot_saved = True
                    except Exception as e:
                        logger.error(f"Error executing action: {str(e)}")

@@ -488,7 +494,7 @@ class OmniLoop(BaseLoop):

                    try:
                        # Execute action with current parsed screen info
                        await self._execute_action(content, parsed_screen)
                        await self._execute_action(content, cast(ParseResult, parsed_screen))
                        action_screenshot_saved = True
                    except Exception as e:
                        logger.error(f"Error executing action: {str(e)}")

@@ -122,8 +122,9 @@ class OmniParser:
            # Create a minimal valid result for error cases
            return ParseResult(
                elements=[],
                screen_info=None,
                annotated_image_base64="",
                parsed_content_list=[f"Error: {str(e)}"],
                parsed_content_list=[{"error": str(e)}],
                metadata=ParserMetadata(
                    image_size=(0, 0),
                    num_icons=0,

@@ -2,7 +2,6 @@

from .bash import OmniBashTool
from .computer import OmniComputerTool
from .edit import OmniEditTool
from .manager import OmniToolManager

__all__ = [

@@ -177,7 +177,7 @@ class OmniComputerTool(BaseComputerTool):
                    keys = text.split("+")
                    await self.computer.interface.hotkey(*keys)
                else:
                    await self.computer.interface.press(text)
                    await self.computer.interface.press_key(text)

                # Take screenshot after action
                screenshot = await self.computer.interface.screenshot()

@@ -188,7 +188,8 @@ class OmniComputerTool(BaseComputerTool):
                )
            elif action == "cursor_position":
                pos = await self.computer.interface.get_cursor_position()
                return ToolResult(output=f"X={int(pos[0])},Y={int(pos[1])}")
                x, y = pos
                return ToolResult(output=f"X={int(x)},Y={int(y)}")
            elif action == "scroll":
                if direction == "down":
                    self.logger.info(f"Scrolling down, amount: {amount}")

@@ -10,7 +10,6 @@ from ....core.tools.collection import ToolCollection

from .bash import OmniBashTool
from .computer import OmniComputerTool
from .edit import OmniEditTool


class ProviderType(Enum):

@@ -35,11 +34,10 @@ class OmniToolManager(BaseToolManager):
        # Initialize tools
        self.computer_tool = OmniComputerTool(self.computer)
        self.bash_tool = OmniBashTool(self.computer)
        self.edit_tool = OmniEditTool(self.computer)

    def _initialize_tools(self) -> ToolCollection:
        """Initialize all available tools."""
        return ToolCollection(self.computer_tool, self.bash_tool, self.edit_tool)
        return ToolCollection(self.computer_tool, self.bash_tool)

    async def _initialize_tools_specific(self) -> None:
        """Initialize provider-specific tool requirements."""

@@ -96,7 +96,7 @@ def compress_image_base64(
        # Resize image
        new_width = int(img.width * scale_factor)
        new_height = int(img.height * scale_factor)
        current_img = img.resize((new_width, new_height), Image.LANCZOS)
        current_img = img.resize((new_width, new_height), Image.Resampling.LANCZOS)

        # Try with reduced size and quality
        buffer = io.BytesIO()

@@ -130,7 +130,9 @@ def compress_image_base64(

    # Last resort: Use minimum quality and size
    buffer = io.BytesIO()
    smallest_img = img.resize((int(img.width * 0.5), int(img.height * 0.5)), Image.LANCZOS)
    smallest_img = img.resize(
        (int(img.width * 0.5), int(img.height * 0.5)), Image.Resampling.LANCZOS
    )
    # Convert to RGB if necessary
    if smallest_img.mode in ("RGBA", "LA") or (
        smallest_img.mode == "P" and "transparency" in smallest_img.info
@@ -1,23 +1,20 @@
"""Type definitions for the agent package."""

from .base import Provider, HostConfig, TaskResult, Annotation
from .base import HostConfig, TaskResult, Annotation
from .messages import Message, Request, Response, StepMessage, DisengageMessage
from .tools import ToolInvocation, ToolInvocationState, ClientAttachment, ToolResult

__all__ = [
    # Base types
    "Provider",
    "HostConfig",
    "TaskResult",
    "Annotation",

    # Message types
    "Message",
    "Request",
    "Response",
    "StepMessage",
    "DisengageMessage",

    # Tool types
    "ToolInvocation",
    "ToolInvocationState",

@@ -5,17 +5,6 @@ from typing import Dict, Any
from pydantic import BaseModel, ConfigDict


class Provider(str, Enum):
    """Available AI providers."""

    UNKNOWN = "unknown"  # Default provider for base class
    ANTHROPIC = "anthropic"
    OPENAI = "openai"
    OLLAMA = "ollama"
    OMNI = "omni"
    GROQ = "groq"


class HostConfig(BaseModel):
    """Host configuration."""

@@ -48,6 +37,5 @@ class AgentLoop(Enum):
    """Enumeration of available loop types."""

    ANTHROPIC = auto()  # Anthropic implementation
    OPENAI = auto()  # OpenAI implementation
    OMNI = auto()  # OmniLoop implementation
    # Add more loop types as needed

@@ -1,6 +1,14 @@
from typing import Optional, List, Literal, Dict, Any, Union, TYPE_CHECKING, cast
from pylume import PyLume
from pylume.models import VMRunOpts, VMUpdateOpts, ImageRef, SharedDirectory
from pylume.models import (
    VMRunOpts,
    VMUpdateOpts,
    ImageRef,
    SharedDirectory,
    VMStatus,
    VMConfig,
    CloneSpec,
)
import asyncio
from .models import Computer as ComputerConfig, Display
from .interface.factory import InterfaceFactory

@@ -13,6 +21,7 @@ from .logger import Logger, LogLevel
import json
import logging
from .telemetry import record_computer_initialization
import os

OSType = Literal["macos", "linux"]

@@ -36,6 +45,8 @@ class Computer:
        use_host_computer_server: bool = False,
        verbosity: Union[int, LogLevel] = logging.INFO,
        telemetry_enabled: bool = True,
        port: Optional[int] = 3000,
        host: str = os.environ.get("PYLUME_HOST", "localhost"),
    ):
        """Initialize a new Computer instance.

@@ -55,6 +66,8 @@ class Computer:
            verbosity: Logging level (standard Python logging levels: logging.DEBUG, logging.INFO, etc.)
                LogLevel enum values are still accepted for backward compatibility
            telemetry_enabled: Whether to enable telemetry tracking. Defaults to True.
            port: Optional port to use for the PyLume server
            host: Host to use for PyLume connections (e.g. "localhost", "host.docker.internal")
        """
        if TYPE_CHECKING:
            from .interface.base import BaseComputerInterface

@@ -64,6 +77,8 @@ class Computer:

        # Store original parameters
        self.image = image
        self.port = port
        self.host = host

        # Store telemetry preference
        self._telemetry_enabled = telemetry_enabled

@@ -185,6 +200,26 @@ class Computer:
        if not self._pylume_context:
            try:
                self.logger.verbose("Initializing PyLume context...")

                # Configure PyLume based on initialization parameters
                pylume_kwargs = {
                    "debug": self.verbosity <= LogLevel.DEBUG,
                    "server_start_timeout": 120,  # Increase timeout to 2 minutes
                }

                # Add port if specified
                if hasattr(self, "port") and self.port is not None:
                    pylume_kwargs["port"] = self.port
                    self.logger.verbose(f"Using specified port for PyLume: {self.port}")

                # Add host if specified
                if hasattr(self, "host") and self.host != "localhost":
                    pylume_kwargs["host"] = self.host
                    self.logger.verbose(f"Using specified host for PyLume: {self.host}")

                # Create PyLume instance with configured parameters
                self.config.pylume = PyLume(**pylume_kwargs)

                self._pylume_context = await self.config.pylume.__aenter__()  # type: ignore[attr-defined]
                self.logger.verbose("PyLume context initialized successfully")
            except Exception as e:
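Together, the new host and port parameters and this pylume_kwargs plumbing let a containerized client reach a PyLume server running on the Docker host. A hedged usage sketch, reusing the constructor arguments shown in the deleted test below:

from computer import Computer

computer = Computer(
    display={"width": 1920, "height": 1080},
    memory="16GB",
    cpu="4",
    port=3000,                    # forwarded into PyLume(**pylume_kwargs)
    host="host.docker.internal",  # or set via the PYLUME_HOST environment variable
)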
@@ -8,7 +8,12 @@ from typing import Any
TELEMETRY_AVAILABLE = False

try:
    from core.telemetry import record_event, increment, is_telemetry_enabled
    from core.telemetry import (
        record_event,
        increment,
        is_telemetry_enabled,
        is_telemetry_globally_disabled,
    )

    def increment_counter(counter_name: str, value: int = 1) -> None:
        """Wrapper for increment to maintain backward compatibility."""

@@ -75,14 +80,8 @@ def enable_telemetry() -> bool:

    # Try to import and enable
    try:
        from core.telemetry import (
            is_telemetry_globally_disabled,
        )

        # Check again after import
        if is_telemetry_globally_disabled():
            logger.info("Telemetry is globally disabled via environment variable - cannot enable")
            return False
        # Verify we can import core telemetry
        from core.telemetry import record_event  # type: ignore

        TELEMETRY_AVAILABLE = True
        logger.info("Telemetry successfully enabled")

@@ -1,18 +0,0 @@
"""Basic tests for the computer package."""

import pytest
from computer import Computer


def test_computer_import():
    """Test that we can import the Computer class."""
    assert Computer is not None


def test_computer_init():
    """Test that we can create a Computer instance."""
    computer = Computer(
        display={"width": 1920, "height": 1080},
        memory="16GB",
        cpu="4",
        use_host_computer_server=True
    )
    assert computer is not None
24
libs/core/core/telemetry/sender.py
Normal file
@@ -0,0 +1,24 @@
"""Telemetry sender module for sending anonymous usage data."""

import logging
from typing import Any, Dict

logger = logging.getLogger("cua.telemetry")


def send_telemetry(payload: Dict[str, Any]) -> bool:
    """Send telemetry data to collection endpoint.

    Args:
        payload: Telemetry data to send

    Returns:
        bool: True if sending was successful, False otherwise
    """
    try:
        # For now, just log the payload and return success
        logger.debug(f"Would send telemetry: {payload}")
        return True
    except Exception as e:
        logger.debug(f"Error sending telemetry: {e}")
        return False
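As written, send_telemetry is a stub: it logs the payload at debug level and reports success without performing any network I/O. A usage sketch grounded in the module above, with a hypothetical payload:

from core.telemetry.sender import send_telemetry

ok = send_telemetry({"event": "counter_increment", "value": 1})
assert ok is True  # only False if logging itself raised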
411
libs/core/pdm.lock
generated
@@ -1,411 +0,0 @@
# This file is @generated by PDM.
# It is not intended for manual editing.

[metadata]
groups = ["default", "dev"]
strategy = []
lock_version = "4.5.0"
content_hash = "sha256:012f523673653e261a7b65007c36c67b540b2477da9bf3a71a849ae36aeeb7b1"

[[metadata.targets]]
requires_python = ">=3.10,<3.13"
@@ -1,154 +0,0 @@
"""Tests for the PostHog telemetry client."""

import os
from unittest.mock import MagicMock, patch

import pytest

from core.telemetry.posthog_client import (
    PostHogTelemetryClient,
    TelemetryConfig,
    get_posthog_config,
    get_posthog_telemetry_client,
)


@pytest.fixture
def mock_environment():
    """Set up and tear down environment variables for testing."""
    original_env = os.environ.copy()
    os.environ["CUA_TELEMETRY_SAMPLE_RATE"] = "100"
    # Remove PostHog env vars as they're hardcoded now
    # os.environ["CUA_POSTHOG_API_KEY"] = "test-api-key"
    # os.environ["CUA_POSTHOG_HOST"] = "https://test.posthog.com"

    yield

    # Restore original environment
    os.environ.clear()
    os.environ.update(original_env)


@pytest.fixture
def mock_disabled_environment():
    """Set up and tear down environment variables with telemetry disabled."""
    original_env = os.environ.copy()
    os.environ["CUA_TELEMETRY"] = "off"
    os.environ["CUA_TELEMETRY_SAMPLE_RATE"] = "100"
    # Remove PostHog env vars as they're hardcoded now
    # os.environ["CUA_POSTHOG_API_KEY"] = "test-api-key"
    # os.environ["CUA_POSTHOG_HOST"] = "https://test.posthog.com"

    yield

    # Restore original environment
    os.environ.clear()
    os.environ.update(original_env)


class TestTelemetryConfig:
    """Tests for telemetry configuration."""

    def test_from_env_defaults(self):
        """Test loading config from environment with defaults."""
        # Clear relevant environment variables
        with patch.dict(
            os.environ,
            {
                k: v
                for k, v in os.environ.items()
                if k not in ["CUA_TELEMETRY", "CUA_TELEMETRY_SAMPLE_RATE"]
            },
        ):
            config = TelemetryConfig.from_env()
            assert config.enabled is True  # Default is now enabled
            assert config.sample_rate == 5
            assert config.project_root is None

    def test_from_env_with_vars(self, mock_environment):
        """Test loading config from environment variables."""
        config = TelemetryConfig.from_env()
        assert config.enabled is True
        assert config.sample_rate == 100
        assert config.project_root is None

    def test_from_env_disabled(self, mock_disabled_environment):
        """Test disabling telemetry via environment variable."""
        config = TelemetryConfig.from_env()
        assert config.enabled is False
        assert config.sample_rate == 100
        assert config.project_root is None

    def test_to_dict(self):
        """Test converting config to dictionary."""
        config = TelemetryConfig(enabled=True, sample_rate=50)
        config_dict = config.to_dict()
        assert config_dict == {"enabled": True, "sample_rate": 50}


class TestPostHogConfig:
    """Tests for PostHog configuration."""

    def test_get_posthog_config(self):
        """Test getting PostHog config."""
        config = get_posthog_config()
        assert config is not None
        assert config["api_key"] == "phc_eSkLnbLxsnYFaXksif1ksbrNzYlJShr35miFLDppF14"
        assert config["host"] == "https://eu.i.posthog.com"


class TestPostHogTelemetryClient:
    """Tests for PostHog telemetry client."""

    @patch("posthog.capture")
    @patch("posthog.identify")
    def test_initialization(self, mock_identify, mock_capture, mock_environment):
        """Test client initialization."""
        client = PostHogTelemetryClient()
        assert client.config.enabled is True
        assert client.initialized is True
        mock_identify.assert_called_once()

    @patch("posthog.capture")
    def test_increment_counter(self, mock_capture, mock_environment):
        """Test incrementing a counter."""
        client = PostHogTelemetryClient()
        client.increment("test_counter", 5)
        mock_capture.assert_called_once()
        args, kwargs = mock_capture.call_args
        assert kwargs["event"] == "counter_increment"
        assert kwargs["properties"]["counter_name"] == "test_counter"
        assert kwargs["properties"]["value"] == 5

    @patch("posthog.capture")
    def test_record_event(self, mock_capture, mock_environment):
        """Test recording an event."""
        client = PostHogTelemetryClient()
        client.record_event("test_event", {"param": "value"})
        mock_capture.assert_called_once()
        args, kwargs = mock_capture.call_args
        assert kwargs["event"] == "test_event"
        assert kwargs["properties"]["param"] == "value"

    @patch("posthog.capture")
    def test_disabled_client(self, mock_capture, mock_environment):
        """Test that disabled client doesn't send events."""
        client = PostHogTelemetryClient()
        client.disable()
        client.increment("test_counter")
        client.record_event("test_event")
        mock_capture.assert_not_called()

    @patch("posthog.flush")
    def test_flush(self, mock_flush, mock_environment):
        """Test flushing events."""
        client = PostHogTelemetryClient()
        result = client.flush()
        assert result is True
        mock_flush.assert_called_once()

    def test_global_client(self, mock_environment):
        """Test global client initialization."""
        client1 = get_posthog_telemetry_client()
        client2 = get_posthog_telemetry_client()
        assert client1 is client2  # Same instance
|
||||
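The tests above double as documentation of the telemetry environment contract. As a minimal sketch (not part of this commit) of opting out from application code, setting the same variables the fixtures use before the config is read is all that is needed:

    import os

    os.environ["CUA_TELEMETRY"] = "off"  # "off" disables telemetry, as exercised by mock_disabled_environment
    os.environ["CUA_TELEMETRY_SAMPLE_RATE"] = "100"  # sampling percentage read by TelemetryConfig

    config = TelemetryConfig.from_env()
    assert config.enabled is False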
@@ -1,169 +0,0 @@
"""Tests for the telemetry module."""

import os
from pathlib import Path
from unittest.mock import MagicMock, patch

import pytest

from core.telemetry import (
    UniversalTelemetryClient,
    disable_telemetry,
    enable_telemetry,
    get_telemetry_client,
)


@pytest.fixture
def mock_project_root(tmp_path):
    """Create a temporary directory for testing."""
    return tmp_path


@pytest.fixture
def mock_environment():
    """Set up and tear down environment variables for testing."""
    original_env = os.environ.copy()
    os.environ["CUA_TELEMETRY_SAMPLE_RATE"] = "100"

    yield

    # Restore original environment
    os.environ.clear()
    os.environ.update(original_env)


@pytest.fixture
def mock_disabled_environment():
    """Set up environment variables with telemetry disabled."""
    original_env = os.environ.copy()
    os.environ["CUA_TELEMETRY"] = "off"
    os.environ["CUA_TELEMETRY_SAMPLE_RATE"] = "100"

    yield

    # Restore original environment
    os.environ.clear()
    os.environ.update(original_env)


class TestTelemetryClient:
    """Tests for the universal telemetry client."""

    @patch("core.telemetry.telemetry.POSTHOG_AVAILABLE", True)
    @patch("core.telemetry.telemetry.get_posthog_telemetry_client")
    def test_initialization(self, mock_get_posthog, mock_project_root, mock_environment):
        """Test client initialization."""
        mock_client = MagicMock()
        mock_get_posthog.return_value = mock_client

        client = UniversalTelemetryClient(mock_project_root)
        assert client._client is not None
        mock_get_posthog.assert_called_once_with(mock_project_root)

    @patch("core.telemetry.telemetry.POSTHOG_AVAILABLE", True)
    @patch("core.telemetry.telemetry.get_posthog_telemetry_client")
    def test_increment(self, mock_get_posthog, mock_project_root, mock_environment):
        """Test incrementing counters."""
        mock_client = MagicMock()
        mock_get_posthog.return_value = mock_client

        client = UniversalTelemetryClient(mock_project_root)
        client.increment("test_counter", 5)

        mock_client.increment.assert_called_once_with("test_counter", 5)

    @patch("core.telemetry.telemetry.POSTHOG_AVAILABLE", True)
    @patch("core.telemetry.telemetry.get_posthog_telemetry_client")
    def test_record_event(self, mock_get_posthog, mock_project_root, mock_environment):
        """Test recording events."""
        mock_client = MagicMock()
        mock_get_posthog.return_value = mock_client

        client = UniversalTelemetryClient(mock_project_root)
        client.record_event("test_event", {"prop1": "value1"})

        mock_client.record_event.assert_called_once_with("test_event", {"prop1": "value1"})

    @patch("core.telemetry.telemetry.POSTHOG_AVAILABLE", True)
    @patch("core.telemetry.telemetry.get_posthog_telemetry_client")
    def test_flush(self, mock_get_posthog, mock_project_root, mock_environment):
        """Test flushing telemetry data."""
        mock_client = MagicMock()
        mock_client.flush.return_value = True
        mock_get_posthog.return_value = mock_client

        client = UniversalTelemetryClient(mock_project_root)
        result = client.flush()

        assert result is True
        mock_client.flush.assert_called_once()

    @patch("core.telemetry.telemetry.POSTHOG_AVAILABLE", True)
    @patch("core.telemetry.telemetry.get_posthog_telemetry_client")
    def test_enable_disable(self, mock_get_posthog, mock_project_root):
        """Test enabling and disabling telemetry."""
        mock_client = MagicMock()
        mock_get_posthog.return_value = mock_client

        client = UniversalTelemetryClient(mock_project_root)

        client.enable()
        mock_client.enable.assert_called_once()

        client.disable()
        mock_client.disable.assert_called_once()


def test_get_telemetry_client():
    """Test the global client getter."""
    # Reset global state
    from core.telemetry.telemetry import _universal_client

    _universal_client = None

    with patch("core.telemetry.telemetry.UniversalTelemetryClient") as mock_client_class:
        mock_client = MagicMock()
        mock_client_class.return_value = mock_client

        # First call should create a new client
        client1 = get_telemetry_client()
        assert client1 is mock_client
        mock_client_class.assert_called_once()

        # Second call should return the same client
        client2 = get_telemetry_client()
        assert client2 is client1
        assert mock_client_class.call_count == 1


def test_disable_telemetry():
    """Test the global disable function."""
    # Reset global state
    from core.telemetry.telemetry import _universal_client

    _universal_client = None

    with patch("core.telemetry.telemetry.get_telemetry_client") as mock_get_client:
        mock_client = MagicMock()
        mock_get_client.return_value = mock_client

        # Disable globally
        disable_telemetry()
        mock_client.disable.assert_called_once()


def test_enable_telemetry():
    """Test the global enable function."""
    # Reset global state
    from core.telemetry.telemetry import _universal_client

    _universal_client = None

    with patch("core.telemetry.telemetry.get_telemetry_client") as mock_get_client:
        mock_client = MagicMock()
        mock_get_client.return_value = mock_client

        # Enable globally
        enable_telemetry()
        mock_client.enable.assert_called_once()
@@ -1,4 +0,0 @@
#!/bin/sh

swift build --product lume
codesign --force --entitlement resources/lume.entitlements --sign - .build/debug/lume
@@ -1,187 +0,0 @@
#!/bin/bash

# Set default log level if not provided
LOG_LEVEL=${LOG_LEVEL:-"normal"}

# Function to log based on level
log() {
    local level=$1
    local message=$2

    case "$LOG_LEVEL" in
        "minimal")
            # Only show essential or error messages
            if [ "$level" = "essential" ] || [ "$level" = "error" ]; then
                echo "$message"
            fi
            ;;
        "none")
            # Show nothing except errors
            if [ "$level" = "error" ]; then
                echo "$message" >&2
            fi
            ;;
        *)
            # Normal logging - show everything
            echo "$message"
            ;;
    esac
}

# Check required environment variables
required_vars=(
    "CERT_APPLICATION_NAME"
    "CERT_INSTALLER_NAME"
    "APPLE_ID"
    "TEAM_ID"
    "APP_SPECIFIC_PASSWORD"
)

for var in "${required_vars[@]}"; do
    if [ -z "${!var}" ]; then
        log "error" "Error: $var is not set"
        exit 1
    fi
done

# Get VERSION from environment or use default
VERSION=${VERSION:-"0.1.0"}

# Move to the project root directory
pushd ../../ > /dev/null

# Ensure .release directory exists and is clean
mkdir -p .release
log "normal" "Ensuring .release directory exists and is accessible"

# Build the release version
log "essential" "Building release version..."
swift build -c release --product lume > /dev/null

# Sign the binary with hardened runtime entitlements
log "essential" "Signing binary with entitlements..."
codesign --force --options runtime \
    --entitlement ./resources/lume.entitlements \
    --sign "$CERT_APPLICATION_NAME" \
    .build/release/lume 2> /dev/null

# Create a temporary directory for packaging
TEMP_ROOT=$(mktemp -d)
mkdir -p "$TEMP_ROOT/usr/local/bin"
cp -f .build/release/lume "$TEMP_ROOT/usr/local/bin/"

# Build the installer package
log "essential" "Building installer package..."
pkgbuild --root "$TEMP_ROOT" \
    --identifier "com.trycua.lume" \
    --version "1.0" \
    --install-location "/" \
    --sign "$CERT_INSTALLER_NAME" \
    ./.release/lume.pkg 2> /dev/null

# Submit for notarization using stored credentials
log "essential" "Submitting for notarization..."
if [ "$LOG_LEVEL" = "minimal" ] || [ "$LOG_LEVEL" = "none" ]; then
    # Minimal output - capture ID but hide details
    NOTARY_OUTPUT=$(xcrun notarytool submit ./.release/lume.pkg \
        --apple-id "${APPLE_ID}" \
        --team-id "${TEAM_ID}" \
        --password "${APP_SPECIFIC_PASSWORD}" \
        --wait 2>&1)

    # Just show success or failure
    if echo "$NOTARY_OUTPUT" | grep -q "status: Accepted"; then
        log "essential" "Notarization successful!"
    else
        log "error" "Notarization failed. Please check logs."
    fi
else
    # Normal verbose output
    xcrun notarytool submit ./.release/lume.pkg \
        --apple-id "${APPLE_ID}" \
        --team-id "${TEAM_ID}" \
        --password "${APP_SPECIFIC_PASSWORD}" \
        --wait
fi

# Staple the notarization ticket
log "essential" "Stapling notarization ticket..."
xcrun stapler staple ./.release/lume.pkg > /dev/null 2>&1

# Create temporary directory for package extraction
EXTRACT_ROOT=$(mktemp -d)
PKG_PATH="$(pwd)/.release/lume.pkg"

# Extract the pkg using xar
cd "$EXTRACT_ROOT"
xar -xf "$PKG_PATH" > /dev/null 2>&1

# Verify Payload exists before proceeding
if [ ! -f "Payload" ]; then
    log "error" "Error: Payload file not found after xar extraction"
    exit 1
fi

# Create a directory for the extracted contents
mkdir -p extracted
cd extracted

# Extract the Payload
cat ../Payload | gunzip -dc | cpio -i > /dev/null 2>&1

# Verify the binary exists
if [ ! -f "usr/local/bin/lume" ]; then
    log "error" "Error: lume binary not found in expected location"
    exit 1
fi

# Get the release directory absolute path
RELEASE_DIR="$(realpath "$(dirname "$PKG_PATH")")"
log "normal" "Using release directory: $RELEASE_DIR"

# Copy extracted lume to the release directory
cp -f usr/local/bin/lume "$RELEASE_DIR/lume"

# Create symbolic link in /usr/local/bin if not in minimal mode
if [ "$LOG_LEVEL" != "minimal" ] && [ "$LOG_LEVEL" != "none" ]; then
    log "normal" "Creating symbolic link..."
    sudo ln -sf "$RELEASE_DIR/lume" /usr/local/bin/lume
fi

# Get architecture and create OS identifier
ARCH=$(uname -m)
OS_IDENTIFIER="darwin-${ARCH}"

# Create versioned archives of the package with OS identifier in the name
log "essential" "Creating archives in $RELEASE_DIR..."
cd "$RELEASE_DIR"

# Clean up any existing artifacts first to avoid conflicts
rm -f lume-*.tar.gz lume-*.pkg.tar.gz

# Create version-specific archives
log "essential" "Creating version-specific archives (${VERSION})..."
# Package the binary
tar -czf "lume-${VERSION}-${OS_IDENTIFIER}.tar.gz" lume > /dev/null 2>&1
# Package the installer
tar -czf "lume-${VERSION}-${OS_IDENTIFIER}.pkg.tar.gz" lume.pkg > /dev/null 2>&1

# Create sha256 checksum file
log "essential" "Generating checksums..."
shasum -a 256 lume-*.tar.gz > checksums.txt
log "essential" "Package created successfully with checksums generated."

# Show what's in the release directory
log "essential" "Files in release directory:"
ls -la "$RELEASE_DIR"

# Ensure correct permissions
chmod 644 "$RELEASE_DIR"/*.tar.gz "$RELEASE_DIR"/*.pkg.tar.gz "$RELEASE_DIR"/checksums.txt

popd > /dev/null

# Clean up
rm -rf "$TEMP_ROOT"
rm -rf "$EXTRACT_ROOT"

log "essential" "Build and packaging completed successfully."
@@ -1,15 +0,0 @@
#!/bin/sh

pushd ../../

swift build -c release --product lume
codesign --force --entitlement ./resources/lume.entitlements --sign - .build/release/lume

mkdir -p ./.release
cp -f .build/release/lume ./.release/lume

# Create symbolic link in /usr/local/bin
sudo mkdir -p /usr/local/bin
sudo ln -sf "$(pwd)/.release/lume" /usr/local/bin/lume

popd
@@ -33,18 +33,22 @@ from .exceptions import (
)

# Type variable for the decorator
T = TypeVar('T')
T = TypeVar("T")


def ensure_server(func: Callable[..., T]) -> Callable[..., T]:
    """Decorator to ensure server is running before executing the method."""

    @wraps(func)
    async def wrapper(self: 'PyLume', *args: Any, **kwargs: Any) -> T:
    async def wrapper(self: "PyLume", *args: Any, **kwargs: Any) -> T:
        # ensure_running is an async method, so we need to await it
        await self.server.ensure_running()
        # Initialize client if needed
        await self._init_client()
        return await func(self, *args, **kwargs)  # type: ignore
    return wrapper  # type: ignore
        return await func(self, *args, **kwargs)  # type: ignore

    return wrapper  # type: ignore


class PyLume:
    def __init__(
@@ -52,10 +56,11 @@ class PyLume:
        debug: bool = False,
        server_start_timeout: int = 60,
        port: Optional[int] = None,
        use_existing_server: bool = False
        use_existing_server: bool = False,
        host: str = "localhost",
    ):
        """Initialize the async PyLume client.

        Args:
            debug: Enable debug logging
            auto_start_server: Whether to automatically start the lume server if not running
@@ -63,27 +68,35 @@ class PyLume:
            port: Port number for the lume server. Required when use_existing_server is True.
            use_existing_server: If True, will try to connect to an existing server on the specified port
                instead of starting a new one.
            host: Host to use for connections (e.g., "localhost", "127.0.0.1", "host.docker.internal")
        """
        if use_existing_server and port is None:
            raise LumeConfigError("Port must be specified when using an existing server")

        self.server = LumeServer(
            debug=debug,
            debug=debug,
            server_start_timeout=server_start_timeout,
            port=port,
            use_existing_server=use_existing_server
            use_existing_server=use_existing_server,
            host=host,
        )
        self.client = None

    async def __aenter__(self) -> 'PyLume':
    async def __aenter__(self) -> "PyLume":
        """Async context manager entry."""
        if self.server.use_existing_server:
            # Just set up the base URL and initialize client for existing server
            self.server.port = self.server.requested_port
            self.server.base_url = f"http://localhost:{self.server.port}/lume"
        else:
            await self.server.ensure_running()

            # Just ensure base_url is set for existing server
            if self.server.requested_port is None:
                raise LumeConfigError("Port must be specified when using an existing server")

            if not self.server.base_url:
                self.server.port = self.server.requested_port
                self.server.base_url = f"http://{self.server.host}:{self.server.port}/lume"

        # Ensure the server is running (will connect to existing or start new as needed)
        await self.server.ensure_running()

        # Initialize the client
        await self._init_client()
        return self
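As a usage sketch (hypothetical values, not part of this diff), the new host parameter lets a caller attach to a lume server that is already running elsewhere, for example on the Docker host:

    # Assumes a lume server is already listening on the host at port 3000
    async with PyLume(port=3000, use_existing_server=True, host="host.docker.internal") as pylume:
        await pylume.run_vm("my-vm")  # "my-vm" is a placeholder VM name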
@@ -98,11 +111,7 @@ class PyLume:
        if self.client is None:
            if self.server.base_url is None:
                raise RuntimeError("Server base URL not set")
            self.client = LumeClient(
                base_url=self.server.base_url,
                timeout=300.0,
                debug=self.server.debug
            )
            self.client = LumeClient(self.server.base_url, debug=self.server.debug)

    def _log_debug(self, message: str, **kwargs) -> None:
        """Log debug information if debug mode is enabled."""
@@ -117,19 +126,17 @@ class PyLume:
            raise LumeConnectionError(f"Failed to connect to PyLume server: {str(e)}")
        elif isinstance(e, asyncio.TimeoutError):
            raise LumeTimeoutError(f"Request timed out: {str(e)}")

        if not hasattr(e, 'status') and not isinstance(e, subprocess.CalledProcessError):

        if not hasattr(e, "status") and not isinstance(e, subprocess.CalledProcessError):
            raise LumeServerError(f"Unknown error during {operation}: {str(e)}")

        status_code = getattr(e, 'status', 500)

        status_code = getattr(e, "status", 500)
        response_text = str(e)

        self._log_debug(
            f"{operation} request failed",
            status_code=status_code,
            response_text=response_text
            f"{operation} request failed", status_code=status_code, response_text=response_text
        )

        if status_code == 404:
            raise LumeNotFoundError(f"Resource not found during {operation}")
        elif status_code == 400:
@@ -138,13 +145,11 @@ class PyLume:
            raise LumeServerError(
                f"Server error during {operation}",
                status_code=status_code,
                response_text=response_text
                response_text=response_text,
            )
        else:
            raise LumeServerError(
                f"Error during {operation}",
                status_code=status_code,
                response_text=response_text
                f"Error during {operation}", status_code=status_code, response_text=response_text
            )

    async def _read_output(self) -> None:
@@ -163,7 +168,7 @@ class PyLume:
                    break
                line = line.strip()
                self._log_debug(f"Server stdout: {line}")
                if "Server started" in line.decode('utf-8'):
                if "Server started" in line.decode("utf-8"):
                    self._log_debug("Detected server started message")
                    return

@@ -175,7 +180,7 @@ class PyLume:
                    break
                line = line.strip()
                self._log_debug(f"Server stderr: {line}")
                if "error" in line.decode('utf-8').lower():
                if "error" in line.decode("utf-8").lower():
                    raise RuntimeError(f"Server error: {line}")

            await asyncio.sleep(0.1)  # Small delay to prevent CPU spinning
@@ -188,10 +193,10 @@ class PyLume:
        """Create a VM with the given configuration."""
        # Ensure client is initialized
        await self._init_client()

        if isinstance(spec, VMConfig):
            spec = spec.model_dump(by_alias=True, exclude_none=True)

        # Suppress optional attribute access errors
        self.client.print_curl("POST", "/vms", spec)  # type: ignore[attr-defined]
        await self.client.post("/vms", spec)  # type: ignore[attr-defined]
@@ -200,10 +205,10 @@ class PyLume:
    async def run_vm(self, name: str, opts: Optional[Union[VMRunOpts, dict]] = None) -> None:
        """Run a VM."""
        if opts is None:
            opts = VMRunOpts(no_display=False) # type: ignore[attr-defined]
            opts = VMRunOpts(no_display=False)  # type: ignore[attr-defined]
        elif isinstance(opts, dict):
            opts = VMRunOpts(**opts)

        payload = opts.model_dump(by_alias=True, exclude_none=True)
        self.client.print_curl("POST", f"/vms/{name}/run", payload)  # type: ignore[attr-defined]
        await self.client.post(f"/vms/{name}/run", payload)  # type: ignore[attr-defined]
@@ -225,7 +230,7 @@ class PyLume:
        """Update VM settings."""
        if isinstance(params, dict):
            params = VMUpdateOpts(**params)

        payload = params.model_dump(by_alias=True, exclude_none=True)
        self.client.print_curl("PATCH", f"/vms/{name}", payload)  # type: ignore[attr-defined]
        await self.client.patch(f"/vms/{name}", payload)  # type: ignore[attr-defined]
@@ -241,7 +246,9 @@ class PyLume:
        await self.client.delete(f"/vms/{name}")  # type: ignore[attr-defined]

    @ensure_server
    async def pull_image(self, spec: Union[ImageRef, dict, str], name: Optional[str] = None) -> None:
    async def pull_image(
        self, spec: Union[ImageRef, dict, str], name: Optional[str] = None
    ) -> None:
        """Pull a VM image."""
        await self._init_client()
        if isinstance(spec, str):
@@ -261,14 +268,14 @@ class PyLume:
            image_str = f"{spec.image}:{spec.tag}"
            registry = spec.registry
            organization = spec.organization

        payload = {
            "image": image_str,
            "name": name,
            "registry": registry,
            "organization": organization
            "organization": organization,
        }

        self.client.print_curl("POST", "/pull", payload)  # type: ignore[attr-defined]
        await self.client.post("/pull", payload, timeout=300.0)  # type: ignore[attr-defined]

@@ -305,4 +312,4 @@ class PyLume:
    async def _ensure_client(self) -> None:
        """Ensure client is initialized."""
        if self.client is None:
            await self._init_client()
            await self._init_client()

@@ -9,16 +9,31 @@ from typing import Optional
import sys
from .exceptions import LumeConnectionError
import signal
import json
import shlex
import random
from logging import getLogger


class LumeServer:
    def __init__(
        self,
        debug: bool = False,
        self,
        debug: bool = False,
        server_start_timeout: int = 60,
        port: Optional[int] = None,
        use_existing_server: bool = False
        use_existing_server: bool = False,
        host: str = "localhost",
    ):
        """Initialize the LumeServer."""
        """Initialize the LumeServer.

        Args:
            debug: Enable debug logging
            server_start_timeout: Timeout in seconds to wait for server to start
            port: Specific port to use for the server
            use_existing_server: If True, will try to connect to an existing server
                instead of starting a new one
            host: Host to use for connections (e.g., "localhost", "127.0.0.1", "host.docker.internal")
        """
        self.debug = debug
        self.server_start_timeout = server_start_timeout
        self.server_process = None
@@ -27,72 +42,58 @@ class LumeServer:
        self.port = None
        self.base_url = None
        self.use_existing_server = use_existing_server

        self.host = host

        # Configure logging
        self.logger = logging.getLogger('lume_server')
        self.logger = getLogger("pylume.server")
        if not self.logger.handlers:
            handler = logging.StreamHandler()
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
            handler.setFormatter(formatter)
            self.logger.addHandler(handler)
            self.logger.setLevel(logging.DEBUG if debug else logging.INFO)

        self.logger.debug(f"Server initialized with host: {self.host}")

    def _check_port_available(self, port: int) -> bool:
        """Check if a specific port is available."""
        """Check if a port is available."""
        try:
            # Create a socket
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.logger.debug(f"Created socket for port {port} check")

            # Set socket options
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self.logger.debug("Set SO_REUSEADDR")

            # Bind to the port
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.settimeout(0.5)
                result = s.connect_ex(("127.0.0.1", port))
                if result == 0:  # Port is in use on localhost
                    return False
        except:
            pass

        # Check the specified host (e.g., "host.docker.internal") if it's not a localhost alias
        if self.host not in ["localhost", "127.0.0.1"]:
            try:
                s.bind(('127.0.0.1', port))
                self.logger.debug(f"Successfully bound to port {port}")
                s.listen(1)
                self.logger.debug(f"Successfully listening on port {port}")
                s.close()
                self.logger.debug(f"Port {port} is available")
                return True
            except OSError as e:
                self.logger.debug(f"Failed to bind to port {port}: {str(e)}")
                return False
            finally:
                try:
                    s.close()
                    self.logger.debug("Socket closed")
                except:
                    pass

        except Exception as e:
            self.logger.debug(f"Unexpected error checking port {port}: {str(e)}")
            return False
                with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                    s.settimeout(0.5)
                    result = s.connect_ex((self.host, port))
                    if result == 0:  # Port is in use on host
                        return False
            except:
                pass

        return True

    def _get_server_port(self) -> int:
        """Get and validate the server port."""
        from .exceptions import LumeConfigError

        if self.requested_port is None:
            raise LumeConfigError("Port must be specified when starting a new server")

        self.logger.debug(f"Checking availability of port {self.requested_port}")

        # Try multiple times with a small delay
        for attempt in range(3):
            if attempt > 0:
                self.logger.debug(f"Retrying port check (attempt {attempt + 1})")
                time.sleep(1)

            if self._check_port_available(self.requested_port):
                self.logger.debug(f"Port {self.requested_port} is available")
                return self.requested_port
            else:
                self.logger.debug(f"Port {self.requested_port} check failed on attempt {attempt + 1}")

        raise LumeConfigError(f"Requested port {self.requested_port} is not available after 3 attempts")
        """Get an available port for the server."""
        # Use requested port if specified
        if self.requested_port is not None:
            if not self._check_port_available(self.requested_port):
                raise RuntimeError(f"Requested port {self.requested_port} is not available")
            return self.requested_port

        # Find a free port
        for _ in range(10):  # Try up to 10 times
            port = random.randint(49152, 65535)
            if self._check_port_available(port):
                return port

        raise RuntimeError("Could not find an available port")
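    # Note on the port probe above (added commentary, not commit text):
    # socket.connect_ex() returns 0 when something is already accepting
    # connections on the port, so 0 means "busy" and a nonzero error code
    # means "looks free". 49152-65535 is the IANA dynamic/private port
    # range, which is why the fallback search draws random candidates from it.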
    async def _ensure_server_running(self) -> None:
        """Ensure the lume server is running, start it if it's not."""
@@ -101,35 +102,33 @@ class LumeServer:
            # Try to connect to the server with a short timeout
            cmd = ["curl", "-s", "-w", "%{http_code}", "-m", "5", f"{self.base_url}/vms"]
            process = await asyncio.create_subprocess_exec(
                *cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
                *cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
            )
            stdout, stderr = await process.communicate()

            if process.returncode == 0:
                response = stdout.decode()
                status_code = int(response[-3:])
                if status_code == 200:
                    self.logger.debug("PyLume server is running")
                    return

            self.logger.debug("PyLume server not running, attempting to start it")
            # Server not running, try to start it
            lume_path = os.path.join(os.path.dirname(__file__), "lume")
            if not os.path.exists(lume_path):
                raise RuntimeError(f"Could not find lume binary at {lume_path}")

            # Make sure the file is executable
            os.chmod(lume_path, 0o755)

            # Create a temporary file for server output
            self.output_file = tempfile.NamedTemporaryFile(mode='w+', delete=False)
            self.output_file = tempfile.NamedTemporaryFile(mode="w+", delete=False)
            self.logger.debug(f"Using temporary file for server output: {self.output_file.name}")

            # Start the server
            self.logger.debug(f"Starting lume server with: {lume_path} serve --port {self.port}")

            # Start server in background using subprocess.Popen
            try:
                self.server_process = subprocess.Popen(
@@ -137,19 +136,21 @@ class LumeServer:
                    stdout=self.output_file,
                    stderr=self.output_file,
                    cwd=os.path.dirname(lume_path),
                    start_new_session=True  # Run in new session to avoid blocking
                    start_new_session=True,  # Run in new session to avoid blocking
                )
            except Exception as e:
                self.output_file.close()
                os.unlink(self.output_file.name)
                raise RuntimeError(f"Failed to start lume server process: {str(e)}")

            # Wait for server to start
            self.logger.debug(f"Waiting up to {self.server_start_timeout} seconds for server to start...")
            self.logger.debug(
                f"Waiting up to {self.server_start_timeout} seconds for server to start..."
            )
            start_time = time.time()
            server_ready = False
            last_size = 0

            while time.time() - start_time < self.server_start_timeout:
                if self.server_process.poll() is not None:
                    # Process has terminated
@@ -163,7 +164,7 @@ class LumeServer:
                        f"Output: {output}"
                    )
                    raise RuntimeError(error_msg)

                # Check output file for server ready message
                self.output_file.seek(0, os.SEEK_END)
                size = self.output_file.tell()
@@ -173,22 +174,20 @@ class LumeServer:
                    if new_output.strip():  # Only log non-empty output
                        self.logger.debug(f"Server output: {new_output.strip()}")
                    last_size = size

                    if "Server started" in new_output:
                        server_ready = True
                        self.logger.debug("Server startup detected")
                        break

                # Try to connect to the server periodically
                try:
                    cmd = ["curl", "-s", "-w", "%{http_code}", "-m", "5", f"{self.base_url}/vms"]
                    process = await asyncio.create_subprocess_exec(
                        *cmd,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE
                        *cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
                    )
                    stdout, stderr = await process.communicate()

                    if process.returncode == 0:
                        response = stdout.decode()
                        status_code = int(response[-3:])
@@ -198,9 +197,9 @@ class LumeServer:
                            break
                except:
                    pass  # Server not ready yet

                await asyncio.sleep(1.0)

            if not server_ready:
                # Cleanup if server didn't start
                if self.server_process:
@@ -215,29 +214,27 @@ class LumeServer:
                    f"Failed to start lume server after {self.server_start_timeout} seconds. "
                    "Check the debug output for more details."
                )

            # Give the server a moment to fully initialize
            await asyncio.sleep(2.0)

            # Verify server is responding
            try:
                cmd = ["curl", "-s", "-w", "%{http_code}", "-m", "10", f"{self.base_url}/vms"]
                process = await asyncio.create_subprocess_exec(
                    *cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE
                    *cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
                )
                stdout, stderr = await process.communicate()

                if process.returncode != 0:
                    raise RuntimeError(f"Curl command failed: {stderr.decode()}")

                response = stdout.decode()
                status_code = int(response[-3:])

                if status_code != 200:
                    raise RuntimeError(f"Server returned status code {status_code}")

                self.logger.debug("PyLume server started successfully")
            except Exception as e:
                self.logger.debug(f"Server verification failed: {str(e)}")
@@ -250,16 +247,16 @@ class LumeServer:
                    self.output_file.close()
                    os.unlink(self.output_file.name)
                raise RuntimeError(f"Server started but is not responding: {str(e)}")

            self.logger.debug("Server startup completed successfully")

        except Exception as e:
            raise RuntimeError(f"Failed to start lume server: {str(e)}")

    async def _start_server(self) -> None:
        """Start the lume server using the lume executable."""
        self.logger.debug("Starting PyLume server")

        # Get absolute path to lume executable in the same directory as this file
        lume_path = os.path.join(os.path.dirname(__file__), "lume")
        if not os.path.exists(lume_path):
@@ -268,24 +265,25 @@ class LumeServer:
        try:
            # Make executable
            os.chmod(lume_path, 0o755)

            # Get and validate port
            self.port = self._get_server_port()
            self.base_url = f"http://localhost:{self.port}/lume"
            self.base_url = f"http://{self.host}:{self.port}/lume"

            # Set up output handling
            self.output_file = tempfile.NamedTemporaryFile(mode='w+', delete=False)

            self.output_file = tempfile.NamedTemporaryFile(mode="w+", delete=False)

            # Start the server process with the lume executable
            env = os.environ.copy()
            env["RUST_BACKTRACE"] = "1"  # Enable backtrace for better error reporting

            # Specify the host to bind to (0.0.0.0 to allow external connections)
            self.server_process = subprocess.Popen(
                [lume_path, "serve", "--port", str(self.port)],
                stdout=self.output_file,
                stderr=subprocess.STDOUT,
                cwd=os.path.dirname(lume_path),  # Run from same directory as executable
                env=env
                env=env,
            )

            # Wait for server to initialize
@@ -300,13 +298,13 @@ class LumeServer:
        """Read and display server log output in debug mode."""
        while True:
            try:
                self.output_file.seek(0, os.SEEK_END) # type: ignore[attr-defined]
                line = self.output_file.readline() # type: ignore[attr-defined]
                self.output_file.seek(0, os.SEEK_END)  # type: ignore[attr-defined]
                line = self.output_file.readline()  # type: ignore[attr-defined]
                if line:
                    line = line.strip()
                    if line:
                        print(f"SERVER: {line}")
                if self.server_process.poll() is not None: # type: ignore[attr-defined]
                if self.server_process.poll() is not None:  # type: ignore[attr-defined]
                    print("Server process ended")
                    break
                await asyncio.sleep(0.1)
@@ -318,11 +316,11 @@ class LumeServer:
        """Wait for server to start and become responsive with increased timeout."""
        start_time = time.time()
        while time.time() - start_time < self.server_start_timeout:
            if self.server_process.poll() is not None: # type: ignore[attr-defined]
            if self.server_process.poll() is not None:  # type: ignore[attr-defined]
                error_msg = await self._get_error_output()
                await self._cleanup()
                raise RuntimeError(error_msg)

            try:
                await self._verify_server()
                self.logger.debug("Server is now responsive")
@@ -330,30 +328,36 @@ class LumeServer:
            except Exception as e:
                self.logger.debug(f"Server not ready yet: {str(e)}")
                await asyncio.sleep(1.0)

        await self._cleanup()
        raise RuntimeError(f"Server failed to start after {self.server_start_timeout} seconds")

    async def _verify_server(self) -> None:
        """Verify server is responding to requests."""
        try:
            cmd = ["curl", "-s", "-w", "%{http_code}", "-m", "10", f"{self.base_url}/vms"]
            cmd = [
                "curl",
                "-s",
                "-w",
                "%{http_code}",
                "-m",
                "10",
                f"http://{self.host}:{self.port}/lume/vms",
            ]
            process = await asyncio.create_subprocess_exec(
                *cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
                *cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
            )
            stdout, stderr = await process.communicate()

            if process.returncode != 0:
                raise RuntimeError(f"Curl command failed: {stderr.decode()}")

            response = stdout.decode()
            status_code = int(response[-3:])

            if status_code != 200:
                raise RuntimeError(f"Server returned status code {status_code}")

            self.logger.debug("PyLume server started successfully")
        except Exception as e:
            raise RuntimeError(f"Server not responding: {str(e)}")
@@ -366,7 +370,7 @@ class LumeServer:
            output = self.output_file.read()
            return (
                f"Server process terminated unexpectedly.\n"
                f"Exit code: {self.server_process.returncode}\n" # type: ignore[attr-defined]
                f"Exit code: {self.server_process.returncode}\n"  # type: ignore[attr-defined]
                f"Output: {output}"
            )

@@ -393,12 +397,84 @@ class LumeServer:
            self.output_file = None

    async def ensure_running(self) -> None:
        """Start the server if we're managing it."""
        if not self.use_existing_server:
        """Ensure the server is running.

        If use_existing_server is True, will only try to connect to an existing server.
        Otherwise will:
        1. Try to connect to an existing server on the specified port
        2. If that fails and not in Docker, start a new server
        3. If in Docker and no existing server is found, raise an error
        """
        # First check if we're in Docker
        in_docker = os.path.exists("/.dockerenv") or (
            os.path.exists("/proc/1/cgroup") and "docker" in open("/proc/1/cgroup", "r").read()
        )

        # If using a non-localhost host like host.docker.internal, set up the connection details
        if self.host not in ["localhost", "127.0.0.1"]:
            if self.requested_port is None:
                raise RuntimeError("Port must be specified when using a remote host")

            self.port = self.requested_port
            self.base_url = f"http://{self.host}:{self.port}/lume"
            self.logger.debug(f"Using remote host server at {self.base_url}")

            # Try to verify the server is accessible
            try:
                await self._verify_server()
                self.logger.debug("Successfully connected to remote server")
                return
            except Exception as e:
                if self.use_existing_server or in_docker:
                    # If explicitly requesting an existing server or in Docker, we can't start a new one
                    raise RuntimeError(
                        f"Failed to connect to remote server at {self.base_url}: {str(e)}"
                    )
                else:
                    self.logger.debug(f"Remote server not available at {self.base_url}: {str(e)}")
                    # Fall back to localhost for starting a new server
                    self.host = "localhost"

        # If explicitly using an existing server, verify it's running
        if self.use_existing_server:
            if self.requested_port is None:
                raise RuntimeError("Port must be specified when using an existing server")

            self.port = self.requested_port
            self.base_url = f"http://{self.host}:{self.port}/lume"

            try:
                await self._verify_server()
                self.logger.debug("Successfully connected to existing server")
            except Exception as e:
                raise RuntimeError(
                    f"Failed to connect to existing server at {self.base_url}: {str(e)}"
                )
        else:
            # Try to connect to an existing server first
            if self.requested_port is not None:
                self.port = self.requested_port
                self.base_url = f"http://{self.host}:{self.port}/lume"

                try:
                    await self._verify_server()
                    self.logger.debug("Successfully connected to existing server")
                    return
                except Exception:
                    self.logger.debug(f"No existing server found at {self.base_url}")

            # If in Docker and can't connect to existing server, raise an error
            if in_docker:
                raise RuntimeError(
                    f"Failed to connect to server at {self.base_url} and cannot start a new server in Docker"
                )

            # Start a new server
            self.logger.debug("Starting a new server instance")
            await self._start_server()

    async def stop(self) -> None:
        """Stop the server if we're managing it."""
        if not self.use_existing_server:
            self.logger.debug("Stopping lume server...")
            await self._cleanup()
        await self._cleanup()
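To make the fallback order above concrete, here is a hypothetical sketch (names match the code in this diff; the port value is an assumption) of a containerized caller attaching to a lume server on the Docker host:

    # Inside a container: never start a server, only attach to the host's
    server = LumeServer(port=3000, use_existing_server=True, host="host.docker.internal")
    await server.ensure_running()  # verifies http://host.docker.internal:3000/lume/vms answers 200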
@@ -1,3 +0,0 @@
"""
PyLume tests package
"""
@@ -1,20 +0,0 @@
"""
Basic tests for the pylume package
"""
import pytest


def test_import():
    """Test that the package can be imported"""
    import pylume
    try:
        assert pylume.__version__ == "0.1.0"
    except AttributeError:
        # If __version__ is not defined, that's okay for this test
        pass


def test_pylume_import():
    """Test that the PyLume class can be imported"""
    from pylume import PyLume
    assert PyLume is not None
@@ -1,5 +1,5 @@
from pathlib import Path
from typing import Union, List, Dict, Any, Tuple, Optional
from typing import Union, List, Dict, Any, Tuple, Optional, cast
import logging
import torch
import torchvision.ops
@@ -179,16 +179,23 @@ class OmniParser:
        logger.info(f"Found {len(icon_detections)} interactive elements")

        # Convert icon detections to typed objects
        elements: List[UIElement] = [
            IconElement(
                bbox=BoundingBox(
                    x1=det["bbox"][0], y1=det["bbox"][1], x2=det["bbox"][2], y2=det["bbox"][3]
                ),
                confidence=det["confidence"],
                scale=det.get("scale"),
            )
            for det in icon_detections
        ]
        elements: List[UIElement] = cast(
            List[UIElement],
            [
                IconElement(
                    id=i + 1,
                    bbox=BoundingBox(
                        x1=det["bbox"][0],
                        y1=det["bbox"][1],
                        x2=det["bbox"][2],
                        y2=det["bbox"][3],
                    ),
                    confidence=det["confidence"],
                    scale=det.get("scale"),
                )
                for i, det in enumerate(icon_detections)
            ],
        )

        # Run OCR if enabled
        if use_ocr:
@@ -198,21 +205,25 @@ class OmniParser:
                text_detections = []
            logger.info(f"Found {len(text_detections)} text regions")

            # Convert text detections to typed objects
            # Convert text detections to typed objects and extend the list
            elements.extend(
                [
                    TextElement(
                        bbox=BoundingBox(
                            x1=det["bbox"][0],
                            y1=det["bbox"][1],
                            x2=det["bbox"][2],
                            y2=det["bbox"][3],
                        ),
                        content=det["content"],
                        confidence=det["confidence"],
                    )
                    for det in text_detections
                ]
                cast(
                    List[UIElement],
                    [
                        TextElement(
                            id=len(elements) + i + 1,
                            bbox=BoundingBox(
                                x1=det["bbox"][0],
                                y1=det["bbox"][1],
                                x2=det["bbox"][2],
                                y2=det["bbox"][3],
                            ),
                            content=det["content"],
                            confidence=det["confidence"],
                        )
                        for i, det in enumerate(text_detections)
                    ],
                )
            )

        # Calculate drawing parameters based on image size
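A brief note on the cast() calls introduced above (my reading of the change, not text from the commit): the comprehensions produce List[IconElement] and List[TextElement], and cast(List[UIElement], ...) only tells the type checker to treat them as the declared List[UIElement]; it has no runtime effect. The new id arguments number icons first (i + 1) and continue the sequence for text elements (len(elements) + i + 1), so every detected element gets a unique identifier.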
97
scripts/run-docker-dev.sh
Executable file
@@ -0,0 +1,97 @@
#!/bin/bash

# Colors for output
GREEN='\033[0;32m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Print with color
print_info() {
    echo -e "${BLUE}==> $1${NC}"
}

print_success() {
    echo -e "${GREEN}==> $1${NC}"
}

print_error() {
    echo -e "${RED}==> $1${NC}"
}

# Docker image name
IMAGE_NAME="cua-dev-image"
CONTAINER_NAME="cua-dev-container"
PLATFORM="linux/arm64"

# Environment variables
PYTHONPATH="/app/libs/core:/app/libs/computer:/app/libs/agent:/app/libs/som:/app/libs/pylume:/app/libs/computer-server"

# Check if Docker is installed
if ! command -v docker &> /dev/null; then
    print_error "Docker is not installed. Please install Docker first."
    exit 1
fi

# Command options
case "$1" in
    build)
        print_info "Building the development Docker image..."
        print_info "This will install all dependencies but won't include source code"
        docker build -f Dockerfile --platform=${PLATFORM} -t ${IMAGE_NAME} .
        print_success "Development Docker image built successfully!"
        ;;

    run)
        # Check for interactive flag
        if [ "$2" == "--interactive" ]; then
            print_info "Running the development Docker container with interactive shell..."
            print_info "Mounting source code from host"
            print_info "Connecting to host.docker.internal:3000"

            docker run -it --rm \
                --platform=${PLATFORM} \
                --name ${CONTAINER_NAME} \
                -v "$(pwd):/app" \
                -e PYTHONPATH=${PYTHONPATH} \
                -e DISPLAY=${DISPLAY:-:0} \
                -e PYLUME_HOST="host.docker.internal" \
                ${IMAGE_NAME} bash
        else
            # Run the specified example
            if [ -z "$2" ]; then
                print_error "Please specify an example file, e.g., ./run-docker-dev.sh run computer_examples.py"
                exit 1
            fi
            print_info "Running example: $2"
            print_info "Connecting to host.docker.internal:3000"

            docker run -it --rm \
                --platform=${PLATFORM} \
                --name ${CONTAINER_NAME} \
                -v "$(pwd):/app" \
                -e PYTHONPATH=${PYTHONPATH} \
                -e DISPLAY=${DISPLAY:-:0} \
                -e PYLUME_HOST="host.docker.internal" \
                ${IMAGE_NAME} python "/app/examples/$2"
        fi
        ;;

    stop)
        print_info "Stopping any running containers..."
        docker stop ${CONTAINER_NAME} 2>/dev/null || true
        print_success "Done!"
        ;;

    *)
        echo "Usage: $0 {build|run [--interactive] [filename]|stop}"
        echo ""
        echo "Commands:"
        echo "  build                    Build the development Docker image with dependencies"
        echo "  run [example_filename]   Run the specified example file in the container"
        echo "  run --interactive        Run the container with mounted code and get an interactive shell"
        echo "  stop                     Stop the container"
        exit 1
esac

exit 0
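A typical session with this helper, assuming it is run from the repository root (the example filename is a placeholder taken from the script's own error message):

    ./scripts/run-docker-dev.sh build                      # one-time image build
    ./scripts/run-docker-dev.sh run computer_examples.py   # run one example against the host's lume server
    ./scripts/run-docker-dev.sh run --interactive          # or drop into a shell in the container
    ./scripts/run-docker-dev.sh stop                       # clean up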