From d930032c82551e55bf03571fe59965ba67a6a6ca Mon Sep 17 00:00:00 2001
From: Dillon DuPont
Date: Wed, 19 Nov 2025 10:52:50 -0500
Subject: [PATCH] Fix cua adapter

---
 .../agent/agent/adapters/cua_adapter.py | 89 +++++++++++++++++--
 1 file changed, 81 insertions(+), 8 deletions(-)

diff --git a/libs/python/agent/agent/adapters/cua_adapter.py b/libs/python/agent/agent/adapters/cua_adapter.py
index 890ae342..1bc2ca24 100644
--- a/libs/python/agent/agent/adapters/cua_adapter.py
+++ b/libs/python/agent/agent/adapters/cua_adapter.py
@@ -19,25 +19,98 @@ class CUAAdapter(CustomLLM):
         return model.split("/", 1)[1] if model and model.startswith("cua/") else model
 
     def completion(self, *args, **kwargs) -> ModelResponse:
+        model = kwargs.get("model", "")
+        api_base = kwargs.get("api_base") or self.base_url
+        if "anthropic/" in model:
+            # Anthropic endpoints are served at the bare host, not under /v1.
+            model = f"anthropic/{self._normalize_model(model)}"
+            api_base = api_base.removesuffix("/v1")
+        else:
+            model = f"openai/{self._normalize_model(model)}"
+
         params = {
-            "model": f"openai/{self._normalize_model(kwargs.get("model", ""))}",
+            "model": model,
             "messages": kwargs.get("messages", []),
-            "api_base": self.base_url,
-            "api_key": self.api_key,
+            "api_base": api_base,
+            "api_key": kwargs.get("api_key") or self.api_key,
             "stream": False,
         }
-        
+
+        if "optional_params" in kwargs:
+            params.update(kwargs["optional_params"])
+            del kwargs["optional_params"]
+
+        if "headers" in kwargs:
+            params["headers"] = kwargs["headers"]
+            del kwargs["headers"]
+
+        # Print dropped parameters
+        original_keys = set(kwargs.keys())
+        used_keys = set(params.keys())  # Only these are extracted from kwargs
+        ignored_keys = {
+            "litellm_params",
+            "client",
+            "print_verbose",
+            "acompletion",
+            "timeout",
+            "logging_obj",
+            "encoding",
+            "custom_prompt_dict",
+            "model_response",
+            "logger_fn",
+        }
+        dropped_keys = original_keys - used_keys - ignored_keys
+        if dropped_keys:
+            dropped_keyvals = {k: kwargs[k] for k in dropped_keys}
+            # print(f"CUAAdapter.completion: Dropped parameters: {dropped_keyvals}")
+
         return completion(**params)  # type: ignore
 
     async def acompletion(self, *args, **kwargs) -> ModelResponse:
+        model = kwargs.get("model", "")
+        api_base = kwargs.get("api_base") or self.base_url
+        if "anthropic/" in model:
+            model = f"anthropic/{self._normalize_model(model)}"
+            api_base = api_base.removesuffix("/v1")
+        else:
+            model = f"openai/{self._normalize_model(model)}"
+
         params = {
-            "model": f"openai/{self._normalize_model(kwargs.get("model", ""))}",
+            "model": model,
             "messages": kwargs.get("messages", []),
-            "api_base": self.base_url,
-            "api_key": self.api_key,
+            "api_base": api_base,
+            "api_key": kwargs.get("api_key") or self.api_key,
             "stream": False,
         }
-        
+
+        if "optional_params" in kwargs:
+            params.update(kwargs["optional_params"])
+            del kwargs["optional_params"]
+
+        if "headers" in kwargs:
+            params["headers"] = kwargs["headers"]
+            del kwargs["headers"]
+
+        # Print dropped parameters
+        original_keys = set(kwargs.keys())
+        used_keys = set(params.keys())  # Only these are extracted from kwargs
+        ignored_keys = {
+            "litellm_params",
+            "client",
+            "print_verbose",
+            "acompletion",
+            "timeout",
+            "logging_obj",
+            "encoding",
+            "custom_prompt_dict",
+            "model_response",
+            "logger_fn",
+        }
+        dropped_keys = original_keys - used_keys - ignored_keys
+        if dropped_keys:
+            dropped_keyvals = {k: kwargs[k] for k in dropped_keys}
+            # print(f"CUAAdapter.acompletion: Dropped parameters: {dropped_keyvals}")
+
         response = await acompletion(**params)  # type: ignore
         return response