Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .env.template
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@ ASKUI_WORKSPACE_ID=
# OpenRouter
OPEN_ROUTER_API_KEY=

# Models
VLM_PROVIDER_MODEL_ID=

# Telemetry
ASKUI__VA__TELEMETRY__ENABLED=True # Set to "False" to disable telemetry

17 changes: 11 additions & 6 deletions docs/04_using_models.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,19 @@ with ComputerAgent() as agent:

## Configuring Model IDs

If you want to use another model, you select one of the available ones and set is through overriding the model_id in the provider:
If you want to use another model, you select one of the available ones and set it as an environment variable (**currently only supported for vlm_provider!**):
```
VLM_PROVIDER_MODEL_ID=claude-opus-4-6
```

Alternatively, you can also set it through overriding the model_id in the provider:

```python
from askui import AgentSettings, ComputerAgent
from askui.model_providers import AskUIVlmProvider, AskUIImageQAProvider

with ComputerAgent(settings=AgentSettings(
vlm_provider=AskUIVlmProvider(model_id="claude-opus-4-5-20251101"),
vlm_provider=AskUIVlmProvider(model_id="claude-opus-4-6"),
image_qa_provider=AskUIImageQAProvider(model_id="gemini-2.5-pro"),
)) as agent:
agent.act("Complete the checkout process")
Expand All @@ -34,11 +39,11 @@ with ComputerAgent(settings=AgentSettings(
The following models are available with your AskUI credentials through the AskUI API:

**VLM Provider** (for `act()`): Claude models via AskUI's Anthropic proxy
- `claude-haiku-4-5-20251001`
- `claude-sonnet-4-5-20250929` (default)
- `claude-haiku-4-5-20251001` (most cost-efficient)
- `claude-sonnet-4-5-20250929`
- `claude-opus-4-5-20251101`
- `claude-opus-4-6`(coming soon!)
- `claude-sonnet-4-6`(coming soon!)
- `claude-sonnet-4-6` (default)
- `claude-opus-4-6` (most capable)


**Image Q&A Provider** (for `get()`): Gemini models via AskUI's Gemini proxy
Expand Down
2 changes: 1 addition & 1 deletion docs/05_bring_your_own_model_provider.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ from askui.model_providers import AnthropicVlmProvider

with ComputerAgent(settings=AgentSettings(
vlm_provider=AnthropicVlmProvider(
model_id="claude-sonnet-4-5-20251101",
model_id="claude-opus-4-6",
),
)) as agent:
agent.act("Navigate to settings")
Expand Down
2 changes: 1 addition & 1 deletion src/askui/agent_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ class AgentSettings:
from askui.model_providers import AskUIVlmProvider, AskUIImageQAProvider

agent = ComputerAgent(settings=AgentSettings(
vlm_provider=AskUIVlmProvider(model_id=\"claude-opus-4-5-20251101\"),
vlm_provider=AskUIVlmProvider(model_id=\"claude-opus-4-6\"),
image_qa_provider=AskUIImageQAProvider(model_id=\"gemini-2.5-pro\"),
))
```
Expand Down
9 changes: 6 additions & 3 deletions src/askui/model_providers/anthropic_vlm_provider.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
"""AnthropicVlmProvider — VLM access via direct Anthropic API."""

import os
from functools import cached_property
from typing import Any

Expand Down Expand Up @@ -34,7 +35,7 @@ class AnthropicVlmProvider(VlmProvider):
auth_token (str | None, optional): Authorization token for custom
authentication. Added as an `Authorization` header.
model_id (str, optional): Claude model to use. Defaults to
`\"claude-sonnet-4-5-20251101\"`.
`\"claude-sonnet-4-6\"`.
client (Anthropic | None, optional): Pre-configured Anthropic client.
If provided, other connection parameters are ignored.

Expand All @@ -57,10 +58,12 @@ def __init__(
api_key: str | None = None,
base_url: str | None = None,
auth_token: str | None = None,
model_id: str = _DEFAULT_MODEL_ID,
model_id: str | None = None,
client: Anthropic | None = None,
) -> None:
self._model_id_value = model_id
self._model_id_value = (
model_id or os.environ.get("VLM_PROVIDER_MODEL_ID") or _DEFAULT_MODEL_ID
)
if client is not None:
self.client = client
else:
Expand Down
13 changes: 8 additions & 5 deletions src/askui/model_providers/askui_vlm_provider.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
"""AskUIVlmProvider — VLM access via AskUI's hosted Anthropic proxy."""

import os
from functools import cached_property
from typing import Any

Expand Down Expand Up @@ -33,7 +34,7 @@ class AskUIVlmProvider(VlmProvider):
token (str | None, optional): AskUI API token. Reads `ASKUI_TOKEN`
from the environment if not provided.
model_id (str, optional): Claude model to use. Defaults to
`"claude-sonnet-4-5-20250929"`.
`"claude-sonnet-4-6"`.
client (Anthropic | None, optional): Pre-configured Anthropic client.
If provided, `workspace_id` and `token` are ignored.

Expand All @@ -55,17 +56,19 @@ class AskUIVlmProvider(VlmProvider):
def __init__(
self,
askui_settings: AskUiInferenceApiSettings | None = None,
model_id: str = _DEFAULT_MODEL_ID,
model_id: str | None = None,
client: Anthropic | None = None,
) -> None:
self._askui_settings = askui_settings or AskUiInferenceApiSettings()
self._model_id = model_id
self._model_id_value = (
model_id or os.environ.get("VLM_PROVIDER_MODEL_ID") or _DEFAULT_MODEL_ID
)
self._injected_client = client

@property
@override
def model_id(self) -> str:
return self._model_id
return self._model_id_value

@cached_property
def _messages_api(self) -> AnthropicMessagesApi:
Expand Down Expand Up @@ -100,7 +103,7 @@ def create_message(
) -> MessageParam:
result: MessageParam = self._messages_api.create_message(
messages=messages,
model_id=self._model_id,
model_id=self._model_id_value,
tools=tools,
max_tokens=max_tokens,
system=system,
Expand Down