Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions packages/gooddata-sdk/src/gooddata_sdk/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -264,6 +264,10 @@
CatalogDependentEntitiesResponse,
CatalogEntityIdentifier,
)
from gooddata_sdk.catalog.workspace.entity_model.resolved_llm_provider import (
CatalogResolvedLlmModel,
CatalogResolvedLlmProvider,
)
from gooddata_sdk.catalog.workspace.entity_model.user_data_filter import (
CatalogUserDataFilter,
CatalogUserDataFilterAttributes,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# (C) 2026 GoodData Corporation
from __future__ import annotations

import attrs

from gooddata_sdk.catalog.base import Base


@attrs.define(kw_only=True)
class CatalogResolvedLlmModel(Base):
    """A single LLM model exposed by a resolved LLM provider.

    Carries the model identifier (e.g. ``"gpt-4o"``) together with the name of
    the model family it belongs to (e.g. ``"OPENAI"``).
    """

    # Identifier of the model within the provider.
    id: str
    # Model family the model belongs to.
    family: str

    @staticmethod
    def client_class() -> type:
        # Lazy import keeps the generated API client off the module import path.
        from gooddata_api_client.model.llm_model import LlmModel as ApiLlmModel

        return ApiLlmModel


@attrs.define(kw_only=True)
class CatalogResolvedLlmProvider(Base):
    """The resolved LLM provider configuration for a workspace.

    Holds the provider's identity (``id``, ``title``) and the list of models it
    makes available, as :class:`CatalogResolvedLlmModel` instances.
    """

    id: str
    title: str
    models: list[CatalogResolvedLlmModel] = attrs.field(factory=list)

    @staticmethod
    def client_class() -> type:
        # Lazy import keeps the generated API client off the module import path.
        from gooddata_api_client.model.resolved_llm_provider import ResolvedLlmProvider

        return ResolvedLlmProvider

    @classmethod
    def from_dict(cls, data: dict, camel_case: bool = True) -> CatalogResolvedLlmProvider:
        """Build a provider from a plain dictionary payload.

        Args:
            data (dict): Mapping with ``id`` and ``title`` keys and an optional
                ``models`` list of ``{"id": ..., "family": ...}`` entries.
            camel_case (bool): Accepted for signature compatibility with
                ``Base.from_dict`` (NOTE(review): assumed base signature — confirm);
                the keys read here are single words, identical in either case.

        Returns:
            CatalogResolvedLlmProvider: The parsed provider.

        Raises:
            KeyError: If ``id`` or ``title`` (or a model's ``id``/``family``)
                is missing from the payload.
        """
        # ``models`` may be absent or explicitly null in the payload; treat both as empty.
        models = [CatalogResolvedLlmModel(id=m["id"], family=m["family"]) for m in (data.get("models") or [])]
        return cls(
            id=data["id"],
            title=data["title"],
            models=models,
        )
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,9 @@
CatalogFilterView,
CatalogFilterViewDocument,
)
from gooddata_sdk.catalog.workspace.entity_model.resolved_llm_provider import (
CatalogResolvedLlmProvider,
)
from gooddata_sdk.catalog.workspace.entity_model.user_data_filter import (
CatalogUserDataFilter,
CatalogUserDataFilterDocument,
Expand Down Expand Up @@ -239,6 +242,30 @@ def resolve_workspace_settings(self, workspace_id: str, settings: list) -> dict:
]
return {setting["type"]: setting for setting in resolved_workspace_settings}

def resolve_llm_providers(self, workspace_id: str) -> CatalogResolvedLlmProvider | None:
"""Get the active LLM provider configuration for a workspace.

Resolves the LLM provider currently active for the given workspace and returns it
as a :class:`CatalogResolvedLlmProvider` object, or ``None`` if no LLM provider
is configured.

Args:
workspace_id (str): Workspace identification string e.g. ``"demo"``.

Returns:
CatalogResolvedLlmProvider | None:
The resolved LLM provider or ``None`` if none is configured.
"""
response = self._client.actions_api.resolve_llm_providers(
workspace_id,
_check_return_type=False,
)
response_dict = response.to_dict()
data = response_dict.get("data")
if data is None:
return None
return CatalogResolvedLlmProvider.from_dict(data)

# Declarative methods - workspaces

def get_declarative_workspaces(self, exclude: list[str] | None = None) -> CatalogDeclarativeWorkspaces:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
# (C) 2026 GoodData Corporation
from __future__ import annotations

import pytest
from gooddata_sdk.catalog.workspace.entity_model.resolved_llm_provider import (
CatalogResolvedLlmModel,
CatalogResolvedLlmProvider,
)


@pytest.mark.parametrize(
    "scenario, input_data, expected_id, expected_title, expected_model_count",
    [
        (
            "provider_with_models",
            {
                "id": "openai-provider",
                "title": "OpenAI Provider",
                "models": [{"id": "gpt-4o", "family": "OPENAI"}],
            },
            "openai-provider",
            "OpenAI Provider",
            1,
        ),
        (
            "provider_without_models",
            {
                "id": "bedrock-provider",
                "title": "AWS Bedrock Provider",
                "models": [],
            },
            "bedrock-provider",
            "AWS Bedrock Provider",
            0,
        ),
    ],
)
def test_resolved_llm_provider_from_dict(scenario, input_data, expected_id, expected_title, expected_model_count):
    """``from_dict`` yields a provider with the expected identity and model count."""
    resolved = CatalogResolvedLlmProvider.from_dict(input_data)
    assert (resolved.id, resolved.title) == (expected_id, expected_title)
    assert len(resolved.models) == expected_model_count


def test_resolved_llm_model_fields():
    """A directly constructed model retains the id and family it was given."""
    resolved_model = CatalogResolvedLlmModel(id="gpt-4o", family="OPENAI")
    assert (resolved_model.id, resolved_model.family) == ("gpt-4o", "OPENAI")


def test_resolved_llm_provider_models_populated():
    """``from_dict`` converts every ``models`` entry into a CatalogResolvedLlmModel."""
    payload = {
        "id": "openai-provider",
        "title": "OpenAI Provider",
        "models": [
            {"id": "gpt-4o", "family": "OPENAI"},
            {"id": "gpt-3.5-turbo", "family": "OPENAI"},
        ],
    }
    resolved = CatalogResolvedLlmProvider.from_dict(payload)
    assert [(model.id, model.family) for model in resolved.models] == [
        ("gpt-4o", "OPENAI"),
        ("gpt-3.5-turbo", "OPENAI"),
    ]
Loading