Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .copywrite.hcl
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,7 @@ project {
header_ignore = [
# "vendors/**",
# "**autogen**",
".venv",
"__pycache__"
]
}
13 changes: 13 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -40,3 +40,16 @@ my.*.tfvars
*.env
*.kubeconfig

## Python

__pycache__/
*.pyc
*.pyo
*.pyd
*.pyw
*.pyz
*.pywz
*.pyzw
*.pyzwz

.venv/
2 changes: 0 additions & 2 deletions .husky/commit-msg
Original file line number Diff line number Diff line change
@@ -1,3 +1 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx --no -- commitlint --edit ''
37 changes: 37 additions & 0 deletions Taskfile.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# Copyright (c) Spectro Cloud
# SPDX-License-Identifier: Apache-2.0

version: "3"

tasks:
  init:
    desc: Install dependencies and setup the project
    cmds:
      - echo "initializing npm dependencies"
      - npm ci
      - npx husky install

  help:
    desc: Display this help
    cmds:
      - task --list

  build-docker:
    desc: Build docker image
    cmds:
      - echo "Building docker image"
      # Each --build-arg forwards an environment variable of the same name
      # into the Dockerfile.
      - |
        docker build --build-arg PALETTE_VERSION=$PALETTE_VERSION \
        --build-arg PALETTE_CLI_VERSION=$PALETTE_CLI_VERSION \
        --build-arg PALETTE_EDGE_VERSION=$PALETTE_EDGE_VERSION \
        --build-arg PACKER_VERSION=$PACKER_VERSION \
        --build-arg ORAS_VERSION=$ORAS_VERSION \
        --build-arg TERRAFORM_VERSION=$TERRAFORM_VERSION \
        --build-arg K9S_VERSION=$K9S_VERSION \
        -t tutorials .

  license:
    desc: Adds a license header to all files. Reference https://github.com/hashicorp/copywrite to learn more.
    cmds:
      - echo "Applying license headers..."
      - copywrite headers
1 change: 1 addition & 0 deletions ai/palette-mcp/integrate-palette-mcp/.python-version
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
3.12
3 changes: 3 additions & 0 deletions ai/palette-mcp/integrate-palette-mcp/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Integrate Palette MCP

This folder contains the demo code for the Integrate Palette MCP tutorial. The user will learn how to integrate Palette MCP into a LangChain agent workflow.
11 changes: 11 additions & 0 deletions ai/palette-mcp/integrate-palette-mcp/Taskfile.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
version: "3"

dotenv: [".env", "../../../.env"]

tasks:
start-agent:
desc: Start the Palette MCP LangChain agent
env:
DEBUG: "info"
cmds:
- uv run python main.py {{.CLI_ARGS}}
3 changes: 3 additions & 0 deletions ai/palette-mcp/integrate-palette-mcp/agents/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Copyright (c) Spectro Cloud
# SPDX-License-Identifier: Apache-2.0

103 changes: 103 additions & 0 deletions ai/palette-mcp/integrate-palette-mcp/agents/active_cluster_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
# Copyright (c) Spectro Cloud
# SPDX-License-Identifier: Apache-2.0

"""Palette-focused agent for active cluster mapping."""

from __future__ import annotations

import importlib
from typing import Any

from helpers import build_palette_server_config, extract_text_response, suppress_console_output

ACTIVE_CLUSTER_SYSTEM_PROMPT = (
"You are a Palette active-cluster mapping specialist. "
"Use only Palette MCP tools to identify active clusters that use a provided set of cluster profile UIDs. "
"Return factual results only."
)

async def initialize_active_cluster_agent(
    model: str,
    debug_level: str,
    default_env_file: str,
    default_kubeconfig_dir: str,
    default_mcp_image: str,
) -> Any:
    """Create the LangChain agent used for active-cluster mapping.

    Args:
        model: OpenAI chat model name handed to ``ChatOpenAI``.
        debug_level: MCP console output is suppressed unless this is
            ``"verbose"``.
        default_env_file: Env file path forwarded to the MCP server config.
        default_kubeconfig_dir: Kubeconfig directory forwarded to the config.
        default_mcp_image: Image used to run the Palette MCP server.

    Returns:
        An agent wired with the Palette MCP tools, the active-cluster system
        prompt, and an in-memory checkpointer.
    """
    # Imported lazily so merely importing this module does not require the
    # heavy LangChain dependencies.
    from langchain.agents import create_agent
    from langchain_openai import ChatOpenAI

    saver_cls = importlib.import_module("langgraph.checkpoint.memory").InMemorySaver

    # Newer langchain_mcp_adapters releases expose the client from a
    # ``client`` submodule; older releases export it from the package root.
    try:
        client_cls = getattr(
            importlib.import_module("langchain_mcp_adapters.client"),
            "MultiServerMCPClient",
        )
    except (ImportError, AttributeError):
        client_cls = importlib.import_module(
            "langchain_mcp_adapters"
        ).MultiServerMCPClient

    server_config = build_palette_server_config(
        default_env_file=default_env_file,
        default_kubeconfig_dir=default_kubeconfig_dir,
        default_mcp_image=default_mcp_image,
    )
    client = client_cls(server_config)

    quiet = debug_level != "verbose"
    with suppress_console_output(quiet):
        tools = await client.get_tools()

    return create_agent(
        model=ChatOpenAI(model=model),
        tools=tools,
        system_prompt=ACTIVE_CLUSTER_SYSTEM_PROMPT,
        checkpointer=saver_cls(),
    )


async def run_active_cluster_agent(
    agent: Any,
    pack_name: str,
    matched_profiles_output: str,
    debug_level: str,
    run_id: str,
) -> str:
    """Ask the active-cluster agent which active clusters use the matched profiles.

    Args:
        agent: Agent built by ``initialize_active_cluster_agent``.
        pack_name: Pack name that the upstream profile discovery ran against.
        matched_profiles_output: Output of the profile-discovery agent
            (expected to be JSON); embedded verbatim into the prompt.
        debug_level: MCP/tool console output is suppressed unless this is
            ``"verbose"``.
        run_id: Mixed into the checkpointer thread id so separate runs do not
            share conversation state.

    Returns:
        The agent's final text response, which the prompt instructs to be
        JSON in the shape documented below.
    """
    # Only surface raw MCP/tool chatter when the caller asked for verbose output.
    hide_mcp_output = debug_level != "verbose"
    # The prompt pins an exact tool-call sequence and output schema so the
    # downstream reporter can parse the result deterministically.
    active_cluster_prompt = (
        f"Given this profile discovery result for pack '{pack_name}':\n"
        f"{matched_profiles_output}\n\n"
        "Required process:\n"
        "1) Extract matched profile UIDs from the input JSON.\n"
        "2) Call gather_or_delete_clusters with action='list' and active_only=true.\n"
        "3) For each active cluster uid from step 2, call gather_or_delete_clusters with action='get'.\n"
        "4) Match clusters using explicit profile UID fields only.\n"
        "5) If no clusters match, return an empty list and include every checked active cluster uid.\n\n"
        "Return JSON with this shape:\n"
        "{\n"
        '  "pack_name": "<pack>",\n'
        '  "target_profile_uids": ["<uid1>", "<uid2>"],\n'
        '  "total_active_clusters_scanned": <int>,\n'
        '  "active_clusters_using_matched_profiles": [\n'
        "    {\n"
        '      "uid": "<cluster_uid>",\n'
        '      "name": "<cluster_name>",\n'
        '      "cluster_profile_uid": "<profile_uid>",\n'
        '      "cluster_profile_name": "<profile_name>",\n'
        '      "evidence_field_path": "<json.path.to.profile.uid>",\n'
        '      "evidence": "<short evidence>"\n'
        "    }\n"
        "  ],\n"
        '  "checked_active_cluster_uids": ["<uid1>", "<uid2>"],\n'
        '  "notes": "<short note>"\n'
        "}\n"
    )
    # Per-run thread id keeps the InMemorySaver checkpoint isolated per pack/run.
    run_config = {
        "configurable": {"thread_id": f"active-cluster:{pack_name.lower()}:{run_id}"}
    }
    with suppress_console_output(hide_mcp_output):
        result = await agent.ainvoke(
            {"messages": [{"role": "user", "content": active_cluster_prompt}]},
            config=run_config,
        )
    return extract_text_response(result)
107 changes: 107 additions & 0 deletions ai/palette-mcp/integrate-palette-mcp/agents/palette_profile_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
# Copyright (c) Spectro Cloud
# SPDX-License-Identifier: Apache-2.0

"""Palette-focused agent for cluster profile discovery."""

from __future__ import annotations

import importlib
from typing import Any

from helpers import (
build_palette_server_config,
extract_text_response,
suppress_console_output,
)

PROFILE_FINDER_SYSTEM_PROMPT = (
"You are a Palette profile discovery specialist. "
"Use only Palette MCP tools to find cluster profiles that include a target pack. "
"First list cluster profiles, then inspect details when needed. "
"Always capture and return cluster profile scope. "
"Return factual results only."
)

async def initialize_profile_finder_agent(
    model: str,
    debug_level: str,
    default_env_file: str,
    default_kubeconfig_dir: str,
    default_mcp_image: str,
) -> Any:
    """Create the LangChain agent used for cluster profile discovery.

    Args:
        model: OpenAI chat model name handed to ``ChatOpenAI``.
        debug_level: MCP console output is suppressed unless this is
            ``"verbose"``.
        default_env_file: Env file path forwarded to the MCP server config.
        default_kubeconfig_dir: Kubeconfig directory forwarded to the config.
        default_mcp_image: Image used to run the Palette MCP server.

    Returns:
        An agent wired with the Palette MCP tools, the profile-finder system
        prompt, and an in-memory checkpointer.
    """
    # Imported lazily so merely importing this module does not require the
    # heavy LangChain dependencies.
    from langchain.agents import create_agent
    from langchain_openai import ChatOpenAI

    saver_cls = importlib.import_module("langgraph.checkpoint.memory").InMemorySaver

    # Newer langchain_mcp_adapters releases expose the client from a
    # ``client`` submodule; older releases export it from the package root.
    try:
        client_cls = getattr(
            importlib.import_module("langchain_mcp_adapters.client"),
            "MultiServerMCPClient",
        )
    except (ImportError, AttributeError):
        client_cls = importlib.import_module(
            "langchain_mcp_adapters"
        ).MultiServerMCPClient

    server_config = build_palette_server_config(
        default_env_file=default_env_file,
        default_kubeconfig_dir=default_kubeconfig_dir,
        default_mcp_image=default_mcp_image,
    )
    client = client_cls(server_config)

    quiet = debug_level != "verbose"
    with suppress_console_output(quiet):
        tools = await client.get_tools()

    return create_agent(
        model=ChatOpenAI(model=model),
        tools=tools,
        system_prompt=PROFILE_FINDER_SYSTEM_PROMPT,
        checkpointer=saver_cls(),
    )


async def run_profile_finder_agent(
    agent: Any,
    pack_name: str,
    debug_level: str,
    run_id: str,
) -> str:
    """Ask the profile-finder agent which cluster profiles include a pack.

    Args:
        agent: Agent built by ``initialize_profile_finder_agent``.
        pack_name: Pack name to search for (matched case-insensitively by the
            prompt's instructions).
        debug_level: MCP/tool console output is suppressed unless this is
            ``"verbose"``.
        run_id: Mixed into the checkpointer thread id so separate runs do not
            share conversation state.

    Returns:
        The agent's final text response, which the prompt instructs to be
        JSON in the shape documented below.
    """
    # Only surface raw MCP/tool chatter when the caller asked for verbose output.
    hide_mcp_output = debug_level != "verbose"
    # The prompt pins an exact tool-call sequence and output schema; cluster
    # queries are explicitly deferred to the active-cluster agent.
    profile_finder_prompt = (
        "Find all cluster profiles in Palette that use the pack named "
        f"'{pack_name}'. Use Palette MCP tools only.\n\n"
        "Required process:\n"
        "1) Call gather_or_delete_clusterprofiles with action='list'.\n"
        "2) If list output lacks pack details, call action='get' for relevant cluster profile uids.\n"
        "3) Match pack name case-insensitively.\n"
        "4) For each matched profile, include scope from metadata.annotations.scope when available.\n"
        "5) If scope is missing, set scope to 'unknown' and mention in notes.\n\n"
        "Important:\n"
        "- Return only profile-level results. Do not query clusters in this agent.\n\n"
        "Return JSON with this shape:\n"
        "{\n"
        '  "pack_name": "<pack>",\n'
        '  "total_profiles_scanned": <int>,\n'
        '  "matched_profiles": [\n'
        "    {\n"
        '      "uid": "<uid>",\n'
        '      "name": "<name>",\n'
        '      "scope": "<tenant|project|system|unknown>",\n'
        '      "pack_references": ["<ref1>", "<ref2>"],\n'
        '      "evidence": "<short evidence>"\n'
        "    }\n"
        "  ],\n"
        '  "notes": "<short note>"\n'
        "}\n"
    )
    # Per-run thread id keeps the InMemorySaver checkpoint isolated per pack/run.
    run_config = {
        "configurable": {"thread_id": f"profile-finder:{pack_name.lower()}:{run_id}"}
    }
    with suppress_console_output(hide_mcp_output):
        result = await agent.ainvoke(
            {"messages": [{"role": "user", "content": profile_finder_prompt}]},
            config=run_config,
        )
    return extract_text_response(result)
73 changes: 73 additions & 0 deletions ai/palette-mcp/integrate-palette-mcp/agents/reporter_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
# Copyright (c) Spectro Cloud
# SPDX-License-Identifier: Apache-2.0

"""Reporter agent that formats discovery output for humans."""

from __future__ import annotations

import importlib
from typing import Any

from helpers import extract_text_response

REPORTER_SYSTEM_PROMPT = (
"You are a reporting agent. Produce clear, structured, concise reports. "
"Do not invent data. If discovery data is uncertain, call that out clearly."
)

async def initialize_reporter_agent(model: str) -> Any:
    """Create the tool-less LangChain agent that formats the final report.

    Args:
        model: OpenAI chat model name handed to ``ChatOpenAI``.

    Returns:
        An agent with no tools, the reporter system prompt, and an in-memory
        checkpointer.
    """
    # Imported lazily so merely importing this module does not require the
    # heavy LangChain dependencies.
    from langchain.agents import create_agent
    from langchain_openai import ChatOpenAI

    saver_cls = importlib.import_module("langgraph.checkpoint.memory").InMemorySaver

    return create_agent(
        model=ChatOpenAI(model=model),
        tools=[],
        system_prompt=REPORTER_SYSTEM_PROMPT,
        checkpointer=saver_cls(),
    )


async def run_reporter_agent(
    agent: Any,
    pack_name: str,
    profile_discovery_output: str,
    active_cluster_output: str,
    tagging_output: str,
    run_id: str,
) -> str:
    """Ask the reporter agent to turn the upstream agents' outputs into a report.

    Args:
        agent: Agent built by ``initialize_reporter_agent``.
        pack_name: Pack name the whole workflow ran against.
        profile_discovery_output: Raw output of the profile-finder agent,
            embedded verbatim into the prompt.
        active_cluster_output: Raw output of the active-cluster agent,
            embedded verbatim into the prompt.
        tagging_output: Raw output of the tagging step, embedded verbatim
            into the prompt.
        run_id: Mixed into the checkpointer thread id so separate runs do not
            share conversation state.

    Returns:
        The agent's final text response: a human-readable report following the
        numbered format in the prompt.
    """
    # The prompt fixes the report sections so output stays consistent between
    # runs, and instructs the model not to go beyond the provided data.
    reporter_prompt = (
        f"Create a report for cluster profiles using pack '{pack_name}'.\n\n"
        "Use these outputs as the source of truth:\n\n"
        "Profile discovery output:\n"
        f"{profile_discovery_output}\n\n"
        "Active cluster mapping output:\n"
        f"{active_cluster_output}\n\n"
        "Tagging output:\n"
        f"{tagging_output}\n\n"
        "Output format:\n"
        "1) Summary (1-2 sentences)\n"
        "2) Matching cluster profiles (bullet list with uid, name, and evidence)\n"
        "3) Active clusters using the matched cluster profiles (bullet list with uid, name, cluster profile uid, and cluster profile name)\n"
        "4) Tagging results for clusters and cluster profiles (what was tagged, success/failure)\n"
        "   Include a separate list of skipped cluster profiles with reason (for example, scope=system).\n"
        "5) Notes and caveats\n"
        "If active cluster data appears incomplete or unchecked, explicitly say the result may be incomplete.\n"
        "If there are no matching cluster profiles, return 'No matching cluster profiles found' in the summary and omit the rest of the output."
    )
    # Per-run thread id keeps the InMemorySaver checkpoint isolated per pack/run.
    run_config = {"configurable": {"thread_id": f"reporter:{pack_name.lower()}:{run_id}"}}
    result = await agent.ainvoke(
        {
            "messages": [
                {
                    "role": "user",
                    "content": reporter_prompt,
                }
            ]
        },
        config=run_config,
    )
    return extract_text_response(result)
Loading