Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 74 additions & 0 deletions tests/unit/vertexai/genai/replays/test_skills_create.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests the skills.create() method against the Vertex AI endpoint using replays."""

import io
import zipfile

from tests.unit.vertexai.genai.replays import pytest_helper
from vertexai._genai import types

# MANDATORY: Initialize the replay test framework for this module
pytestmark = pytest_helper.setup(
file=__file__,
globals_for_file=globals(),
)


def test_create_skill(client, tmp_path):
    """Creates a skill from a local directory and checks the returned metadata."""
    client._api_client._http_options.base_url = (
        "https://us-central1-aiplatform.googleapis.com"
    )

    # The skill spec requires a SKILL.md file at the directory root.
    skill_md = tmp_path / "SKILL.md"
    skill_md.write_text("# My Replay Skill\nThis is a test skill for replay tests.")

    created = client.skills.create(
        display_name="My Replay Skill",
        description="My Replay Skill Description",
        config=types.CreateSkillConfig(
            local_path=str(tmp_path), wait_for_completion=True
        ),
    )

    assert created.name is not None
    assert created.display_name == "My Replay Skill"
    assert created.description == "My Replay Skill Description"


def test_create_skill_with_prezipped_bytes(client):
    """Tests the creation of a skill with pre-zipped bytes."""
    client._api_client._http_options.base_url = (
        "https://us-central1-aiplatform.googleapis.com"
    )

    # Build a deterministic in-memory archive (fixed DOS-epoch timestamp so
    # the bytes match the replay recording exactly).
    buffer = io.BytesIO()
    with zipfile.ZipFile(buffer, "w") as archive:
        entry = zipfile.ZipInfo("SKILL.md", date_time=(1980, 1, 1, 0, 0, 0))
        archive.writestr(entry, "# My Zipped Replay Skill\nThis is a test.")
    payload = buffer.getvalue()

    created = client.skills.create(
        display_name="My Zipped Replay Skill",
        description="My Zipped Replay Skill Description",
        config=types.CreateSkillConfig(
            zipped_filesystem=payload, wait_for_completion=True
        ),
    )

    assert created.name is not None
    assert created.display_name == "My Zipped Replay Skill"
    assert created.description == "My Zipped Replay Skill Description"
147 changes: 147 additions & 0 deletions vertexai/_genai/_skills_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,147 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Utility functions for Skills."""

import asyncio
import base64
import datetime
import io
import os
import pathlib
import time
from typing import Any, Awaitable, Callable
import zipfile


def zip_directory(directory_path: pathlib.Path | str) -> bytes:
"""Zips a directory into memory and returns the bytes.

Args:
directory_path (pathlib.Path | str): Required. The local path to the
directory.

Returns:
bytes: The zipped directory content.
"""
directory_str = os.fspath(directory_path)
if not os.path.isdir(directory_str):
raise ValueError(f"Path is not a directory: {directory_str}")

zip_buffer = io.BytesIO()
with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
for root, _, files in os.walk(directory_str):
for file in files:
file_path = os.path.join(root, file)
arcname = os.path.relpath(file_path, directory_str)

# Read actual file data
with open(file_path, "rb") as f:
file_data = f.read()

# Use deterministic ZipInfo (mtime: 1980-01-01 00:00:00)
zinfo = zipfile.ZipInfo(arcname, date_time=(1980, 1, 1, 0, 0, 0))
zinfo.compress_type = zipfile.ZIP_DEFLATED
zinfo.external_attr = 0o644 << 16 # Constant file permissions

zip_file.writestr(zinfo, file_data)
return zip_buffer.getvalue()


def get_zipped_filesystem_payload(directory_path: pathlib.Path | str) -> str:
    """Zips a directory and base64-encodes the result to a UTF-8 string.

    Args:
        directory_path (pathlib.Path | str): Required. The local path to the
            directory.

    Returns:
        str: The base64-encoded zipped directory.
    """
    archive_bytes = zip_directory(directory_path)
    encoded = base64.b64encode(archive_bytes)
    return encoded.decode("utf-8")


def await_operation(
*,
operation_name: str,
get_operation_fn: Callable[..., Any],
poll_interval: datetime.timedelta | float = 10.0,
timeout_seconds: float = 300.0,
) -> Any:
"""Waits for a long running operation to complete.

Args:
operation_name (str): Required. The name of the operation.
get_operation_fn (Callable): Required. Function to get the operation
status.
poll_interval (datetime.timedelta | float): The interval between polls.
timeout_seconds (float): The maximum wait duration in seconds.

Returns:
Any: The completed operation.
"""
if isinstance(poll_interval, datetime.timedelta):
poll_seconds = poll_interval.total_seconds()
else:
poll_seconds = float(poll_interval)

start_time = time.time()
operation = get_operation_fn(operation_name=operation_name)
while not operation.done:
if (time.time() - start_time) > timeout_seconds:
raise TimeoutError(
f"Operation {operation_name} did not complete within the timeout "
f"of {timeout_seconds} seconds."
)
time.sleep(poll_seconds)
operation = get_operation_fn(operation_name=operation.name)
return operation


async def await_operation_async(
*,
operation_name: str,
get_operation_fn: Callable[..., Awaitable[Any]],
poll_interval: datetime.timedelta | float = 10.0,
timeout_seconds: float = 300.0,
) -> Any:
"""Waits for a long running operation to complete asynchronously.

Args:
operation_name (str): Required. The name of the operation.
get_operation_fn (Callable): Required. Async function to get the operation
status.
poll_interval (datetime.timedelta | float): The interval between polls.
timeout_seconds (float): The maximum wait duration in seconds.

Returns:
Any: The completed operation.
"""
if isinstance(poll_interval, datetime.timedelta):
poll_seconds = poll_interval.total_seconds()
else:
poll_seconds = float(poll_interval)

start_time = time.time()
operation = await get_operation_fn(operation_name=operation_name)
while not operation.done:
if (time.time() - start_time) > timeout_seconds:
raise TimeoutError(
f"Operation {operation_name} did not complete within the timeout "
f"of {timeout_seconds} seconds."
)
await asyncio.sleep(poll_seconds)
operation = await get_operation_fn(operation_name=operation.name)
return operation
Loading
Loading