Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
55 changes: 34 additions & 21 deletions src/fromager/commands/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -353,17 +353,13 @@ def _build(
pbi = wkctx.package_build_info(req)
prebuilt = pbi.pre_built

wheel_server_urls = wheels.get_wheel_server_urls(
wkctx, req, cache_wheel_server_url=cache_wheel_server_url
)

# See if we can reuse an existing wheel.
if not force:
wheel_filename = _is_wheel_built(
wkctx,
req.name,
resolved_version,
wheel_server_urls,
cache_wheel_server_url=cache_wheel_server_url,
)
if wheel_filename:
logger.info("using existing wheel from %s", wheel_filename)
Expand Down Expand Up @@ -476,21 +472,40 @@ def _is_wheel_built(
wkctx: context.WorkContext,
dist_name: str,
resolved_version: Version,
wheel_server_urls: list[str],
*,
cache_wheel_server_url: str | None = None,
) -> pathlib.Path | None:
req = Requirement(f"{dist_name}=={resolved_version}")

cache_servers: list[str] = list(
filter(None, [wkctx.wheel_server_url, cache_wheel_server_url])
)
if not cache_servers:
return None

logger.info(
"checking if a suitable wheel for %s was already built on %s",
req,
cache_servers,
)

try:
logger.info(
"checking if a suitable wheel for %s was already built on %s",
req,
wheel_server_urls,
)
url, _ = wheels.resolve_prebuilt_wheel(
ctx=wkctx,
req=req,
wheel_server_urls=wheel_server_urls,
)
url: str | None = None
for server_url in cache_servers:
try:
url, _ = wheels.resolve_cached_wheel(
ctx=wkctx,
req=req,
cache_server_url=server_url,
)
break
except Exception:
logger.debug("wheel not found on %s", server_url, exc_info=True)

if url is None:
logger.info("could not locate existing wheel")
return None

logger.info("found candidate wheel %s", url)
pbi = wkctx.package_build_info(req)
build_tag_from_settings = pbi.build_tag(resolved_version)
Expand All @@ -512,28 +527,26 @@ def _is_wheel_built(
return None

wheel_filename: pathlib.Path | None = None
if url.startswith(wkctx.wheel_server_url):
if wkctx.wheel_server_url and url.startswith(wkctx.wheel_server_url):
logging.debug("found wheel on local server")
wheel_filename = wkctx.wheels_downloads / wheel_basename
if not wheel_filename.exists():
logger.info("wheel not found in local cache, preparing to download")
wheel_filename = None

if not wheel_filename:
# if the found wheel was on an external server, then download it
logger.info("downloading wheel from %s", url)
wheel_filename = wheels.download_wheel(req, url, wkctx.wheels_downloads)

return wheel_filename
except Exception:
logger.debug(
"could not locate prebuilt wheel %s-%s on %s",
"could not locate existing wheel %s-%s",
dist_name,
resolved_version,
wheel_server_urls,
exc_info=True,
)
logger.info("could not locate prebuilt wheel")
logger.info("could not locate existing wheel")
return None


Expand Down
29 changes: 20 additions & 9 deletions src/fromager/commands/download_sequence.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,10 +52,13 @@ def download_sequence(
the build order file that have source_url_type as sdist.

"""
if wkctx.wheel_server_url:
wheel_servers = [wkctx.wheel_server_url]
else:
wheel_servers = [sdist_server_url]
# The local wheel server is a trusted cache (wheels built in this or
# prior runs). An external sdist_server_url used as a wheel fallback
# is an upstream index that needs cooldown and override hooks.
use_cache_resolution = bool(wkctx.wheel_server_url)
wheel_server_url = (
wkctx.wheel_server_url if use_cache_resolution else sdist_server_url
)

logger.info("reading build order from %s", build_order_file)
with read.open_file_or_url(build_order_file) as f:
Expand Down Expand Up @@ -83,7 +86,6 @@ def download_one(entry: dict[str, typing.Any]) -> None:
except Exception as err:
logger.error(f"failed to download sdist for {req}: {err}")
if not ignore_missing_sdists:
# Re-raise with package context since context var is lost across threads
raise RuntimeError(f"Failed to download sdist for {req}") from err
else:
logger.info(
Expand All @@ -92,12 +94,21 @@ def download_one(entry: dict[str, typing.Any]) -> None:

if include_wheels:
try:
wheel_url, _ = wheels.resolve_prebuilt_wheel(
ctx=wkctx, req=req, wheel_server_urls=wheel_servers
)
if use_cache_resolution:
resolved_url, _ = wheels.resolve_cached_wheel(
ctx=wkctx,
req=req,
cache_server_url=wheel_server_url,
)
else:
resolved_url, _ = wheels.resolve_prebuilt_wheel(
ctx=wkctx,
req=req,
wheel_server_urls=[wheel_server_url],
)
wheels.download_wheel(
req=req,
wheel_url=wheel_url,
wheel_url=resolved_url,
output_directory=wkctx.wheels_downloads,
)
except Exception as err:
Expand Down
31 changes: 23 additions & 8 deletions src/fromager/wheels.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
from . import (
dependencies,
external_commands,
finders,
metrics,
overrides,
packagesettings,
Expand Down Expand Up @@ -528,6 +529,25 @@ def get_prebuilt_wheel_provider(
)


def resolve_cached_wheel(
    *,
    ctx: context.WorkContext,
    req: Requirement,
    cache_server_url: str,
) -> tuple[str, Version]:
    """Resolve a wheel from a trusted cache server (local or remote).

    Uses ``PyPICacheProvider`` -- no cooldown, no hooks, no upload-time checks.
    """
    cache_provider = finders.PyPICacheProvider(
        cache_server_url=cache_server_url,
        constraints=ctx.constraints,
    )
    matches = resolver.find_all_matching_from_provider(cache_provider, req)
    best_url, best_version = matches[0]
    return str(best_url), best_version


def resolve_all_prebuilt_wheels(
*,
ctx: context.WorkContext,
Expand All @@ -552,11 +572,6 @@ def resolve_all_prebuilt_wheels(
provider.cooldown = resolver.resolve_package_cooldown(
ctx, req, req_type=req_type
)
# The local fromager wheel server is PEP 503-only and serves
# packages that were already resolved and vetted earlier in the
# same run. Don't fail-closed on missing upload_time there.
if ctx.wheel_server_url and url == ctx.wheel_server_url:
provider.supports_upload_time = False

# Get all matching candidates from provider
results = resolver.find_all_matching_from_provider(provider, req)
Expand All @@ -579,10 +594,10 @@ def resolve_prebuilt_wheel(
wheel_server_urls: list[str],
req_type: requirements_file.RequirementType | None = None,
) -> tuple[str, Version]:
"""Return (URL, version) for the best matching wheel version.
"""Return (URL, version) for the best matching pre-built wheel.

Tries wheel servers in order and returns result from the first that succeeds.
Returns the highest matching version.
Tries wheel servers in order and returns the highest matching version
from the first server that succeeds.
"""
results = resolve_all_prebuilt_wheels(
ctx=ctx, req=req, wheel_server_urls=wheel_server_urls, req_type=req_type
Expand Down
115 changes: 104 additions & 11 deletions tests/test_cooldown.py
Original file line number Diff line number Diff line change
Expand Up @@ -637,17 +637,16 @@ def test_github_cooldown_skips_with_warning(
assert "cooldown cannot be enforced" in caplog.text


def test_local_wheel_server_allows_without_upload_time(
def test_local_wheel_server_skips_cooldown_entirely(
tmp_path: pathlib.Path,
caplog: pytest.LogCaptureFixture,
) -> None:
"""resolve_all_prebuilt_wheels() allows candidates from the local wheel server
even when upload_time is missing.
"""resolve_cached_wheel() disables cooldown for the local wheel server.

The local fromager wheel server is PEP 503-only and serves packages that were
already resolved and built earlier in the same run. They are trusted and must
not be fail-closed by the cooldown just because the local server cannot supply
upload timestamps.
The local fromager wheel server serves packages that were already resolved
and built earlier in the same run. They are trusted, so resolve_cached_wheel
uses PyPICacheProvider (cooldown=None) and no cooldown-related warnings are
emitted.
"""
local_server_url = "http://127.0.0.1:9999/simple/"
ctx = context.WorkContext(
Expand All @@ -658,7 +657,6 @@ def test_local_wheel_server_allows_without_upload_time(
work_dir=tmp_path / "work-dir",
cooldown=_COOLDOWN,
)
ctx.wheel_server_url = local_server_url

no_timestamp_response = {
"meta": {"api-version": "1.1"},
Expand All @@ -668,7 +666,6 @@ def test_local_wheel_server_allows_without_upload_time(
"filename": "test_pkg-1.3.2-py3-none-any.whl",
"url": f"{local_server_url}test-pkg/test_pkg-1.3.2-py3-none-any.whl",
"hashes": {"sha256": "bbb"},
# no upload-time — as served by fromager's local PEP 503 server
},
],
}
Expand All @@ -679,16 +676,112 @@ def test_local_wheel_server_allows_without_upload_time(
json=no_timestamp_response,
headers={"Content-Type": _PYPI_SIMPLE_JSON_CONTENT_TYPE},
)
_url, version = wheels.resolve_cached_wheel(
ctx=ctx,
req=Requirement("test-pkg"),
cache_server_url=local_server_url,
)

assert str(version) == "1.3.2"
assert "cooldown" not in caplog.text.lower()


def test_cache_wheel_server_skips_cooldown_entirely(
    tmp_path: pathlib.Path,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """resolve_cached_wheel() disables cooldown for the cache wheel server.

    The cache wheel server contains wheels from previous fromager runs that
    were already vetted during original sdist resolution. Cooldown is not
    applicable.
    """
    cache_server_url = "https://registry.example.com/packages/pypi/simple/"
    ctx = context.WorkContext(
        active_settings=None,
        patches_dir=tmp_path / "patches",
        sdists_repo=tmp_path / "sdists-repo",
        wheels_repo=tmp_path / "wheels-repo",
        work_dir=tmp_path / "work-dir",
        cooldown=_COOLDOWN,
    )

    # Simple-API (PEP 691) response with no upload-time field at all.
    simple_index_payload = {
        "meta": {"api-version": "1.1"},
        "name": "test-pkg",
        "files": [
            {
                "filename": "test_pkg-1.3.2-py3-none-any.whl",
                "url": f"{cache_server_url}test-pkg/test_pkg-1.3.2-py3-none-any.whl",
                "hashes": {"sha256": "bbb"},
            },
        ],
    }
    with caplog.at_level(logging.WARNING, logger="fromager.resolver"):
        with requests_mock.Mocker() as mocked_http:
            mocked_http.get(
                f"{cache_server_url}test-pkg/",
                json=simple_index_payload,
                headers={"Content-Type": _PYPI_SIMPLE_JSON_CONTENT_TYPE},
            )
            _url, resolved_version = wheels.resolve_cached_wheel(
                ctx=ctx,
                req=Requirement("test-pkg"),
                cache_server_url=cache_server_url,
            )

    # Resolution succeeds despite the missing timestamp, and the cooldown
    # machinery never engages (no cooldown-related log output).
    assert str(resolved_version) == "1.3.2"
    assert "cooldown" not in caplog.text.lower()


def test_non_cache_server_retains_cooldown(
tmp_path: pathlib.Path,
caplog: pytest.LogCaptureFixture,
) -> None:
"""resolve_all_prebuilt_wheels() keeps cooldown active for non-cache servers.

When a server URL is an external/upstream index (not local or cache),
cooldown remains active. Candidates without upload timestamps trigger a
warning because the server does not support upload_time.
"""
external_server_url = "https://external.example.com/simple/"
ctx = context.WorkContext(
active_settings=None,
patches_dir=tmp_path / "patches",
sdists_repo=tmp_path / "sdists-repo",
wheels_repo=tmp_path / "wheels-repo",
work_dir=tmp_path / "work-dir",
cooldown=_COOLDOWN,
)

no_timestamp_response = {
"meta": {"api-version": "1.1"},
"name": "test-pkg",
"files": [
{
"filename": "test_pkg-1.3.2-py3-none-any.whl",
"url": f"{external_server_url}test-pkg/test_pkg-1.3.2-py3-none-any.whl",
"hashes": {"sha256": "bbb"},
},
],
}
with caplog.at_level(logging.WARNING, logger="fromager.resolver"):
with requests_mock.Mocker() as r:
r.get(
f"{external_server_url}test-pkg/",
json=no_timestamp_response,
headers={"Content-Type": _PYPI_SIMPLE_JSON_CONTENT_TYPE},
)
results = wheels.resolve_all_prebuilt_wheels(
ctx=ctx,
req=Requirement("test-pkg"),
wheel_server_urls=[local_server_url],
wheel_server_urls=[external_server_url],
)

assert len(results) == 1
_, version = results[0]
assert str(version) == "1.3.2"
assert "cooldown check skipped" in caplog.text
assert "cooldown" in caplog.text.lower()


# ---------------------------------------------------------------------------
Expand Down
Loading