Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
# NEXT CHANGELOG

## Release v0.300.0
## Release v0.299.2

### CLI

### Bundles

* Propagate authentication environment (including `DATABRICKS_CONFIG_PROFILE`) to Python subprocesses so bundle validate/deploy no longer fails with a multi-profile host ambiguity error when several profiles in `~/.databrickscfg` share the same host.
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

"to Python subprocesses" -- the Python support is no longer experimental.

* Fixed `--force-pull` on `bundle summary` and `bundle open` so the flag bypasses the local state cache and reads state from the workspace.

### Dependency updates
Expand Down
7 changes: 7 additions & 0 deletions acceptance/bundle/python/propagates-auth-env/.databrickscfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
[my-profile]
host = $DATABRICKS_HOST
token = $DATABRICKS_TOKEN

[other-profile]
host = $DATABRICKS_HOST
token = other-token
16 changes: 16 additions & 0 deletions acceptance/bundle/python/propagates-auth-env/databricks.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
bundle:
name: my_project

sync: {paths: []} # don't need to copy files

python:
mutators:
- "mutators:capture_profile_env"

workspace:
profile: my-profile

resources:
jobs:
my_job:
name: "Job"
13 changes: 13 additions & 0 deletions acceptance/bundle/python/propagates-auth-env/mutators.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
from databricks.bundles.jobs import Job
from databricks.bundles.core import job_mutator, Bundle
import os


@job_mutator
def capture_profile_env(bundle: Bundle, job: Job) -> Job:
    """Record the DATABRICKS_CONFIG_PROFILE value seen by this subprocess.

    The CLI is expected to forward DATABRICKS_CONFIG_PROFILE to the Python
    subprocess so the Databricks SDK can pick the right profile when several
    profiles in the config file share the same host. The captured value is
    written to captured_env.txt for the acceptance test to inspect.
    """
    profile = os.getenv("DATABRICKS_CONFIG_PROFILE", "<unset>")
    with open("captured_env.txt", "w") as out:
        out.write(profile)
    return job
4 changes: 4 additions & 0 deletions acceptance/bundle/python/propagates-auth-env/out.test.toml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 8 additions & 0 deletions acceptance/bundle/python/propagates-auth-env/output.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@

>>> uv run [UV_ARGS] -q [CLI] bundle summary -o json
{
"profile": "my-profile"
}

>>> cat captured_env.txt
my-profile
18 changes: 18 additions & 0 deletions acceptance/bundle/python/propagates-auth-env/script
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@

# Two workspace profiles share the same host so picking one is meaningful.
envsubst < .databrickscfg > out && mv out .databrickscfg
export DATABRICKS_CONFIG_FILE=.databrickscfg
unset DATABRICKS_HOST
unset DATABRICKS_TOKEN
unset DATABRICKS_CONFIG_PROFILE
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can use a comment why they need to be cleared.


trace uv run $UV_ARGS -q $CLI bundle summary -o json | jq '{profile: .workspace.profile}'

# The python mutator captures DATABRICKS_CONFIG_PROFILE from its subprocess env.
# Without the fix, the CLI does not propagate the bundle's resolved profile,
# so the SDK inside python re-invokes the CLI without a profile and fails on
# multi-profile ambiguity.
trace cat captured_env.txt
echo ""
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nit: let Python write the newline. I was confused as to why this was necessary.


rm -fr .databricks __pycache__ captured_env.txt
12 changes: 12 additions & 0 deletions bundle/config/mutator/python/python_mutator.go
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,7 @@ type runPythonMutatorOpts struct {
bundleRootPath string
pythonPath string
loadLocations bool
authEnv map[string]string
}

// getOpts adapts deprecated PyDABs and upcoming Python configuration
Expand Down Expand Up @@ -217,6 +218,15 @@ func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagno
return diag.Errorf("Running Python code is not allowed when DATABRICKS_BUNDLE_RESTRICTED_CODE_EXECUTION is set")
}

// Propagate auth env so the Databricks SDK in the Python subprocess uses the
// same credentials as the CLI. In particular this carries DATABRICKS_CONFIG_PROFILE,
// which lets the CLI disambiguate profiles sharing the same host when the SDK
// re-invokes `databricks auth token --host <host>`.
authEnv, err := b.AuthEnv(ctx)
if err != nil {
return diag.FromErr(err)
}

// mutateDiags is used because Mutate returns 'error' instead of 'diag.Diagnostics'
var mutateDiags diag.Diagnostics
var result applyPythonOutputResult
Expand All @@ -238,6 +248,7 @@ func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagno
bundleRootPath: b.BundleRootPath,
pythonPath: pythonPath,
loadLocations: opts.loadLocations,
authEnv: authEnv,
})
mutateDiags = diags
if diags.HasError() {
Expand Down Expand Up @@ -364,6 +375,7 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, root dyn.Value, op
process.WithDir(opts.bundleRootPath),
process.WithStderrWriter(stderrWriter),
process.WithStdoutWriter(stdoutWriter),
process.WithEnvs(opts.authEnv),
)
if processErr != nil {
logger.Debugf(ctx, "python mutator process failed: %s", processErr)
Expand Down
Loading