From 2ef0ecabbc9b2b52d8a9210be3c2a96ca71cd2d4 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 2 Mar 2026 23:39:34 +0100 Subject: [PATCH 01/13] Add declarative import support for direct deployment engine Allow users to import existing workspace resources into bundle management at deploy time using import blocks in target configuration. This is analogous to Terraform's import blocks and enables taking over management of resources without recreating them. Co-Authored-By: Claude Opus 4.6 --- .../bundle/deploy/import/basic/databricks.yml | 23 +++ .../bundle/deploy/import/basic/hello.py | 1 + .../bundle/deploy/import/basic/out.test.toml | 5 + .../bundle/deploy/import/basic/output.txt | 66 ++++++++ acceptance/bundle/deploy/import/basic/script | 21 +++ .../import/block-migrate/databricks.yml | 23 +++ .../deploy/import/block-migrate/hello.py | 1 + .../deploy/import/block-migrate/out.test.toml | 5 + .../deploy/import/block-migrate/output.txt | 3 + .../bundle/deploy/import/block-migrate/script | 2 + .../deploy/import/block-migrate/test.toml | 2 + .../import/import-and-update/databricks.yml | 23 +++ .../deploy/import/import-and-update/hello.py | 1 + .../import/import-and-update/out.test.toml | 5 + .../import/import-and-update/output.txt | 23 +++ .../deploy/import/import-and-update/script | 18 ++ .../import/resource-not-found/databricks.yml | 23 +++ .../deploy/import/resource-not-found/hello.py | 1 + .../import/resource-not-found/out.test.toml | 5 + .../import/resource-not-found/output.txt | 6 + .../deploy/import/resource-not-found/script | 2 + acceptance/bundle/deploy/import/test.toml | 5 + bundle/config/import.go | 156 ++++++++++++++++++ bundle/config/target.go | 5 + bundle/configsync/diff.go | 2 +- bundle/deployplan/action.go | 43 +++-- bundle/deployplan/plan.go | 1 + bundle/direct/apply.go | 34 ++++ bundle/direct/bind.go | 4 +- bundle/direct/bundle_apply.go | 3 + bundle/direct/bundle_plan.go | 120 +++++++++++++- bundle/phases/deploy.go | 6 +- 
bundle/phases/destroy.go | 2 +- .../statemgmt/upload_state_for_yaml_sync.go | 2 +- cmd/bundle/deployment/migrate.go | 7 +- cmd/bundle/plan.go | 22 ++- 36 files changed, 641 insertions(+), 30 deletions(-) create mode 100644 acceptance/bundle/deploy/import/basic/databricks.yml create mode 100644 acceptance/bundle/deploy/import/basic/hello.py create mode 100644 acceptance/bundle/deploy/import/basic/out.test.toml create mode 100644 acceptance/bundle/deploy/import/basic/output.txt create mode 100644 acceptance/bundle/deploy/import/basic/script create mode 100644 acceptance/bundle/deploy/import/block-migrate/databricks.yml create mode 100644 acceptance/bundle/deploy/import/block-migrate/hello.py create mode 100644 acceptance/bundle/deploy/import/block-migrate/out.test.toml create mode 100644 acceptance/bundle/deploy/import/block-migrate/output.txt create mode 100644 acceptance/bundle/deploy/import/block-migrate/script create mode 100644 acceptance/bundle/deploy/import/block-migrate/test.toml create mode 100644 acceptance/bundle/deploy/import/import-and-update/databricks.yml create mode 100644 acceptance/bundle/deploy/import/import-and-update/hello.py create mode 100644 acceptance/bundle/deploy/import/import-and-update/out.test.toml create mode 100644 acceptance/bundle/deploy/import/import-and-update/output.txt create mode 100644 acceptance/bundle/deploy/import/import-and-update/script create mode 100644 acceptance/bundle/deploy/import/resource-not-found/databricks.yml create mode 100644 acceptance/bundle/deploy/import/resource-not-found/hello.py create mode 100644 acceptance/bundle/deploy/import/resource-not-found/out.test.toml create mode 100644 acceptance/bundle/deploy/import/resource-not-found/output.txt create mode 100644 acceptance/bundle/deploy/import/resource-not-found/script create mode 100644 acceptance/bundle/deploy/import/test.toml create mode 100644 bundle/config/import.go diff --git a/acceptance/bundle/deploy/import/basic/databricks.yml 
b/acceptance/bundle/deploy/import/basic/databricks.yml new file mode 100644 index 0000000000..d0a642bfef --- /dev/null +++ b/acceptance/bundle/deploy/import/basic/databricks.yml @@ -0,0 +1,23 @@ +bundle: + name: test-import-basic + +resources: + jobs: + foo: + name: test-import-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + import: + jobs: + foo: + id: "PLACEHOLDER_JOB_ID" diff --git a/acceptance/bundle/deploy/import/basic/hello.py b/acceptance/bundle/deploy/import/basic/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/import/basic/hello.py @@ -0,0 +1 @@ +print("hello") diff --git a/acceptance/bundle/deploy/import/basic/out.test.toml b/acceptance/bundle/deploy/import/basic/out.test.toml new file mode 100644 index 0000000000..19b2c349a3 --- /dev/null +++ b/acceptance/bundle/deploy/import/basic/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/import/basic/output.txt b/acceptance/bundle/deploy/import/basic/output.txt new file mode 100644 index 0000000000..ccc962c120 --- /dev/null +++ b/acceptance/bundle/deploy/import/basic/output.txt @@ -0,0 +1,66 @@ + +>>> [CLI] bundle plan +import jobs.foo (id: [NEW_JOB_ID]) + +Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged, 1 to import + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-import-basic/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+ +>>> [CLI] bundle plan +Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged + +>>> print_state.py +{ + "state_version": 1, + "cli_version": "[DEV_VERSION]", + "lineage": "[UUID]", + "serial": 1, + "state": { + "resources.jobs.foo": { + "__id__": "[NEW_JOB_ID]", + "state": { + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-import-basic/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "environments": [ + { + "environment_key": "default", + "spec": { + "client": "1" + } + } + ], + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "test-import-job", + "queue": { + "enabled": true + }, + "tasks": [ + { + "environment_key": "default", + "spark_python_task": { + "python_file": "/Workspace/Users/[USERNAME]/.bundle/test-import-basic/default/files/hello.py" + }, + "task_key": "my_task" + } + ] + } + } + } +} + +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete resources.jobs.foo + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-import-basic/default + +Deleting files... +Destroy complete! 
diff --git a/acceptance/bundle/deploy/import/basic/script b/acceptance/bundle/deploy/import/basic/script new file mode 100644 index 0000000000..2252f3a630 --- /dev/null +++ b/acceptance/bundle/deploy/import/basic/script @@ -0,0 +1,21 @@ +# Create a job in the workspace +NEW_JOB_ID=$($CLI jobs create --json '{"name": "test-import-job", "environments": [{"environment_key": "default", "spec": {"client": "1"}}], "tasks": [{"task_key": "my_task", "environment_key": "default", "spark_python_task": {"python_file": "/Workspace/test.py"}}]}' | jq -r .job_id) +add_repl.py $NEW_JOB_ID NEW_JOB_ID + +# Update the databricks.yml with the actual job ID +update_file.py databricks.yml 'PLACEHOLDER_JOB_ID' "$NEW_JOB_ID" + +# Run plan - should show import action +trace $CLI bundle plan + +# Deploy with auto-approve +trace $CLI bundle deploy --auto-approve + +# Plan again - should show no changes (skip) +trace $CLI bundle plan + +# Verify state file contains the imported ID +trace print_state.py | contains.py "$NEW_JOB_ID" + +# Cleanup +trace $CLI bundle destroy --auto-approve diff --git a/acceptance/bundle/deploy/import/block-migrate/databricks.yml b/acceptance/bundle/deploy/import/block-migrate/databricks.yml new file mode 100644 index 0000000000..75fd19e4c9 --- /dev/null +++ b/acceptance/bundle/deploy/import/block-migrate/databricks.yml @@ -0,0 +1,23 @@ +bundle: + name: test-import-block-migrate + +resources: + jobs: + foo: + name: test-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + import: + jobs: + foo: + id: "12345" diff --git a/acceptance/bundle/deploy/import/block-migrate/hello.py b/acceptance/bundle/deploy/import/block-migrate/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/import/block-migrate/hello.py @@ -0,0 +1 @@ +print("hello") diff --git 
a/acceptance/bundle/deploy/import/block-migrate/out.test.toml b/acceptance/bundle/deploy/import/block-migrate/out.test.toml new file mode 100644 index 0000000000..19b2c349a3 --- /dev/null +++ b/acceptance/bundle/deploy/import/block-migrate/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/import/block-migrate/output.txt b/acceptance/bundle/deploy/import/block-migrate/output.txt new file mode 100644 index 0000000000..df257a1bfb --- /dev/null +++ b/acceptance/bundle/deploy/import/block-migrate/output.txt @@ -0,0 +1,3 @@ + +>>> musterr [CLI] bundle deployment migrate +Error: cannot run 'bundle deployment migrate' when import blocks are defined in the target configuration; import blocks are only supported with the direct deployment engine diff --git a/acceptance/bundle/deploy/import/block-migrate/script b/acceptance/bundle/deploy/import/block-migrate/script new file mode 100644 index 0000000000..15528a0e4b --- /dev/null +++ b/acceptance/bundle/deploy/import/block-migrate/script @@ -0,0 +1,2 @@ +# Try to run migration with import blocks - should fail +trace musterr $CLI bundle deployment migrate diff --git a/acceptance/bundle/deploy/import/block-migrate/test.toml b/acceptance/bundle/deploy/import/block-migrate/test.toml new file mode 100644 index 0000000000..680c17c1e0 --- /dev/null +++ b/acceptance/bundle/deploy/import/block-migrate/test.toml @@ -0,0 +1,2 @@ +# Migration test does not need engine matrix +[EnvMatrix] diff --git a/acceptance/bundle/deploy/import/import-and-update/databricks.yml b/acceptance/bundle/deploy/import/import-and-update/databricks.yml new file mode 100644 index 0000000000..584bf3d979 --- /dev/null +++ b/acceptance/bundle/deploy/import/import-and-update/databricks.yml @@ -0,0 +1,23 @@ +bundle: + name: test-import-update + +resources: + jobs: + foo: + name: updated-job-name + environments: + - environment_key: default + spec: + client: "1" + tasks: 
+ - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + import: + jobs: + foo: + id: "PLACEHOLDER_JOB_ID" diff --git a/acceptance/bundle/deploy/import/import-and-update/hello.py b/acceptance/bundle/deploy/import/import-and-update/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/import/import-and-update/hello.py @@ -0,0 +1 @@ +print("hello") diff --git a/acceptance/bundle/deploy/import/import-and-update/out.test.toml b/acceptance/bundle/deploy/import/import-and-update/out.test.toml new file mode 100644 index 0000000000..19b2c349a3 --- /dev/null +++ b/acceptance/bundle/deploy/import/import-and-update/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/import/import-and-update/output.txt b/acceptance/bundle/deploy/import/import-and-update/output.txt new file mode 100644 index 0000000000..61be2f37f0 --- /dev/null +++ b/acceptance/bundle/deploy/import/import-and-update/output.txt @@ -0,0 +1,23 @@ + +>>> [CLI] bundle plan +import jobs.foo (id: [NEW_JOB_ID]) + +Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged, 1 to import + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-import-update/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> [CLI] jobs get [NEW_JOB_ID] +updated-job-name + +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete resources.jobs.foo + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-import-update/default + +Deleting files... +Destroy complete! 
diff --git a/acceptance/bundle/deploy/import/import-and-update/script b/acceptance/bundle/deploy/import/import-and-update/script new file mode 100644 index 0000000000..b0a27071c8 --- /dev/null +++ b/acceptance/bundle/deploy/import/import-and-update/script @@ -0,0 +1,18 @@ +# Create a job in the workspace with a different name +NEW_JOB_ID=$($CLI jobs create --json '{"name": "original-job-name", "environments": [{"environment_key": "default", "spec": {"client": "1"}}], "tasks": [{"task_key": "my_task", "environment_key": "default", "spark_python_task": {"python_file": "/Workspace/test.py"}}]}' | jq -r .job_id) +add_repl.py $NEW_JOB_ID NEW_JOB_ID + +# Update the databricks.yml with the actual job ID +update_file.py databricks.yml 'PLACEHOLDER_JOB_ID' "$NEW_JOB_ID" + +# Run plan - should show import_and_update action (name differs from config) +trace $CLI bundle plan + +# Deploy with auto-approve +trace $CLI bundle deploy --auto-approve + +# Verify the job was updated +trace $CLI jobs get $NEW_JOB_ID | jq -r .settings.name + +# Cleanup +trace $CLI bundle destroy --auto-approve diff --git a/acceptance/bundle/deploy/import/resource-not-found/databricks.yml b/acceptance/bundle/deploy/import/resource-not-found/databricks.yml new file mode 100644 index 0000000000..7efb2bc8d0 --- /dev/null +++ b/acceptance/bundle/deploy/import/resource-not-found/databricks.yml @@ -0,0 +1,23 @@ +bundle: + name: test-import-not-found + +resources: + jobs: + foo: + name: test-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + import: + jobs: + foo: + id: "999999999" diff --git a/acceptance/bundle/deploy/import/resource-not-found/hello.py b/acceptance/bundle/deploy/import/resource-not-found/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/import/resource-not-found/hello.py @@ -0,0 +1 @@ 
+print("hello") diff --git a/acceptance/bundle/deploy/import/resource-not-found/out.test.toml b/acceptance/bundle/deploy/import/resource-not-found/out.test.toml new file mode 100644 index 0000000000..19b2c349a3 --- /dev/null +++ b/acceptance/bundle/deploy/import/resource-not-found/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/import/resource-not-found/output.txt b/acceptance/bundle/deploy/import/resource-not-found/output.txt new file mode 100644 index 0000000000..958309e7d8 --- /dev/null +++ b/acceptance/bundle/deploy/import/resource-not-found/output.txt @@ -0,0 +1,6 @@ + +>>> musterr [CLI] bundle plan +Error: cannot plan resources.jobs.foo: resource with ID "[NUMID]" does not exist in workspace + +Error: planning failed + diff --git a/acceptance/bundle/deploy/import/resource-not-found/script b/acceptance/bundle/deploy/import/resource-not-found/script new file mode 100644 index 0000000000..6226cb8591 --- /dev/null +++ b/acceptance/bundle/deploy/import/resource-not-found/script @@ -0,0 +1,2 @@ +# Try to plan with a non-existent job ID - should fail +trace musterr $CLI bundle plan diff --git a/acceptance/bundle/deploy/import/test.toml b/acceptance/bundle/deploy/import/test.toml new file mode 100644 index 0000000000..931833f6cc --- /dev/null +++ b/acceptance/bundle/deploy/import/test.toml @@ -0,0 +1,5 @@ +Cloud = true +Ignore = [".databricks"] + +[EnvMatrix] +DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/bundle/config/import.go b/bundle/config/import.go new file mode 100644 index 0000000000..ddaa1aee16 --- /dev/null +++ b/bundle/config/import.go @@ -0,0 +1,156 @@ +package config + +// ImportResource represents a single resource to import with its workspace ID. +type ImportResource struct { + ID string `json:"id"` +} + +// Import defines resources to import at the target level. +// Resources listed here will be bound to the bundle at deploy time. 
+// This field is only valid for the direct deployment engine. +type Import struct { + Jobs map[string]ImportResource `json:"jobs,omitempty"` + Pipelines map[string]ImportResource `json:"pipelines,omitempty"` + Models map[string]ImportResource `json:"models,omitempty"` + Experiments map[string]ImportResource `json:"experiments,omitempty"` + ModelServingEndpoints map[string]ImportResource `json:"model_serving_endpoints,omitempty"` + RegisteredModels map[string]ImportResource `json:"registered_models,omitempty"` + QualityMonitors map[string]ImportResource `json:"quality_monitors,omitempty"` + Schemas map[string]ImportResource `json:"schemas,omitempty"` + Volumes map[string]ImportResource `json:"volumes,omitempty"` + Clusters map[string]ImportResource `json:"clusters,omitempty"` + Dashboards map[string]ImportResource `json:"dashboards,omitempty"` + Apps map[string]ImportResource `json:"apps,omitempty"` + SecretScopes map[string]ImportResource `json:"secret_scopes,omitempty"` + Alerts map[string]ImportResource `json:"alerts,omitempty"` + SqlWarehouses map[string]ImportResource `json:"sql_warehouses,omitempty"` + DatabaseInstances map[string]ImportResource `json:"database_instances,omitempty"` + DatabaseCatalogs map[string]ImportResource `json:"database_catalogs,omitempty"` + SyncedDatabaseTables map[string]ImportResource `json:"synced_database_tables,omitempty"` + PostgresProjects map[string]ImportResource `json:"postgres_projects,omitempty"` + PostgresBranches map[string]ImportResource `json:"postgres_branches,omitempty"` + PostgresEndpoints map[string]ImportResource `json:"postgres_endpoints,omitempty"` +} + +// GetImportID returns the import ID for a given resource type and name. +// Returns empty string if no import is defined for the resource. 
+func (i *Import) GetImportID(resourceType, resourceName string) string { + if i == nil { + return "" + } + switch resourceType { + case "jobs": + if r, ok := i.Jobs[resourceName]; ok { + return r.ID + } + case "pipelines": + if r, ok := i.Pipelines[resourceName]; ok { + return r.ID + } + case "models": + if r, ok := i.Models[resourceName]; ok { + return r.ID + } + case "experiments": + if r, ok := i.Experiments[resourceName]; ok { + return r.ID + } + case "model_serving_endpoints": + if r, ok := i.ModelServingEndpoints[resourceName]; ok { + return r.ID + } + case "registered_models": + if r, ok := i.RegisteredModels[resourceName]; ok { + return r.ID + } + case "quality_monitors": + if r, ok := i.QualityMonitors[resourceName]; ok { + return r.ID + } + case "schemas": + if r, ok := i.Schemas[resourceName]; ok { + return r.ID + } + case "volumes": + if r, ok := i.Volumes[resourceName]; ok { + return r.ID + } + case "clusters": + if r, ok := i.Clusters[resourceName]; ok { + return r.ID + } + case "dashboards": + if r, ok := i.Dashboards[resourceName]; ok { + return r.ID + } + case "apps": + if r, ok := i.Apps[resourceName]; ok { + return r.ID + } + case "secret_scopes": + if r, ok := i.SecretScopes[resourceName]; ok { + return r.ID + } + case "alerts": + if r, ok := i.Alerts[resourceName]; ok { + return r.ID + } + case "sql_warehouses": + if r, ok := i.SqlWarehouses[resourceName]; ok { + return r.ID + } + case "database_instances": + if r, ok := i.DatabaseInstances[resourceName]; ok { + return r.ID + } + case "database_catalogs": + if r, ok := i.DatabaseCatalogs[resourceName]; ok { + return r.ID + } + case "synced_database_tables": + if r, ok := i.SyncedDatabaseTables[resourceName]; ok { + return r.ID + } + case "postgres_projects": + if r, ok := i.PostgresProjects[resourceName]; ok { + return r.ID + } + case "postgres_branches": + if r, ok := i.PostgresBranches[resourceName]; ok { + return r.ID + } + case "postgres_endpoints": + if r, ok := 
i.PostgresEndpoints[resourceName]; ok { + return r.ID + } + } + return "" +} + +// IsEmpty returns true if no imports are defined. +func (i *Import) IsEmpty() bool { + if i == nil { + return true + } + return len(i.Jobs) == 0 && + len(i.Pipelines) == 0 && + len(i.Models) == 0 && + len(i.Experiments) == 0 && + len(i.ModelServingEndpoints) == 0 && + len(i.RegisteredModels) == 0 && + len(i.QualityMonitors) == 0 && + len(i.Schemas) == 0 && + len(i.Volumes) == 0 && + len(i.Clusters) == 0 && + len(i.Dashboards) == 0 && + len(i.Apps) == 0 && + len(i.SecretScopes) == 0 && + len(i.Alerts) == 0 && + len(i.SqlWarehouses) == 0 && + len(i.DatabaseInstances) == 0 && + len(i.DatabaseCatalogs) == 0 && + len(i.SyncedDatabaseTables) == 0 && + len(i.PostgresProjects) == 0 && + len(i.PostgresBranches) == 0 && + len(i.PostgresEndpoints) == 0 +} diff --git a/bundle/config/target.go b/bundle/config/target.go index fae9c940b3..e6c4eb2e9b 100644 --- a/bundle/config/target.go +++ b/bundle/config/target.go @@ -69,6 +69,11 @@ type Target struct { Sync *Sync `json:"sync,omitempty"` Permissions []resources.Permission `json:"permissions,omitempty"` + + // Import specifies existing workspace resources to import into bundle management. + // Resources listed here will be bound to the bundle at deploy time. + // This field is only valid for the direct deployment engine. 
+ Import *Import `json:"import,omitempty"` } const ( diff --git a/bundle/configsync/diff.go b/bundle/configsync/diff.go index 1e27da9150..ee1421c4b8 100644 --- a/bundle/configsync/diff.go +++ b/bundle/configsync/diff.go @@ -139,7 +139,7 @@ func DetectChanges(ctx context.Context, b *bundle.Bundle, engine engine.EngineTy _, statePath = b.StateFilenameConfigSnapshot(ctx) } - plan, err := deployBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, statePath) + plan, err := deployBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, statePath, nil) if err != nil { return nil, fmt.Errorf("failed to calculate plan: %w", err) } diff --git a/bundle/deployplan/action.go b/bundle/deployplan/action.go index e8e1427956..b1e817922d 100644 --- a/bundle/deployplan/action.go +++ b/bundle/deployplan/action.go @@ -28,36 +28,45 @@ type ActionType string // If case of several options, action with highest severity wins. // Note, Create/Delete are handled explicitly and never compared. const ( - Undefined ActionType = "" - Skip ActionType = "skip" - Resize ActionType = "resize" - Update ActionType = "update" - UpdateWithID ActionType = "update_id" - Create ActionType = "create" - Recreate ActionType = "recreate" - Delete ActionType = "delete" + Undefined ActionType = "" + Skip ActionType = "skip" + Resize ActionType = "resize" + Update ActionType = "update" + UpdateWithID ActionType = "update_id" + Create ActionType = "create" + Recreate ActionType = "recreate" + Delete ActionType = "delete" + Import ActionType = "import" + ImportAndUpdate ActionType = "import_and_update" ) var actionOrder = map[ActionType]int{ - Undefined: 0, - Skip: 1, - Resize: 2, - Update: 3, - UpdateWithID: 4, - Create: 5, - Recreate: 6, - Delete: 7, + Undefined: 0, + Skip: 1, + Resize: 2, + Update: 3, + UpdateWithID: 4, + Create: 5, + Recreate: 6, + Delete: 7, + Import: 8, + ImportAndUpdate: 9, } func (a ActionType) KeepsID() bool { switch a { - case Create, UpdateWithID, Recreate, Delete: + case Create, 
UpdateWithID, Recreate, Delete, Import, ImportAndUpdate: return false default: return true } } +// IsImport returns true if the action is an import action. +func (a ActionType) IsImport() bool { + return a == Import || a == ImportAndUpdate +} + // StringShort short version of action string, without suffix func (a ActionType) StringShort() string { items := strings.SplitN(string(a), "_", 2) diff --git a/bundle/deployplan/plan.go b/bundle/deployplan/plan.go index 22f7db34c7..9c2ced4376 100644 --- a/bundle/deployplan/plan.go +++ b/bundle/deployplan/plan.go @@ -74,6 +74,7 @@ func LoadPlanFromFile(path string) (*Plan, error) { type PlanEntry struct { ID string `json:"id,omitempty"` + ImportID string `json:"import_id,omitempty"` DependsOn []DependsOnEntry `json:"depends_on,omitempty"` Action ActionType `json:"action,omitempty"` NewState *structvar.StructVarJSON `json:"new_state,omitempty"` diff --git a/bundle/direct/apply.go b/bundle/direct/apply.go index 04bb7054a2..f90e2eabf0 100644 --- a/bundle/direct/apply.go +++ b/bundle/direct/apply.go @@ -56,6 +56,40 @@ func (d *DeploymentUnit) Deploy(ctx context.Context, db *dstate.DeploymentState, } } +// Import handles importing an existing workspace resource into the bundle state. +// For Import action, it just saves the state with the import ID. +// For ImportAndUpdate action, it also applies config changes to the resource. 
+func (d *DeploymentUnit) Import(ctx context.Context, db *dstate.DeploymentState, importID string, newState any, actionType deployplan.ActionType, changes deployplan.Changes) error { + if actionType == deployplan.ImportAndUpdate { + // Apply updates to the imported resource + if !d.Adapter.HasDoUpdate() { + return fmt.Errorf("internal error: DoUpdate not implemented for resource %s", d.ResourceKey) + } + + remoteState, err := d.Adapter.DoUpdate(ctx, importID, newState, changes) + if err != nil { + return fmt.Errorf("updating imported resource id=%s: %w", importID, err) + } + + err = d.SetRemoteState(remoteState) + if err != nil { + return err + } + + log.Infof(ctx, "Imported and updated %s id=%s", d.ResourceKey, importID) + } else { + log.Infof(ctx, "Imported %s id=%s", d.ResourceKey, importID) + } + + // Save state with the imported ID + err := db.SaveState(d.ResourceKey, importID, newState, d.DependsOn) + if err != nil { + return fmt.Errorf("saving state id=%s: %w", importID, err) + } + + return nil +} + func (d *DeploymentUnit) Create(ctx context.Context, db *dstate.DeploymentState, newState any) error { newID, remoteState, err := d.Adapter.DoCreate(ctx, newState) if err != nil { diff --git a/bundle/direct/bind.go b/bundle/direct/bind.go index b476319ed7..f16a5b2e0f 100644 --- a/bundle/direct/bind.go +++ b/bundle/direct/bind.go @@ -105,7 +105,7 @@ func (b *DeploymentBundle) Bind(ctx context.Context, client *databricks.Workspac log.Infof(ctx, "Bound %s to id=%s (in temp state)", resourceKey, resourceID) // First plan + update: populate state with resolved config - plan, err := b.CalculatePlan(ctx, client, configRoot, tmpStatePath) + plan, err := b.CalculatePlan(ctx, client, configRoot, tmpStatePath, nil) if err != nil { os.Remove(tmpStatePath) return nil, err @@ -146,7 +146,7 @@ func (b *DeploymentBundle) Bind(ctx context.Context, client *databricks.Workspac } // Second plan: this is the plan to present to the user (change between remote resource and config) - 
plan, err = b.CalculatePlan(ctx, client, configRoot, tmpStatePath) + plan, err = b.CalculatePlan(ctx, client, configRoot, tmpStatePath, nil) if err != nil { os.Remove(tmpStatePath) return nil, err diff --git a/bundle/direct/bundle_apply.go b/bundle/direct/bundle_apply.go index 4a0b9359ee..902dc4a47f 100644 --- a/bundle/direct/bundle_apply.go +++ b/bundle/direct/bundle_apply.go @@ -120,6 +120,9 @@ func (b *DeploymentBundle) Apply(ctx context.Context, client *databricks.Workspa return false } err = b.StateDB.SaveState(resourceKey, dbentry.ID, sv.Value, entry.DependsOn) + } else if action.IsImport() { + // Handle import actions + err = d.Import(ctx, &b.StateDB, entry.ImportID, sv.Value, action, entry.Changes) } else { // TODO: redo calcDiff to downgrade planned action if possible (?) err = d.Deploy(ctx, &b.StateDB, sv.Value, action, entry.Changes) diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index cd2d8a18ab..e74cc3cee0 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -109,7 +109,7 @@ func (b *DeploymentBundle) InitForApply(ctx context.Context, client *databricks. 
return nil } -func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks.WorkspaceClient, configRoot *config.Root, statePath string) (*deployplan.Plan, error) { +func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks.WorkspaceClient, configRoot *config.Root, statePath string, importConfig *config.Import) (*deployplan.Plan, error) { err := b.StateDB.Open(statePath) if err != nil { return nil, fmt.Errorf("reading state from %s: %w", statePath, err) @@ -120,7 +120,7 @@ func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks return nil, err } - plan, err := b.makePlan(ctx, configRoot, &b.StateDB.Data) + plan, err := b.makePlan(ctx, configRoot, &b.StateDB.Data, importConfig) if err != nil { return nil, fmt.Errorf("reading config: %w", err) } @@ -196,6 +196,22 @@ func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks } dbentry, hasEntry := b.StateDB.GetResourceEntry(resourceKey) + + // Handle import block: if ImportID is set, this resource should be imported + if entry.ImportID != "" { + if hasEntry { + // Resource is already in state - check if IDs match + if dbentry.ID != entry.ImportID { + logdiag.LogError(ctx, fmt.Errorf("%s: resource already bound to ID %q, cannot import as %q; remove the import block or unbind the existing resource", errorPrefix, dbentry.ID, entry.ImportID)) + return false + } + // IDs match - proceed with normal planning (resource was previously imported) + } else { + // Not in state - this is a new import + return b.handleImportPlan(ctx, resourceKey, entry, adapter, errorPrefix) + } + } + if !hasEntry { entry.Action = deployplan.Create return true @@ -709,7 +725,7 @@ func (b *DeploymentBundle) resolveReferences(ctx context.Context, resourceKey st return true } -func (b *DeploymentBundle) makePlan(ctx context.Context, configRoot *config.Root, db *dstate.Database) (*deployplan.Plan, error) { +func (b *DeploymentBundle) makePlan(ctx context.Context, 
configRoot *config.Root, db *dstate.Database, importConfig *config.Import) (*deployplan.Plan, error) { p := deployplan.NewPlanDirect() // Copy state metadata to plan for validation during apply @@ -871,9 +887,17 @@ func (b *DeploymentBundle) makePlan(ctx context.Context, configRoot *config.Root return nil, fmt.Errorf("%s: cannot serialize state: %w", node, err) } + // Check if this resource has an import block defined + resourceType, resourceName := getResourceTypeAndName(node) + importID := "" + if importConfig != nil { + importID = importConfig.GetImportID(resourceType, resourceName) + } + e := deployplan.PlanEntry{ DependsOn: dependsOn, NewState: newStateJSON, + ImportID: importID, } p.Plan[node] = &e @@ -944,3 +968,93 @@ func (b *DeploymentBundle) getAdapterForKey(resourceKey string) (*dresources.Ada return adapter, nil } + +// handleImportPlan handles planning for resources that should be imported from the workspace. +// This is called when a resource has an import block defined and is not yet in the state. 
+func (b *DeploymentBundle) handleImportPlan(ctx context.Context, resourceKey string, entry *deployplan.PlanEntry, adapter *dresources.Adapter, errorPrefix string) bool { + importID := entry.ImportID + + // Read the remote resource to verify it exists and get current state + remoteState, err := adapter.DoRead(ctx, importID) + if err != nil { + if isResourceGone(err) { + logdiag.LogError(ctx, fmt.Errorf("%s: resource with ID %q does not exist in workspace", errorPrefix, importID)) + } else { + logdiag.LogError(ctx, fmt.Errorf("%s: reading remote resource id=%q: %w", errorPrefix, importID, err)) + } + return false + } + + entry.RemoteState = remoteState + b.RemoteStateCache.Store(resourceKey, remoteState) + + // Get the new state config + sv, ok := b.StateCache.Load(resourceKey) + if !ok { + logdiag.LogError(ctx, fmt.Errorf("%s: internal error: no state cache entry", errorPrefix)) + return false + } + + // Compare remote state with config to determine if update needed + remoteStateComparable, err := adapter.RemapState(remoteState) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: interpreting remote state: %w", errorPrefix, err)) + return false + } + + remoteDiff, err := structdiff.GetStructDiff(remoteStateComparable, sv.Value, adapter.KeyedSlices()) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: diffing remote state: %w", errorPrefix, err)) + return false + } + + // For imports, there's no "saved state" so we compare remote directly with config + entry.Changes, err = prepareChanges(ctx, adapter, nil, remoteDiff, nil, remoteStateComparable) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: %w", errorPrefix, err)) + return false + } + + err = addPerFieldActions(ctx, adapter, entry.Changes, remoteState) + if err != nil { + logdiag.LogError(ctx, fmt.Errorf("%s: classifying changes: %w", errorPrefix, err)) + return false + } + + // Determine action based on changes + maxAction := getMaxAction(entry.Changes) + + // Block recreate action for 
imported resources + if maxAction == deployplan.Recreate { + logdiag.LogError(ctx, fmt.Errorf("%s: cannot recreate resource with import block; this would destroy the existing workspace resource. Remove the import block to allow recreation", errorPrefix)) + return false + } + + if maxAction == deployplan.Skip || maxAction == deployplan.Undefined { + entry.Action = deployplan.Import + } else { + entry.Action = deployplan.ImportAndUpdate + } + + return true +} + +// getResourceTypeAndName extracts the resource type and name from a resource key. +// For example, "resources.jobs.my_job" returns ("jobs", "my_job"). +// For child resources like "resources.jobs.my_job.permissions", returns ("jobs.permissions", "my_job"). +func getResourceTypeAndName(resourceKey string) (resourceType, resourceName string) { + dp, err := dyn.NewPathFromString(resourceKey) + if err != nil || len(dp) < 3 { + return "", "" + } + + resourceType = dp[1].Key() + resourceName = dp[2].Key() + + // Handle child resources (permissions, grants) + if len(dp) >= 4 && (dp[3].Key() == "permissions" || dp[3].Key() == "grants") { + resourceType = dp[1].Key() + "." 
+ dp[3].Key() + } + + return resourceType, resourceName +} diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 35c4161eae..0812afe39b 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -215,7 +215,11 @@ func Deploy(ctx context.Context, b *bundle.Bundle, outputHandler sync.OutputHand func RunPlan(ctx context.Context, b *bundle.Bundle, engine engine.EngineType) *deployplan.Plan { if engine.IsDirect() { _, localPath := b.StateFilenameDirect(ctx) - plan, err := b.DeploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, localPath) + var importConfig *config.Import + if b.Target != nil { + importConfig = b.Target.Import + } + plan, err := b.DeploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, localPath, importConfig) if err != nil { logdiag.LogError(ctx, err) return nil diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index 6c6657c8ce..68dca4f154 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -158,7 +158,7 @@ func Destroy(ctx context.Context, b *bundle.Bundle, engine engine.EngineType) { var plan *deployplan.Plan if engine.IsDirect() { _, localPath := b.StateFilenameDirect(ctx) - plan, err = b.DeploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), nil, localPath) + plan, err = b.DeploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), nil, localPath, nil) if err != nil { logdiag.LogError(ctx, err) return diff --git a/bundle/statemgmt/upload_state_for_yaml_sync.go b/bundle/statemgmt/upload_state_for_yaml_sync.go index a71fa241cb..b1eddafc2f 100644 --- a/bundle/statemgmt/upload_state_for_yaml_sync.go +++ b/bundle/statemgmt/upload_state_for_yaml_sync.go @@ -152,7 +152,7 @@ func (m *uploadStateForYamlSync) convertState(ctx context.Context, b *bundle.Bun return diag.FromErr(fmt.Errorf("failed to create uninterpolated config: %w", err)) } - plan, err := deploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), &uninterpolatedConfig, snapshotPath) + plan, err := 
deploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), &uninterpolatedConfig, snapshotPath, nil) if err != nil { return diag.FromErr(err) } diff --git a/cmd/bundle/deployment/migrate.go b/cmd/bundle/deployment/migrate.go index 6f67f8ce43..d81c6aecfe 100644 --- a/cmd/bundle/deployment/migrate.go +++ b/cmd/bundle/deployment/migrate.go @@ -154,6 +154,11 @@ WARNING: Both direct deployment engine and this command are experimental and not } ctx := cmd.Context() + // Check for import blocks - migration is not allowed with import blocks defined + if b.Target != nil && b.Target.Import != nil && !b.Target.Import.IsEmpty() { + return errors.New("cannot run 'bundle deployment migrate' when import blocks are defined in the target configuration; import blocks are only supported with the direct deployment engine") + } + if stateDesc.Lineage == "" { // TODO: mention bundle.engine once it's there cmdio.LogString(ctx, `Error: This command migrates the existing Terraform state file (terraform.tfstate) to a direct deployment state file (resources.json). However, no existing local or remote state was found. 
@@ -231,7 +236,7 @@ To start using direct engine, deploy with DATABRICKS_BUNDLE_ENGINE=direct env va } }() - plan, err := deploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, tempStatePath) + plan, err := deploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, tempStatePath, nil) if err != nil { return err } diff --git a/cmd/bundle/plan.go b/cmd/bundle/plan.go index e3dd63929e..0bd6e0ea57 100644 --- a/cmd/bundle/plan.go +++ b/cmd/bundle/plan.go @@ -72,6 +72,7 @@ It is useful for previewing changes before running 'bundle deploy'.`, updateCount := 0 deleteCount := 0 unchangedCount := 0 + importCount := 0 for _, change := range plan.GetActions() { switch change.ActionType { @@ -87,6 +88,11 @@ It is useful for previewing changes before running 'bundle deploy'.`, createCount++ case deployplan.Skip, deployplan.Undefined: unchangedCount++ + case deployplan.Import: + importCount++ + case deployplan.ImportAndUpdate: + importCount++ + updateCount++ } } @@ -95,7 +101,7 @@ It is useful for previewing changes before running 'bundle deploy'.`, switch root.OutputType(cmd) { case flags.OutputText: // Print summary line and actions to stdout - totalChanges := createCount + updateCount + deleteCount + totalChanges := createCount + updateCount + deleteCount + importCount if totalChanges > 0 { // Print all actions in the order they were processed for _, action := range plan.GetActions() { @@ -103,12 +109,22 @@ It is useful for previewing changes before running 'bundle deploy'.`, continue } key := strings.TrimPrefix(action.ResourceKey, "resources.") - fmt.Fprintf(out, "%s %s\n", action.ActionType.StringShort(), key) + // For import actions, include the import ID + if action.ActionType.IsImport() { + entry := plan.Plan[action.ResourceKey] + fmt.Fprintf(out, "%s %s (id: %s)\n", action.ActionType.StringShort(), key, entry.ImportID) + } else { + fmt.Fprintf(out, "%s %s\n", action.ActionType.StringShort(), key) + } } fmt.Fprintln(out) } // Note, this string should 
not be changed, "bundle deployment migrate" depends on this format: - fmt.Fprintf(out, "Plan: %d to add, %d to change, %d to delete, %d unchanged\n", createCount, updateCount, deleteCount, unchangedCount) + if importCount > 0 { + fmt.Fprintf(out, "Plan: %d to add, %d to change, %d to delete, %d unchanged, %d to import\n", createCount, updateCount, deleteCount, unchangedCount, importCount) + } else { + fmt.Fprintf(out, "Plan: %d to add, %d to change, %d to delete, %d unchanged\n", createCount, updateCount, deleteCount, unchangedCount) + } case flags.OutputJSON: buf, err := json.MarshalIndent(plan, "", " ") if err != nil { From 7b044e9c8292f3dc5bdfc8584d823b9435ddbe0c Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 2 Mar 2026 23:40:25 +0100 Subject: [PATCH 02/13] Add NEXT_CHANGELOG entry for declarative import Co-Authored-By: Claude Opus 4.6 --- NEXT_CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 2cf027cb00..d07dc4ce91 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -8,6 +8,8 @@ ### Bundles +* Add declarative import support for direct deployment engine ([#4630](https://github.com/databricks/cli/pull/4630)). 
+ ### Dependency updates ### API Changes From 5d3089291fdf8a4e14b49594680646309d476059 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Mon, 2 Mar 2026 23:58:38 +0100 Subject: [PATCH 03/13] Add import block validations and error case acceptance tests Add validations for: - Import blocks used with terraform engine (must use direct engine) - Import blocks referencing undefined resources (orphaned imports) Add acceptance tests for: - Recreate blocked for imported resources (changing immutable field) - Orphaned import blocks (import references non-existent resource) - Import blocks rejected with terraform engine Co-Authored-By: Claude Opus 4.6 --- .../import/orphaned-import/databricks.yml | 23 ++++++ .../deploy/import/orphaned-import/hello.py | 1 + .../import/orphaned-import/out.test.toml | 5 ++ .../deploy/import/orphaned-import/output.txt | 6 ++ .../deploy/import/orphaned-import/script | 2 + .../import/recreate-blocked/databricks.yml | 18 +++++ .../deploy/import/recreate-blocked/nb.sql | 2 + .../import/recreate-blocked/out.test.toml | 5 ++ .../deploy/import/recreate-blocked/output.txt | 6 ++ .../deploy/import/recreate-blocked/script | 9 +++ .../terraform-with-import/databricks.yml | 23 ++++++ .../import/terraform-with-import/hello.py | 1 + .../terraform-with-import/out.test.toml | 5 ++ .../import/terraform-with-import/output.txt | 4 ++ .../import/terraform-with-import/script | 2 + .../import/terraform-with-import/test.toml | 3 + bundle/config/import.go | 70 +++++++++++++++++++ bundle/direct/bundle_plan.go | 15 ++++ bundle/phases/deploy.go | 6 ++ 19 files changed, 206 insertions(+) create mode 100644 acceptance/bundle/deploy/import/orphaned-import/databricks.yml create mode 100644 acceptance/bundle/deploy/import/orphaned-import/hello.py create mode 100644 acceptance/bundle/deploy/import/orphaned-import/out.test.toml create mode 100644 acceptance/bundle/deploy/import/orphaned-import/output.txt create mode 100644 acceptance/bundle/deploy/import/orphaned-import/script 
create mode 100644 acceptance/bundle/deploy/import/recreate-blocked/databricks.yml create mode 100644 acceptance/bundle/deploy/import/recreate-blocked/nb.sql create mode 100644 acceptance/bundle/deploy/import/recreate-blocked/out.test.toml create mode 100644 acceptance/bundle/deploy/import/recreate-blocked/output.txt create mode 100644 acceptance/bundle/deploy/import/recreate-blocked/script create mode 100644 acceptance/bundle/deploy/import/terraform-with-import/databricks.yml create mode 100644 acceptance/bundle/deploy/import/terraform-with-import/hello.py create mode 100644 acceptance/bundle/deploy/import/terraform-with-import/out.test.toml create mode 100644 acceptance/bundle/deploy/import/terraform-with-import/output.txt create mode 100644 acceptance/bundle/deploy/import/terraform-with-import/script create mode 100644 acceptance/bundle/deploy/import/terraform-with-import/test.toml diff --git a/acceptance/bundle/deploy/import/orphaned-import/databricks.yml b/acceptance/bundle/deploy/import/orphaned-import/databricks.yml new file mode 100644 index 0000000000..152a901e9e --- /dev/null +++ b/acceptance/bundle/deploy/import/orphaned-import/databricks.yml @@ -0,0 +1,23 @@ +bundle: + name: test-import-orphaned + +resources: + jobs: + foo: + name: test-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + import: + jobs: + bar: + id: "12345" diff --git a/acceptance/bundle/deploy/import/orphaned-import/hello.py b/acceptance/bundle/deploy/import/orphaned-import/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/import/orphaned-import/hello.py @@ -0,0 +1 @@ +print("hello") diff --git a/acceptance/bundle/deploy/import/orphaned-import/out.test.toml b/acceptance/bundle/deploy/import/orphaned-import/out.test.toml new file mode 100644 index 0000000000..19b2c349a3 --- /dev/null +++ 
b/acceptance/bundle/deploy/import/orphaned-import/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/import/orphaned-import/output.txt b/acceptance/bundle/deploy/import/orphaned-import/output.txt new file mode 100644 index 0000000000..92c0ac5099 --- /dev/null +++ b/acceptance/bundle/deploy/import/orphaned-import/output.txt @@ -0,0 +1,6 @@ + +>>> musterr [CLI] bundle plan +Error: import block references undefined resource "resources.jobs.bar"; define it in the resources section or remove the import block + +Error: import validation failed + diff --git a/acceptance/bundle/deploy/import/orphaned-import/script b/acceptance/bundle/deploy/import/orphaned-import/script new file mode 100644 index 0000000000..aeae9c12fa --- /dev/null +++ b/acceptance/bundle/deploy/import/orphaned-import/script @@ -0,0 +1,2 @@ +# Import block references jobs.bar but only jobs.foo exists in resources +trace musterr $CLI bundle plan diff --git a/acceptance/bundle/deploy/import/recreate-blocked/databricks.yml b/acceptance/bundle/deploy/import/recreate-blocked/databricks.yml new file mode 100644 index 0000000000..4e397497ab --- /dev/null +++ b/acceptance/bundle/deploy/import/recreate-blocked/databricks.yml @@ -0,0 +1,18 @@ +bundle: + name: test-import-recreate + +resources: + pipelines: + foo: + name: test-pipeline + storage: /new/storage/path + libraries: + - notebook: + path: ./nb.sql + +targets: + default: + import: + pipelines: + foo: + id: "PLACEHOLDER_PIPELINE_ID" diff --git a/acceptance/bundle/deploy/import/recreate-blocked/nb.sql b/acceptance/bundle/deploy/import/recreate-blocked/nb.sql new file mode 100644 index 0000000000..199ff50788 --- /dev/null +++ b/acceptance/bundle/deploy/import/recreate-blocked/nb.sql @@ -0,0 +1,2 @@ +-- Databricks notebook source +select 1 diff --git a/acceptance/bundle/deploy/import/recreate-blocked/out.test.toml 
b/acceptance/bundle/deploy/import/recreate-blocked/out.test.toml new file mode 100644 index 0000000000..19b2c349a3 --- /dev/null +++ b/acceptance/bundle/deploy/import/recreate-blocked/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/import/recreate-blocked/output.txt b/acceptance/bundle/deploy/import/recreate-blocked/output.txt new file mode 100644 index 0000000000..adc0da4968 --- /dev/null +++ b/acceptance/bundle/deploy/import/recreate-blocked/output.txt @@ -0,0 +1,6 @@ + +>>> musterr [CLI] bundle plan +Error: cannot plan resources.pipelines.foo: cannot recreate resource with import block; this would destroy the existing workspace resource. Remove the import block to allow recreation + +Error: planning failed + diff --git a/acceptance/bundle/deploy/import/recreate-blocked/script b/acceptance/bundle/deploy/import/recreate-blocked/script new file mode 100644 index 0000000000..458ae27e88 --- /dev/null +++ b/acceptance/bundle/deploy/import/recreate-blocked/script @@ -0,0 +1,9 @@ +# Create a pipeline with a different storage path +NEW_PIPELINE_ID=$($CLI pipelines create --json '{"name": "test-pipeline", "storage": "/old/storage/path", "allow_duplicate_names": true, "libraries": [{"notebook": {"path": "/Workspace/test"}}]}' | jq -r .pipeline_id) +add_repl.py $NEW_PIPELINE_ID NEW_PIPELINE_ID + +# Update the databricks.yml with the actual pipeline ID +update_file.py databricks.yml 'PLACEHOLDER_PIPELINE_ID' "$NEW_PIPELINE_ID" + +# Run plan - should fail because changing storage requires recreate which is blocked for imports +trace musterr $CLI bundle plan diff --git a/acceptance/bundle/deploy/import/terraform-with-import/databricks.yml b/acceptance/bundle/deploy/import/terraform-with-import/databricks.yml new file mode 100644 index 0000000000..73d8ae2cf8 --- /dev/null +++ b/acceptance/bundle/deploy/import/terraform-with-import/databricks.yml @@ -0,0 +1,23 @@ +bundle: + 
name: test-import-terraform + +resources: + jobs: + foo: + name: test-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + import: + jobs: + foo: + id: "12345" diff --git a/acceptance/bundle/deploy/import/terraform-with-import/hello.py b/acceptance/bundle/deploy/import/terraform-with-import/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/import/terraform-with-import/hello.py @@ -0,0 +1 @@ +print("hello") diff --git a/acceptance/bundle/deploy/import/terraform-with-import/out.test.toml b/acceptance/bundle/deploy/import/terraform-with-import/out.test.toml new file mode 100644 index 0000000000..a9f28de48a --- /dev/null +++ b/acceptance/bundle/deploy/import/terraform-with-import/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["terraform"] diff --git a/acceptance/bundle/deploy/import/terraform-with-import/output.txt b/acceptance/bundle/deploy/import/terraform-with-import/output.txt new file mode 100644 index 0000000000..af654f0f8e --- /dev/null +++ b/acceptance/bundle/deploy/import/terraform-with-import/output.txt @@ -0,0 +1,4 @@ + +>>> musterr [CLI] bundle plan +Error: import blocks in the target configuration are only supported with the direct deployment engine; set DATABRICKS_BUNDLE_ENGINE=direct or remove the import blocks + diff --git a/acceptance/bundle/deploy/import/terraform-with-import/script b/acceptance/bundle/deploy/import/terraform-with-import/script new file mode 100644 index 0000000000..4b3c7a8ecd --- /dev/null +++ b/acceptance/bundle/deploy/import/terraform-with-import/script @@ -0,0 +1,2 @@ +# Import blocks should error with terraform engine +trace musterr $CLI bundle plan diff --git a/acceptance/bundle/deploy/import/terraform-with-import/test.toml 
b/acceptance/bundle/deploy/import/terraform-with-import/test.toml new file mode 100644 index 0000000000..272dde4b9c --- /dev/null +++ b/acceptance/bundle/deploy/import/terraform-with-import/test.toml @@ -0,0 +1,3 @@ +# Override engine to terraform to test import block rejection +[EnvMatrix] +DATABRICKS_BUNDLE_ENGINE = ["terraform"] diff --git a/bundle/config/import.go b/bundle/config/import.go index ddaa1aee16..7f7e711d39 100644 --- a/bundle/config/import.go +++ b/bundle/config/import.go @@ -127,6 +127,76 @@ func (i *Import) GetImportID(resourceType, resourceName string) string { return "" } +// ForEach calls fn for each import entry in the configuration. +func (i *Import) ForEach(fn func(resourceType, resourceName, importID string)) { + if i == nil { + return + } + for name, r := range i.Jobs { + fn("jobs", name, r.ID) + } + for name, r := range i.Pipelines { + fn("pipelines", name, r.ID) + } + for name, r := range i.Models { + fn("models", name, r.ID) + } + for name, r := range i.Experiments { + fn("experiments", name, r.ID) + } + for name, r := range i.ModelServingEndpoints { + fn("model_serving_endpoints", name, r.ID) + } + for name, r := range i.RegisteredModels { + fn("registered_models", name, r.ID) + } + for name, r := range i.QualityMonitors { + fn("quality_monitors", name, r.ID) + } + for name, r := range i.Schemas { + fn("schemas", name, r.ID) + } + for name, r := range i.Volumes { + fn("volumes", name, r.ID) + } + for name, r := range i.Clusters { + fn("clusters", name, r.ID) + } + for name, r := range i.Dashboards { + fn("dashboards", name, r.ID) + } + for name, r := range i.Apps { + fn("apps", name, r.ID) + } + for name, r := range i.SecretScopes { + fn("secret_scopes", name, r.ID) + } + for name, r := range i.Alerts { + fn("alerts", name, r.ID) + } + for name, r := range i.SqlWarehouses { + fn("sql_warehouses", name, r.ID) + } + for name, r := range i.DatabaseInstances { + fn("database_instances", name, r.ID) + } + for name, r := range 
i.DatabaseCatalogs { + fn("database_catalogs", name, r.ID) + } + for name, r := range i.SyncedDatabaseTables { + fn("synced_database_tables", name, r.ID) + } + for name, r := range i.PostgresProjects { + fn("postgres_projects", name, r.ID) + } + for name, r := range i.PostgresBranches { + fn("postgres_branches", name, r.ID) + } + for name, r := range i.PostgresEndpoints { + fn("postgres_endpoints", name, r.ID) + } +} + // IsEmpty returns true if no imports are defined. func (i *Import) IsEmpty() bool { if i == nil { diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index e74cc3cee0..eedc1fe332 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -125,6 +125,21 @@ func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks return nil, fmt.Errorf("reading config: %w", err) } + // Validate all import entries reference resources defined in config. + var importErrors []string + importConfig.ForEach(func(resourceType, resourceName, importID string) { + key := "resources." + resourceType + "." + resourceName + if _, ok := plan.Plan[key]; !ok { + importErrors = append(importErrors, fmt.Sprintf("import block references undefined resource %q; define it in the resources section or remove the import block", key)) + } + }) + if len(importErrors) > 0 { + for _, msg := range importErrors { + logdiag.LogError(ctx, errors.New(msg)) + } + return nil, errors.New("import validation failed") + } + b.Plan = plan g, err := makeGraph(plan) diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 0812afe39b..403d180c7c 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -213,6 +213,12 @@ func Deploy(ctx context.Context, b *bundle.Bundle, outputHandler sync.OutputHand } func RunPlan(ctx context.Context, b *bundle.Bundle, engine engine.EngineType) *deployplan.Plan { + // Validate import blocks are only used with the direct deployment engine. 
+ if !engine.IsDirect() && b.Target != nil && b.Target.Import != nil && !b.Target.Import.IsEmpty() { + logdiag.LogError(ctx, errors.New("import blocks in the target configuration are only supported with the direct deployment engine; set DATABRICKS_BUNDLE_ENGINE=direct or remove the import blocks")) + return nil + } + if engine.IsDirect() { _, localPath := b.StateFilenameDirect(ctx) var importConfig *config.Import From 39ef7df2a7b13b60f9db4fa3be6f5594da3cae46 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 3 Mar 2026 09:54:42 +0100 Subject: [PATCH 04/13] Regenerate schema and add annotations for Import types Co-Authored-By: Claude Opus 4.6 --- bundle/internal/schema/annotations.yml | 71 +++++++++++++ bundle/schema/jsonschema.json | 137 +++++++++++++++++++++++++ 2 files changed, 208 insertions(+) diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index 6464e26cac..ab11009bea 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -96,6 +96,74 @@ github.com/databricks/cli/bundle/config.Git: The origin URL of the repository. "markdown_description": |- The origin URL of the repository. See [\_](/dev-tools/bundles/settings.md#git). +github.com/databricks/cli/bundle/config.Import: + "alerts": + "description": |- + The alert resources to import, where each key is the resource name in the bundle. + "apps": + "description": |- + The app resources to import, where each key is the resource name in the bundle. + "clusters": + "description": |- + The cluster resources to import, where each key is the resource name in the bundle. + "dashboards": + "description": |- + The dashboard resources to import, where each key is the resource name in the bundle. + "database_catalogs": + "description": |- + The database catalog resources to import, where each key is the resource name in the bundle. 
+ "database_instances": + "description": |- + The database instance resources to import, where each key is the resource name in the bundle. + "experiments": + "description": |- + The experiment resources to import, where each key is the resource name in the bundle. + "jobs": + "description": |- + The job resources to import, where each key is the resource name in the bundle. + "model_serving_endpoints": + "description": |- + The model serving endpoint resources to import, where each key is the resource name in the bundle. + "models": + "description": |- + The model resources to import, where each key is the resource name in the bundle. + "pipelines": + "description": |- + The pipeline resources to import, where each key is the resource name in the bundle. + "postgres_branches": + "description": |- + The Postgres branch resources to import, where each key is the resource name in the bundle. + "postgres_endpoints": + "description": |- + The Postgres endpoint resources to import, where each key is the resource name in the bundle. + "postgres_projects": + "description": |- + The Postgres project resources to import, where each key is the resource name in the bundle. + "quality_monitors": + "description": |- + The quality monitor resources to import, where each key is the resource name in the bundle. + "registered_models": + "description": |- + The registered model resources to import, where each key is the resource name in the bundle. + "schemas": + "description": |- + The schema resources to import, where each key is the resource name in the bundle. + "secret_scopes": + "description": |- + The secret scope resources to import, where each key is the resource name in the bundle. + "sql_warehouses": + "description": |- + The SQL warehouse resources to import, where each key is the resource name in the bundle. + "synced_database_tables": + "description": |- + The synced database table resources to import, where each key is the resource name in the bundle. 
+ "volumes": + "description": |- + The volume resources to import, where each key is the resource name in the bundle. +github.com/databricks/cli/bundle/config.ImportResource: + "id": + "description": |- + The ID of the existing workspace resource to import. github.com/databricks/cli/bundle/config.Lock: "enabled": "description": |- @@ -380,6 +448,9 @@ github.com/databricks/cli/bundle/config.Target: "git": "description": |- The Git version control settings for the target. + "import": + "description": |- + The existing workspace resources to import into bundle management for this target. "mode": "description": |- The deployment mode for the target. diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 0a25ac5a95..f9a06b5cda 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -2632,6 +2632,125 @@ } ] }, + "config.Import": { + "oneOf": [ + { + "type": "object", + "properties": { + "alerts": { + "description": "The alert resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "apps": { + "description": "The app resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "clusters": { + "description": "The cluster resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "dashboards": { + "description": "The dashboard resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "database_catalogs": { + "description": "The database catalog resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "database_instances": { 
+ "description": "The database instance resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "experiments": { + "description": "The experiment resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "jobs": { + "description": "The job resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "model_serving_endpoints": { + "description": "The model serving endpoint resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "models": { + "description": "The model resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "pipelines": { + "description": "The pipeline resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "postgres_branches": { + "description": "The Postgres branch resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "postgres_endpoints": { + "description": "The Postgres endpoint resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "postgres_projects": { + "description": "The Postgres project resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "quality_monitors": { + "description": "The quality monitor resources to import, where 
each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "registered_models": { + "description": "The registered model resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "schemas": { + "description": "The schema resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "secret_scopes": { + "description": "The secret scope resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "sql_warehouses": { + "description": "The SQL warehouse resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "synced_database_tables": { + "description": "The synced database table resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + }, + "volumes": { + "description": "The volume resources to import, where each key is the resource name in the bundle.", + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" + } + }, + "additionalProperties": false + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "config.ImportResource": { + "oneOf": [ + { + "type": "object", + "properties": { + "id": { + "description": "The ID of the existing workspace resource to import.", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "id" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "config.Lock": { "oneOf": [ { @@ 
-2928,6 +3047,10 @@ "description": "The Git version control settings for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git" }, + "import": { + "description": "The existing workspace resources to import into bundle management for this target.", + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Import" + }, "mode": { "description": "The deployment mode for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Mode", @@ -10637,6 +10760,20 @@ } ] }, + "config.ImportResource": { + "oneOf": [ + { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.ImportResource" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "config.Script": { "oneOf": [ { From 47d4e3e2c57d9d7039461f877ff7e21de55e102d Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Tue, 3 Mar 2026 12:58:34 +0100 Subject: [PATCH 05/13] Rename import block to bind block and refactor to dynamic map type Rename the "import" configuration block to "bind" throughout the codebase to better reflect the semantics of binding existing workspace resources into bundle management. Also refactor the Bind type from a hardcoded struct with 21 resource type fields to a dynamic map[string]map[string]BindResource, reducing the implementation from ~225 lines to ~42 lines and eliminating the need to update the type when new resource types are added. 
Co-Authored-By: Claude Opus 4.6 --- NEXT_CHANGELOG.md | 2 +- .../{import => bind}/basic/databricks.yml | 6 +- .../deploy/{import => bind}/basic/hello.py | 0 .../{import => bind}/basic/out.test.toml | 0 .../deploy/{import => bind}/basic/output.txt | 14 +- .../deploy/{import => bind}/basic/script | 0 .../bind-and-update}/databricks.yml | 4 +- .../bind-and-update}/hello.py | 0 .../bind-and-update}/out.test.toml | 0 .../bind-and-update}/output.txt | 8 +- .../bind-and-update}/script | 0 .../block-migrate}/databricks.yml | 4 +- .../block-migrate}/hello.py | 0 .../block-migrate}/out.test.toml | 0 .../deploy/bind/block-migrate/output.txt | 3 + .../bundle/deploy/bind/block-migrate/script | 2 + .../{import => bind}/block-migrate/test.toml | 0 .../orphaned-bind}/databricks.yml | 4 +- .../orphaned-bind}/hello.py | 0 .../orphaned-bind}/out.test.toml | 0 .../deploy/bind/orphaned-bind/output.txt | 6 + .../orphaned-bind}/script | 0 .../recreate-blocked/databricks.yml | 4 +- .../{import => bind}/recreate-blocked/nb.sql | 0 .../recreate-blocked/out.test.toml | 0 .../recreate-blocked/output.txt | 2 +- .../{import => bind}/recreate-blocked/script | 2 +- .../resource-not-found/databricks.yml | 4 +- .../resource-not-found/hello.py | 0 .../resource-not-found/out.test.toml | 0 .../resource-not-found/output.txt | 0 .../resource-not-found/script | 0 .../terraform-with-bind}/databricks.yml | 4 +- .../terraform-with-bind}/hello.py | 0 .../terraform-with-bind}/out.test.toml | 0 .../bind/terraform-with-bind/output.txt | 4 + .../terraform-with-bind}/script | 0 .../terraform-with-bind}/test.toml | 0 .../bundle/deploy/{import => bind}/test.toml | 0 .../deploy/import/block-migrate/output.txt | 3 - .../bundle/deploy/import/block-migrate/script | 2 - .../deploy/import/orphaned-import/output.txt | 6 - .../import/terraform-with-import/output.txt | 4 - bundle/config/bind.go | 42 ++++ bundle/config/import.go | 226 ------------------ bundle/config/target.go | 4 +- bundle/deployplan/action.go | 32 +-- 
bundle/deployplan/plan.go | 2 +- bundle/direct/apply.go | 26 +- bundle/direct/bundle_apply.go | 6 +- bundle/direct/bundle_plan.go | 67 +++--- bundle/internal/schema/annotations.yml | 78 +----- bundle/phases/deploy.go | 12 +- bundle/schema/jsonschema.json | 180 ++++---------- cmd/bundle/deployment/migrate.go | 6 +- cmd/bundle/plan.go | 22 +- 56 files changed, 233 insertions(+), 558 deletions(-) rename acceptance/bundle/deploy/{import => bind}/basic/databricks.yml (84%) rename acceptance/bundle/deploy/{import => bind}/basic/hello.py (100%) rename acceptance/bundle/deploy/{import => bind}/basic/out.test.toml (100%) rename acceptance/bundle/deploy/{import => bind}/basic/output.txt (79%) rename acceptance/bundle/deploy/{import => bind}/basic/script (100%) rename acceptance/bundle/deploy/{import/import-and-update => bind/bind-and-update}/databricks.yml (90%) rename acceptance/bundle/deploy/{import/block-migrate => bind/bind-and-update}/hello.py (100%) rename acceptance/bundle/deploy/{import/block-migrate => bind/bind-and-update}/out.test.toml (100%) rename acceptance/bundle/deploy/{import/import-and-update => bind/bind-and-update}/output.txt (69%) rename acceptance/bundle/deploy/{import/import-and-update => bind/bind-and-update}/script (100%) rename acceptance/bundle/deploy/{import/terraform-with-import => bind/block-migrate}/databricks.yml (89%) rename acceptance/bundle/deploy/{import/import-and-update => bind/block-migrate}/hello.py (100%) rename acceptance/bundle/deploy/{import/import-and-update => bind/block-migrate}/out.test.toml (100%) create mode 100644 acceptance/bundle/deploy/bind/block-migrate/output.txt create mode 100644 acceptance/bundle/deploy/bind/block-migrate/script rename acceptance/bundle/deploy/{import => bind}/block-migrate/test.toml (100%) rename acceptance/bundle/deploy/{import/orphaned-import => bind/orphaned-bind}/databricks.yml (89%) rename acceptance/bundle/deploy/{import/orphaned-import => bind/orphaned-bind}/hello.py (100%) rename 
acceptance/bundle/deploy/{import/orphaned-import => bind/orphaned-bind}/out.test.toml (100%) create mode 100644 acceptance/bundle/deploy/bind/orphaned-bind/output.txt rename acceptance/bundle/deploy/{import/orphaned-import => bind/orphaned-bind}/script (100%) rename acceptance/bundle/deploy/{import => bind}/recreate-blocked/databricks.yml (86%) rename acceptance/bundle/deploy/{import => bind}/recreate-blocked/nb.sql (100%) rename acceptance/bundle/deploy/{import => bind}/recreate-blocked/out.test.toml (100%) rename acceptance/bundle/deploy/{import => bind}/recreate-blocked/output.txt (50%) rename acceptance/bundle/deploy/{import => bind}/recreate-blocked/script (94%) rename acceptance/bundle/deploy/{import => bind}/resource-not-found/databricks.yml (89%) rename acceptance/bundle/deploy/{import => bind}/resource-not-found/hello.py (100%) rename acceptance/bundle/deploy/{import => bind}/resource-not-found/out.test.toml (100%) rename acceptance/bundle/deploy/{import => bind}/resource-not-found/output.txt (100%) rename acceptance/bundle/deploy/{import => bind}/resource-not-found/script (100%) rename acceptance/bundle/deploy/{import/block-migrate => bind/terraform-with-bind}/databricks.yml (88%) rename acceptance/bundle/deploy/{import/terraform-with-import => bind/terraform-with-bind}/hello.py (100%) rename acceptance/bundle/deploy/{import/terraform-with-import => bind/terraform-with-bind}/out.test.toml (100%) create mode 100644 acceptance/bundle/deploy/bind/terraform-with-bind/output.txt rename acceptance/bundle/deploy/{import/terraform-with-import => bind/terraform-with-bind}/script (100%) rename acceptance/bundle/deploy/{import/terraform-with-import => bind/terraform-with-bind}/test.toml (100%) rename acceptance/bundle/deploy/{import => bind}/test.toml (100%) delete mode 100644 acceptance/bundle/deploy/import/block-migrate/output.txt delete mode 100644 acceptance/bundle/deploy/import/block-migrate/script delete mode 100644 
acceptance/bundle/deploy/import/orphaned-import/output.txt delete mode 100644 acceptance/bundle/deploy/import/terraform-with-import/output.txt create mode 100644 bundle/config/bind.go delete mode 100644 bundle/config/import.go diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index d07dc4ce91..4ef3e9df24 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -8,7 +8,7 @@ ### Bundles -* Add declarative import support for direct deployment engine ([#4630](https://github.com/databricks/cli/pull/4630)). +* Add declarative bind support for direct deployment engine ([#4630](https://github.com/databricks/cli/pull/4630)). ### Dependency updates diff --git a/acceptance/bundle/deploy/import/basic/databricks.yml b/acceptance/bundle/deploy/bind/basic/databricks.yml similarity index 84% rename from acceptance/bundle/deploy/import/basic/databricks.yml rename to acceptance/bundle/deploy/bind/basic/databricks.yml index d0a642bfef..a6b280560a 100644 --- a/acceptance/bundle/deploy/import/basic/databricks.yml +++ b/acceptance/bundle/deploy/bind/basic/databricks.yml @@ -1,10 +1,10 @@ bundle: - name: test-import-basic + name: test-bind-basic resources: jobs: foo: - name: test-import-job + name: test-bind-job environments: - environment_key: default spec: @@ -17,7 +17,7 @@ resources: targets: default: - import: + bind: jobs: foo: id: "PLACEHOLDER_JOB_ID" diff --git a/acceptance/bundle/deploy/import/basic/hello.py b/acceptance/bundle/deploy/bind/basic/hello.py similarity index 100% rename from acceptance/bundle/deploy/import/basic/hello.py rename to acceptance/bundle/deploy/bind/basic/hello.py diff --git a/acceptance/bundle/deploy/import/basic/out.test.toml b/acceptance/bundle/deploy/bind/basic/out.test.toml similarity index 100% rename from acceptance/bundle/deploy/import/basic/out.test.toml rename to acceptance/bundle/deploy/bind/basic/out.test.toml diff --git a/acceptance/bundle/deploy/import/basic/output.txt b/acceptance/bundle/deploy/bind/basic/output.txt similarity index 79% 
rename from acceptance/bundle/deploy/import/basic/output.txt rename to acceptance/bundle/deploy/bind/basic/output.txt index ccc962c120..8e4787d6c0 100644 --- a/acceptance/bundle/deploy/import/basic/output.txt +++ b/acceptance/bundle/deploy/bind/basic/output.txt @@ -1,11 +1,11 @@ >>> [CLI] bundle plan -import jobs.foo (id: [NEW_JOB_ID]) +bind jobs.foo (id: [NEW_JOB_ID]) -Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged, 1 to import +Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged, 1 to bind >>> [CLI] bundle deploy --auto-approve -Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-import-basic/default/files... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bind-basic/default/files... Deploying resources... Updating deployment state... Deployment complete! @@ -25,7 +25,7 @@ Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged "state": { "deployment": { "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-import-basic/default/state/metadata.json" + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bind-basic/default/state/metadata.json" }, "edit_mode": "UI_LOCKED", "environments": [ @@ -38,7 +38,7 @@ Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged ], "format": "MULTI_TASK", "max_concurrent_runs": 1, - "name": "test-import-job", + "name": "test-bind-job", "queue": { "enabled": true }, @@ -46,7 +46,7 @@ Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged { "environment_key": "default", "spark_python_task": { - "python_file": "/Workspace/Users/[USERNAME]/.bundle/test-import-basic/default/files/hello.py" + "python_file": "/Workspace/Users/[USERNAME]/.bundle/test-bind-basic/default/files/hello.py" }, "task_key": "my_task" } @@ -60,7 +60,7 @@ Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged The following resources will be deleted: delete resources.jobs.foo -All files and directories at the following location will be deleted: 
/Workspace/Users/[USERNAME]/.bundle/test-import-basic/default +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bind-basic/default Deleting files... Destroy complete! diff --git a/acceptance/bundle/deploy/import/basic/script b/acceptance/bundle/deploy/bind/basic/script similarity index 100% rename from acceptance/bundle/deploy/import/basic/script rename to acceptance/bundle/deploy/bind/basic/script diff --git a/acceptance/bundle/deploy/import/import-and-update/databricks.yml b/acceptance/bundle/deploy/bind/bind-and-update/databricks.yml similarity index 90% rename from acceptance/bundle/deploy/import/import-and-update/databricks.yml rename to acceptance/bundle/deploy/bind/bind-and-update/databricks.yml index 584bf3d979..3b813772d0 100644 --- a/acceptance/bundle/deploy/import/import-and-update/databricks.yml +++ b/acceptance/bundle/deploy/bind/bind-and-update/databricks.yml @@ -1,5 +1,5 @@ bundle: - name: test-import-update + name: test-bind-update resources: jobs: @@ -17,7 +17,7 @@ resources: targets: default: - import: + bind: jobs: foo: id: "PLACEHOLDER_JOB_ID" diff --git a/acceptance/bundle/deploy/import/block-migrate/hello.py b/acceptance/bundle/deploy/bind/bind-and-update/hello.py similarity index 100% rename from acceptance/bundle/deploy/import/block-migrate/hello.py rename to acceptance/bundle/deploy/bind/bind-and-update/hello.py diff --git a/acceptance/bundle/deploy/import/block-migrate/out.test.toml b/acceptance/bundle/deploy/bind/bind-and-update/out.test.toml similarity index 100% rename from acceptance/bundle/deploy/import/block-migrate/out.test.toml rename to acceptance/bundle/deploy/bind/bind-and-update/out.test.toml diff --git a/acceptance/bundle/deploy/import/import-and-update/output.txt b/acceptance/bundle/deploy/bind/bind-and-update/output.txt similarity index 69% rename from acceptance/bundle/deploy/import/import-and-update/output.txt rename to 
acceptance/bundle/deploy/bind/bind-and-update/output.txt index 61be2f37f0..b45e2607b8 100644 --- a/acceptance/bundle/deploy/import/import-and-update/output.txt +++ b/acceptance/bundle/deploy/bind/bind-and-update/output.txt @@ -1,11 +1,11 @@ >>> [CLI] bundle plan -import jobs.foo (id: [NEW_JOB_ID]) +bind jobs.foo (id: [NEW_JOB_ID]) -Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged, 1 to import +Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged, 1 to bind >>> [CLI] bundle deploy --auto-approve -Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-import-update/default/files... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bind-update/default/files... Deploying resources... Updating deployment state... Deployment complete! @@ -17,7 +17,7 @@ updated-job-name The following resources will be deleted: delete resources.jobs.foo -All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-import-update/default +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bind-update/default Deleting files... Destroy complete! 
diff --git a/acceptance/bundle/deploy/import/import-and-update/script b/acceptance/bundle/deploy/bind/bind-and-update/script similarity index 100% rename from acceptance/bundle/deploy/import/import-and-update/script rename to acceptance/bundle/deploy/bind/bind-and-update/script diff --git a/acceptance/bundle/deploy/import/terraform-with-import/databricks.yml b/acceptance/bundle/deploy/bind/block-migrate/databricks.yml similarity index 89% rename from acceptance/bundle/deploy/import/terraform-with-import/databricks.yml rename to acceptance/bundle/deploy/bind/block-migrate/databricks.yml index 73d8ae2cf8..16cc8b2be6 100644 --- a/acceptance/bundle/deploy/import/terraform-with-import/databricks.yml +++ b/acceptance/bundle/deploy/bind/block-migrate/databricks.yml @@ -1,5 +1,5 @@ bundle: - name: test-import-terraform + name: test-bind-block-migrate resources: jobs: @@ -17,7 +17,7 @@ resources: targets: default: - import: + bind: jobs: foo: id: "12345" diff --git a/acceptance/bundle/deploy/import/import-and-update/hello.py b/acceptance/bundle/deploy/bind/block-migrate/hello.py similarity index 100% rename from acceptance/bundle/deploy/import/import-and-update/hello.py rename to acceptance/bundle/deploy/bind/block-migrate/hello.py diff --git a/acceptance/bundle/deploy/import/import-and-update/out.test.toml b/acceptance/bundle/deploy/bind/block-migrate/out.test.toml similarity index 100% rename from acceptance/bundle/deploy/import/import-and-update/out.test.toml rename to acceptance/bundle/deploy/bind/block-migrate/out.test.toml diff --git a/acceptance/bundle/deploy/bind/block-migrate/output.txt b/acceptance/bundle/deploy/bind/block-migrate/output.txt new file mode 100644 index 0000000000..6bc78e4ace --- /dev/null +++ b/acceptance/bundle/deploy/bind/block-migrate/output.txt @@ -0,0 +1,3 @@ + +>>> musterr [CLI] bundle deployment migrate +Error: cannot run 'bundle deployment migrate' when bind blocks are defined in the target configuration; bind blocks are only supported with 
the direct deployment engine diff --git a/acceptance/bundle/deploy/bind/block-migrate/script b/acceptance/bundle/deploy/bind/block-migrate/script new file mode 100644 index 0000000000..64c538ec31 --- /dev/null +++ b/acceptance/bundle/deploy/bind/block-migrate/script @@ -0,0 +1,2 @@ +# Try to run migration with bind blocks - should fail +trace musterr $CLI bundle deployment migrate diff --git a/acceptance/bundle/deploy/import/block-migrate/test.toml b/acceptance/bundle/deploy/bind/block-migrate/test.toml similarity index 100% rename from acceptance/bundle/deploy/import/block-migrate/test.toml rename to acceptance/bundle/deploy/bind/block-migrate/test.toml diff --git a/acceptance/bundle/deploy/import/orphaned-import/databricks.yml b/acceptance/bundle/deploy/bind/orphaned-bind/databricks.yml similarity index 89% rename from acceptance/bundle/deploy/import/orphaned-import/databricks.yml rename to acceptance/bundle/deploy/bind/orphaned-bind/databricks.yml index 152a901e9e..26d0272b26 100644 --- a/acceptance/bundle/deploy/import/orphaned-import/databricks.yml +++ b/acceptance/bundle/deploy/bind/orphaned-bind/databricks.yml @@ -1,5 +1,5 @@ bundle: - name: test-import-orphaned + name: test-bind-orphaned resources: jobs: @@ -17,7 +17,7 @@ resources: targets: default: - import: + bind: jobs: bar: id: "12345" diff --git a/acceptance/bundle/deploy/import/orphaned-import/hello.py b/acceptance/bundle/deploy/bind/orphaned-bind/hello.py similarity index 100% rename from acceptance/bundle/deploy/import/orphaned-import/hello.py rename to acceptance/bundle/deploy/bind/orphaned-bind/hello.py diff --git a/acceptance/bundle/deploy/import/orphaned-import/out.test.toml b/acceptance/bundle/deploy/bind/orphaned-bind/out.test.toml similarity index 100% rename from acceptance/bundle/deploy/import/orphaned-import/out.test.toml rename to acceptance/bundle/deploy/bind/orphaned-bind/out.test.toml diff --git a/acceptance/bundle/deploy/bind/orphaned-bind/output.txt 
b/acceptance/bundle/deploy/bind/orphaned-bind/output.txt new file mode 100644 index 0000000000..e7a32938ea --- /dev/null +++ b/acceptance/bundle/deploy/bind/orphaned-bind/output.txt @@ -0,0 +1,6 @@ + +>>> musterr [CLI] bundle plan +Error: bind block references undefined resource "resources.jobs.bar"; define it in the resources section or remove the bind block + +Error: bind validation failed + diff --git a/acceptance/bundle/deploy/import/orphaned-import/script b/acceptance/bundle/deploy/bind/orphaned-bind/script similarity index 100% rename from acceptance/bundle/deploy/import/orphaned-import/script rename to acceptance/bundle/deploy/bind/orphaned-bind/script diff --git a/acceptance/bundle/deploy/import/recreate-blocked/databricks.yml b/acceptance/bundle/deploy/bind/recreate-blocked/databricks.yml similarity index 86% rename from acceptance/bundle/deploy/import/recreate-blocked/databricks.yml rename to acceptance/bundle/deploy/bind/recreate-blocked/databricks.yml index 4e397497ab..ddad62da6a 100644 --- a/acceptance/bundle/deploy/import/recreate-blocked/databricks.yml +++ b/acceptance/bundle/deploy/bind/recreate-blocked/databricks.yml @@ -1,5 +1,5 @@ bundle: - name: test-import-recreate + name: test-bind-recreate resources: pipelines: @@ -12,7 +12,7 @@ resources: targets: default: - import: + bind: pipelines: foo: id: "PLACEHOLDER_PIPELINE_ID" diff --git a/acceptance/bundle/deploy/import/recreate-blocked/nb.sql b/acceptance/bundle/deploy/bind/recreate-blocked/nb.sql similarity index 100% rename from acceptance/bundle/deploy/import/recreate-blocked/nb.sql rename to acceptance/bundle/deploy/bind/recreate-blocked/nb.sql diff --git a/acceptance/bundle/deploy/import/recreate-blocked/out.test.toml b/acceptance/bundle/deploy/bind/recreate-blocked/out.test.toml similarity index 100% rename from acceptance/bundle/deploy/import/recreate-blocked/out.test.toml rename to acceptance/bundle/deploy/bind/recreate-blocked/out.test.toml diff --git 
a/acceptance/bundle/deploy/import/recreate-blocked/output.txt b/acceptance/bundle/deploy/bind/recreate-blocked/output.txt similarity index 50% rename from acceptance/bundle/deploy/import/recreate-blocked/output.txt rename to acceptance/bundle/deploy/bind/recreate-blocked/output.txt index adc0da4968..4a39ec4441 100644 --- a/acceptance/bundle/deploy/import/recreate-blocked/output.txt +++ b/acceptance/bundle/deploy/bind/recreate-blocked/output.txt @@ -1,6 +1,6 @@ >>> musterr [CLI] bundle plan -Error: cannot plan resources.pipelines.foo: cannot recreate resource with import block; this would destroy the existing workspace resource. Remove the import block to allow recreation +Error: cannot plan resources.pipelines.foo: cannot recreate resource with bind block; this would destroy the existing workspace resource. Remove the bind block to allow recreation Error: planning failed diff --git a/acceptance/bundle/deploy/import/recreate-blocked/script b/acceptance/bundle/deploy/bind/recreate-blocked/script similarity index 94% rename from acceptance/bundle/deploy/import/recreate-blocked/script rename to acceptance/bundle/deploy/bind/recreate-blocked/script index 458ae27e88..f28f6b94ac 100644 --- a/acceptance/bundle/deploy/import/recreate-blocked/script +++ b/acceptance/bundle/deploy/bind/recreate-blocked/script @@ -5,5 +5,5 @@ add_repl.py $NEW_PIPELINE_ID NEW_PIPELINE_ID # Update the databricks.yml with the actual pipeline ID update_file.py databricks.yml 'PLACEHOLDER_PIPELINE_ID' "$NEW_PIPELINE_ID" -# Run plan - should fail because changing storage requires recreate which is blocked for imports +# Run plan - should fail because changing storage requires recreate which is blocked for binds trace musterr $CLI bundle plan diff --git a/acceptance/bundle/deploy/import/resource-not-found/databricks.yml b/acceptance/bundle/deploy/bind/resource-not-found/databricks.yml similarity index 89% rename from acceptance/bundle/deploy/import/resource-not-found/databricks.yml rename to 
acceptance/bundle/deploy/bind/resource-not-found/databricks.yml index 7efb2bc8d0..e02258511d 100644 --- a/acceptance/bundle/deploy/import/resource-not-found/databricks.yml +++ b/acceptance/bundle/deploy/bind/resource-not-found/databricks.yml @@ -1,5 +1,5 @@ bundle: - name: test-import-not-found + name: test-bind-not-found resources: jobs: @@ -17,7 +17,7 @@ resources: targets: default: - import: + bind: jobs: foo: id: "999999999" diff --git a/acceptance/bundle/deploy/import/resource-not-found/hello.py b/acceptance/bundle/deploy/bind/resource-not-found/hello.py similarity index 100% rename from acceptance/bundle/deploy/import/resource-not-found/hello.py rename to acceptance/bundle/deploy/bind/resource-not-found/hello.py diff --git a/acceptance/bundle/deploy/import/resource-not-found/out.test.toml b/acceptance/bundle/deploy/bind/resource-not-found/out.test.toml similarity index 100% rename from acceptance/bundle/deploy/import/resource-not-found/out.test.toml rename to acceptance/bundle/deploy/bind/resource-not-found/out.test.toml diff --git a/acceptance/bundle/deploy/import/resource-not-found/output.txt b/acceptance/bundle/deploy/bind/resource-not-found/output.txt similarity index 100% rename from acceptance/bundle/deploy/import/resource-not-found/output.txt rename to acceptance/bundle/deploy/bind/resource-not-found/output.txt diff --git a/acceptance/bundle/deploy/import/resource-not-found/script b/acceptance/bundle/deploy/bind/resource-not-found/script similarity index 100% rename from acceptance/bundle/deploy/import/resource-not-found/script rename to acceptance/bundle/deploy/bind/resource-not-found/script diff --git a/acceptance/bundle/deploy/import/block-migrate/databricks.yml b/acceptance/bundle/deploy/bind/terraform-with-bind/databricks.yml similarity index 88% rename from acceptance/bundle/deploy/import/block-migrate/databricks.yml rename to acceptance/bundle/deploy/bind/terraform-with-bind/databricks.yml index 75fd19e4c9..9845092834 100644 --- 
a/acceptance/bundle/deploy/import/block-migrate/databricks.yml +++ b/acceptance/bundle/deploy/bind/terraform-with-bind/databricks.yml @@ -1,5 +1,5 @@ bundle: - name: test-import-block-migrate + name: test-bind-terraform resources: jobs: @@ -17,7 +17,7 @@ resources: targets: default: - import: + bind: jobs: foo: id: "12345" diff --git a/acceptance/bundle/deploy/import/terraform-with-import/hello.py b/acceptance/bundle/deploy/bind/terraform-with-bind/hello.py similarity index 100% rename from acceptance/bundle/deploy/import/terraform-with-import/hello.py rename to acceptance/bundle/deploy/bind/terraform-with-bind/hello.py diff --git a/acceptance/bundle/deploy/import/terraform-with-import/out.test.toml b/acceptance/bundle/deploy/bind/terraform-with-bind/out.test.toml similarity index 100% rename from acceptance/bundle/deploy/import/terraform-with-import/out.test.toml rename to acceptance/bundle/deploy/bind/terraform-with-bind/out.test.toml diff --git a/acceptance/bundle/deploy/bind/terraform-with-bind/output.txt b/acceptance/bundle/deploy/bind/terraform-with-bind/output.txt new file mode 100644 index 0000000000..62c0d2b6c5 --- /dev/null +++ b/acceptance/bundle/deploy/bind/terraform-with-bind/output.txt @@ -0,0 +1,4 @@ + +>>> musterr [CLI] bundle plan +Error: bind blocks in the target configuration are only supported with the direct deployment engine; set DATABRICKS_BUNDLE_ENGINE=direct or remove the bind blocks + diff --git a/acceptance/bundle/deploy/import/terraform-with-import/script b/acceptance/bundle/deploy/bind/terraform-with-bind/script similarity index 100% rename from acceptance/bundle/deploy/import/terraform-with-import/script rename to acceptance/bundle/deploy/bind/terraform-with-bind/script diff --git a/acceptance/bundle/deploy/import/terraform-with-import/test.toml b/acceptance/bundle/deploy/bind/terraform-with-bind/test.toml similarity index 100% rename from acceptance/bundle/deploy/import/terraform-with-import/test.toml rename to 
acceptance/bundle/deploy/bind/terraform-with-bind/test.toml diff --git a/acceptance/bundle/deploy/import/test.toml b/acceptance/bundle/deploy/bind/test.toml similarity index 100% rename from acceptance/bundle/deploy/import/test.toml rename to acceptance/bundle/deploy/bind/test.toml diff --git a/acceptance/bundle/deploy/import/block-migrate/output.txt b/acceptance/bundle/deploy/import/block-migrate/output.txt deleted file mode 100644 index df257a1bfb..0000000000 --- a/acceptance/bundle/deploy/import/block-migrate/output.txt +++ /dev/null @@ -1,3 +0,0 @@ - ->>> musterr [CLI] bundle deployment migrate -Error: cannot run 'bundle deployment migrate' when import blocks are defined in the target configuration; import blocks are only supported with the direct deployment engine diff --git a/acceptance/bundle/deploy/import/block-migrate/script b/acceptance/bundle/deploy/import/block-migrate/script deleted file mode 100644 index 15528a0e4b..0000000000 --- a/acceptance/bundle/deploy/import/block-migrate/script +++ /dev/null @@ -1,2 +0,0 @@ -# Try to run migration with import blocks - should fail -trace musterr $CLI bundle deployment migrate diff --git a/acceptance/bundle/deploy/import/orphaned-import/output.txt b/acceptance/bundle/deploy/import/orphaned-import/output.txt deleted file mode 100644 index 92c0ac5099..0000000000 --- a/acceptance/bundle/deploy/import/orphaned-import/output.txt +++ /dev/null @@ -1,6 +0,0 @@ - ->>> musterr [CLI] bundle plan -Error: import block references undefined resource "resources.jobs.bar"; define it in the resources section or remove the import block - -Error: import validation failed - diff --git a/acceptance/bundle/deploy/import/terraform-with-import/output.txt b/acceptance/bundle/deploy/import/terraform-with-import/output.txt deleted file mode 100644 index af654f0f8e..0000000000 --- a/acceptance/bundle/deploy/import/terraform-with-import/output.txt +++ /dev/null @@ -1,4 +0,0 @@ - ->>> musterr [CLI] bundle plan -Error: import blocks in the 
target configuration are only supported with the direct deployment engine; set DATABRICKS_BUNDLE_ENGINE=direct or remove the import blocks - diff --git a/bundle/config/bind.go b/bundle/config/bind.go new file mode 100644 index 0000000000..923b7830a1 --- /dev/null +++ b/bundle/config/bind.go @@ -0,0 +1,42 @@ +package config + +// BindResource represents a single resource to bind with its workspace ID. +type BindResource struct { + ID string `json:"id"` +} + +// Bind defines resources to bind at the target level. +// Resources listed here will be bound to the bundle at deploy time. +// This field is only valid for the direct deployment engine. +// +// The outer map key is the resource type (e.g., "jobs", "pipelines"), +// and the inner map key is the resource name in the bundle configuration. +type Bind map[string]map[string]BindResource + +// GetBindID returns the bind ID for a given resource type and name. +// Returns empty string if no bind is defined for the resource. +func (i Bind) GetBindID(resourceType, resourceName string) string { + if r, ok := i[resourceType][resourceName]; ok { + return r.ID + } + return "" +} + +// ForEach calls fn for each bind entry in the configuration. +func (i Bind) ForEach(fn func(resourceType, resourceName, bindID string)) { + for resourceType, resources := range i { + for name, r := range resources { + fn(resourceType, name, r.ID) + } + } +} + +// IsEmpty returns true if no binds are defined. +func (i Bind) IsEmpty() bool { + for _, resources := range i { + if len(resources) > 0 { + return false + } + } + return true +} diff --git a/bundle/config/import.go b/bundle/config/import.go deleted file mode 100644 index 7f7e711d39..0000000000 --- a/bundle/config/import.go +++ /dev/null @@ -1,226 +0,0 @@ -package config - -// ImportResource represents a single resource to import with its workspace ID. -type ImportResource struct { - ID string `json:"id"` -} - -// Import defines resources to import at the target level. 
-// Resources listed here will be bound to the bundle at deploy time. -// This field is only valid for the direct deployment engine. -type Import struct { - Jobs map[string]ImportResource `json:"jobs,omitempty"` - Pipelines map[string]ImportResource `json:"pipelines,omitempty"` - Models map[string]ImportResource `json:"models,omitempty"` - Experiments map[string]ImportResource `json:"experiments,omitempty"` - ModelServingEndpoints map[string]ImportResource `json:"model_serving_endpoints,omitempty"` - RegisteredModels map[string]ImportResource `json:"registered_models,omitempty"` - QualityMonitors map[string]ImportResource `json:"quality_monitors,omitempty"` - Schemas map[string]ImportResource `json:"schemas,omitempty"` - Volumes map[string]ImportResource `json:"volumes,omitempty"` - Clusters map[string]ImportResource `json:"clusters,omitempty"` - Dashboards map[string]ImportResource `json:"dashboards,omitempty"` - Apps map[string]ImportResource `json:"apps,omitempty"` - SecretScopes map[string]ImportResource `json:"secret_scopes,omitempty"` - Alerts map[string]ImportResource `json:"alerts,omitempty"` - SqlWarehouses map[string]ImportResource `json:"sql_warehouses,omitempty"` - DatabaseInstances map[string]ImportResource `json:"database_instances,omitempty"` - DatabaseCatalogs map[string]ImportResource `json:"database_catalogs,omitempty"` - SyncedDatabaseTables map[string]ImportResource `json:"synced_database_tables,omitempty"` - PostgresProjects map[string]ImportResource `json:"postgres_projects,omitempty"` - PostgresBranches map[string]ImportResource `json:"postgres_branches,omitempty"` - PostgresEndpoints map[string]ImportResource `json:"postgres_endpoints,omitempty"` -} - -// GetImportID returns the import ID for a given resource type and name. -// Returns empty string if no import is defined for the resource. 
-func (i *Import) GetImportID(resourceType, resourceName string) string { - if i == nil { - return "" - } - switch resourceType { - case "jobs": - if r, ok := i.Jobs[resourceName]; ok { - return r.ID - } - case "pipelines": - if r, ok := i.Pipelines[resourceName]; ok { - return r.ID - } - case "models": - if r, ok := i.Models[resourceName]; ok { - return r.ID - } - case "experiments": - if r, ok := i.Experiments[resourceName]; ok { - return r.ID - } - case "model_serving_endpoints": - if r, ok := i.ModelServingEndpoints[resourceName]; ok { - return r.ID - } - case "registered_models": - if r, ok := i.RegisteredModels[resourceName]; ok { - return r.ID - } - case "quality_monitors": - if r, ok := i.QualityMonitors[resourceName]; ok { - return r.ID - } - case "schemas": - if r, ok := i.Schemas[resourceName]; ok { - return r.ID - } - case "volumes": - if r, ok := i.Volumes[resourceName]; ok { - return r.ID - } - case "clusters": - if r, ok := i.Clusters[resourceName]; ok { - return r.ID - } - case "dashboards": - if r, ok := i.Dashboards[resourceName]; ok { - return r.ID - } - case "apps": - if r, ok := i.Apps[resourceName]; ok { - return r.ID - } - case "secret_scopes": - if r, ok := i.SecretScopes[resourceName]; ok { - return r.ID - } - case "alerts": - if r, ok := i.Alerts[resourceName]; ok { - return r.ID - } - case "sql_warehouses": - if r, ok := i.SqlWarehouses[resourceName]; ok { - return r.ID - } - case "database_instances": - if r, ok := i.DatabaseInstances[resourceName]; ok { - return r.ID - } - case "database_catalogs": - if r, ok := i.DatabaseCatalogs[resourceName]; ok { - return r.ID - } - case "synced_database_tables": - if r, ok := i.SyncedDatabaseTables[resourceName]; ok { - return r.ID - } - case "postgres_projects": - if r, ok := i.PostgresProjects[resourceName]; ok { - return r.ID - } - case "postgres_branches": - if r, ok := i.PostgresBranches[resourceName]; ok { - return r.ID - } - case "postgres_endpoints": - if r, ok := 
i.PostgresEndpoints[resourceName]; ok { - return r.ID - } - } - return "" -} - -// ForEach calls fn for each import entry in the configuration. -func (i *Import) ForEach(fn func(resourceType, resourceName, importID string)) { - if i == nil { - return - } - for name, r := range i.Jobs { - fn("jobs", name, r.ID) - } - for name, r := range i.Pipelines { - fn("pipelines", name, r.ID) - } - for name, r := range i.Models { - fn("models", name, r.ID) - } - for name, r := range i.Experiments { - fn("experiments", name, r.ID) - } - for name, r := range i.ModelServingEndpoints { - fn("model_serving_endpoints", name, r.ID) - } - for name, r := range i.RegisteredModels { - fn("registered_models", name, r.ID) - } - for name, r := range i.QualityMonitors { - fn("quality_monitors", name, r.ID) - } - for name, r := range i.Schemas { - fn("schemas", name, r.ID) - } - for name, r := range i.Volumes { - fn("volumes", name, r.ID) - } - for name, r := range i.Clusters { - fn("clusters", name, r.ID) - } - for name, r := range i.Dashboards { - fn("dashboards", name, r.ID) - } - for name, r := range i.Apps { - fn("apps", name, r.ID) - } - for name, r := range i.SecretScopes { - fn("secret_scopes", name, r.ID) - } - for name, r := range i.Alerts { - fn("alerts", name, r.ID) - } - for name, r := range i.SqlWarehouses { - fn("sql_warehouses", name, r.ID) - } - for name, r := range i.DatabaseInstances { - fn("database_instances", name, r.ID) - } - for name, r := range i.DatabaseCatalogs { - fn("database_catalogs", name, r.ID) - } - for name, r := range i.SyncedDatabaseTables { - fn("synced_database_tables", name, r.ID) - } - for name, r := range i.PostgresProjects { - fn("postgres_projects", name, r.ID) - } - for name, r := range i.PostgresBranches { - fn("postgres_branches", name, r.ID) - } - for name, r := range i.PostgresEndpoints { - fn("postgres_endpoints", name, r.ID) - } -} - -// IsEmpty returns true if no imports are defined. 
-func (i *Import) IsEmpty() bool { - if i == nil { - return true - } - return len(i.Jobs) == 0 && - len(i.Pipelines) == 0 && - len(i.Models) == 0 && - len(i.Experiments) == 0 && - len(i.ModelServingEndpoints) == 0 && - len(i.RegisteredModels) == 0 && - len(i.QualityMonitors) == 0 && - len(i.Schemas) == 0 && - len(i.Volumes) == 0 && - len(i.Clusters) == 0 && - len(i.Dashboards) == 0 && - len(i.Apps) == 0 && - len(i.SecretScopes) == 0 && - len(i.Alerts) == 0 && - len(i.SqlWarehouses) == 0 && - len(i.DatabaseInstances) == 0 && - len(i.DatabaseCatalogs) == 0 && - len(i.SyncedDatabaseTables) == 0 && - len(i.PostgresProjects) == 0 && - len(i.PostgresBranches) == 0 && - len(i.PostgresEndpoints) == 0 -} diff --git a/bundle/config/target.go b/bundle/config/target.go index e6c4eb2e9b..a58e5191ee 100644 --- a/bundle/config/target.go +++ b/bundle/config/target.go @@ -70,10 +70,10 @@ type Target struct { Permissions []resources.Permission `json:"permissions,omitempty"` - // Import specifies existing workspace resources to import into bundle management. + // Bind specifies existing workspace resources to bind into bundle management. // Resources listed here will be bound to the bundle at deploy time. // This field is only valid for the direct deployment engine. 
- Import *Import `json:"import,omitempty"` + Bind Bind `json:"bind,omitempty"` } const ( diff --git a/bundle/deployplan/action.go b/bundle/deployplan/action.go index b1e817922d..f26a6819ee 100644 --- a/bundle/deployplan/action.go +++ b/bundle/deployplan/action.go @@ -36,35 +36,35 @@ const ( Create ActionType = "create" Recreate ActionType = "recreate" Delete ActionType = "delete" - Import ActionType = "import" - ImportAndUpdate ActionType = "import_and_update" + Bind ActionType = "bind" + BindAndUpdate ActionType = "bind_and_update" ) var actionOrder = map[ActionType]int{ - Undefined: 0, - Skip: 1, - Resize: 2, - Update: 3, - UpdateWithID: 4, - Create: 5, - Recreate: 6, - Delete: 7, - Import: 8, - ImportAndUpdate: 9, + Undefined: 0, + Skip: 1, + Resize: 2, + Update: 3, + UpdateWithID: 4, + Create: 5, + Recreate: 6, + Delete: 7, + Bind: 8, + BindAndUpdate: 9, } func (a ActionType) KeepsID() bool { switch a { - case Create, UpdateWithID, Recreate, Delete, Import, ImportAndUpdate: + case Create, UpdateWithID, Recreate, Delete, Bind, BindAndUpdate: return false default: return true } } -// IsImport returns true if the action is an import action. -func (a ActionType) IsImport() bool { - return a == Import || a == ImportAndUpdate +// IsBind returns true if the action is a bind action. 
+func (a ActionType) IsBind() bool { + return a == Bind || a == BindAndUpdate } // StringShort short version of action string, without suffix diff --git a/bundle/deployplan/plan.go b/bundle/deployplan/plan.go index 9c2ced4376..b9c7736596 100644 --- a/bundle/deployplan/plan.go +++ b/bundle/deployplan/plan.go @@ -74,7 +74,7 @@ func LoadPlanFromFile(path string) (*Plan, error) { type PlanEntry struct { ID string `json:"id,omitempty"` - ImportID string `json:"import_id,omitempty"` + BindID string `json:"bind_id,omitempty"` DependsOn []DependsOnEntry `json:"depends_on,omitempty"` Action ActionType `json:"action,omitempty"` NewState *structvar.StructVarJSON `json:"new_state,omitempty"` diff --git a/bundle/direct/apply.go b/bundle/direct/apply.go index f90e2eabf0..c73104b402 100644 --- a/bundle/direct/apply.go +++ b/bundle/direct/apply.go @@ -56,19 +56,19 @@ func (d *DeploymentUnit) Deploy(ctx context.Context, db *dstate.DeploymentState, } } -// Import handles importing an existing workspace resource into the bundle state. -// For Import action, it just saves the state with the import ID. -// For ImportAndUpdate action, it also applies config changes to the resource. -func (d *DeploymentUnit) Import(ctx context.Context, db *dstate.DeploymentState, importID string, newState any, actionType deployplan.ActionType, changes deployplan.Changes) error { - if actionType == deployplan.ImportAndUpdate { - // Apply updates to the imported resource +// DeclarativeBind handles binding an existing workspace resource into the bundle state. +// For Bind action, it just saves the state with the bind ID. +// For BindAndUpdate action, it also applies config changes to the resource. 
+func (d *DeploymentUnit) DeclarativeBind(ctx context.Context, db *dstate.DeploymentState, bindID string, newState any, actionType deployplan.ActionType, changes deployplan.Changes) error { + if actionType == deployplan.BindAndUpdate { + // Apply updates to the bound resource if !d.Adapter.HasDoUpdate() { return fmt.Errorf("internal error: DoUpdate not implemented for resource %s", d.ResourceKey) } - remoteState, err := d.Adapter.DoUpdate(ctx, importID, newState, changes) + remoteState, err := d.Adapter.DoUpdate(ctx, bindID, newState, changes) if err != nil { - return fmt.Errorf("updating imported resource id=%s: %w", importID, err) + return fmt.Errorf("updating bound resource id=%s: %w", bindID, err) } err = d.SetRemoteState(remoteState) @@ -76,15 +76,15 @@ func (d *DeploymentUnit) Import(ctx context.Context, db *dstate.DeploymentState, return err } - log.Infof(ctx, "Imported and updated %s id=%s", d.ResourceKey, importID) + log.Infof(ctx, "Bound and updated %s id=%s", d.ResourceKey, bindID) } else { - log.Infof(ctx, "Imported %s id=%s", d.ResourceKey, importID) + log.Infof(ctx, "Bound %s id=%s", d.ResourceKey, bindID) } - // Save state with the imported ID - err := db.SaveState(d.ResourceKey, importID, newState, d.DependsOn) + // Save state with the bound ID + err := db.SaveState(d.ResourceKey, bindID, newState, d.DependsOn) if err != nil { - return fmt.Errorf("saving state id=%s: %w", importID, err) + return fmt.Errorf("saving state id=%s: %w", bindID, err) } return nil diff --git a/bundle/direct/bundle_apply.go b/bundle/direct/bundle_apply.go index 902dc4a47f..c5261b3ed8 100644 --- a/bundle/direct/bundle_apply.go +++ b/bundle/direct/bundle_apply.go @@ -120,9 +120,9 @@ func (b *DeploymentBundle) Apply(ctx context.Context, client *databricks.Workspa return false } err = b.StateDB.SaveState(resourceKey, dbentry.ID, sv.Value, entry.DependsOn) - } else if action.IsImport() { - // Handle import actions - err = d.Import(ctx, &b.StateDB, entry.ImportID, sv.Value, 
action, entry.Changes) + } else if action.IsBind() { + // Handle bind actions + err = d.DeclarativeBind(ctx, &b.StateDB, entry.BindID, sv.Value, action, entry.Changes) } else { // TODO: redo calcDiff to downgrade planned action if possible (?) err = d.Deploy(ctx, &b.StateDB, sv.Value, action, entry.Changes) diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index eedc1fe332..b39b794d93 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -109,7 +109,7 @@ func (b *DeploymentBundle) InitForApply(ctx context.Context, client *databricks. return nil } -func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks.WorkspaceClient, configRoot *config.Root, statePath string, importConfig *config.Import) (*deployplan.Plan, error) { +func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks.WorkspaceClient, configRoot *config.Root, statePath string, bindConfig config.Bind) (*deployplan.Plan, error) { err := b.StateDB.Open(statePath) if err != nil { return nil, fmt.Errorf("reading state from %s: %w", statePath, err) @@ -120,24 +120,24 @@ func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks return nil, err } - plan, err := b.makePlan(ctx, configRoot, &b.StateDB.Data, importConfig) + plan, err := b.makePlan(ctx, configRoot, &b.StateDB.Data, bindConfig) if err != nil { return nil, fmt.Errorf("reading config: %w", err) } - // Validate all import entries reference resources defined in config. - var importErrors []string - importConfig.ForEach(func(resourceType, resourceName, importID string) { + // Validate all bind entries reference resources defined in config. + var bindErrors []string + bindConfig.ForEach(func(resourceType, resourceName, bindID string) { key := "resources." + resourceType + "." 
+ resourceName if _, ok := plan.Plan[key]; !ok { - importErrors = append(importErrors, fmt.Sprintf("import block references undefined resource %q; define it in the resources section or remove the import block", key)) + bindErrors = append(bindErrors, fmt.Sprintf("bind block references undefined resource %q; define it in the resources section or remove the bind block", key)) } }) - if len(importErrors) > 0 { - for _, msg := range importErrors { + if len(bindErrors) > 0 { + for _, msg := range bindErrors { logdiag.LogError(ctx, errors.New(msg)) } - return nil, errors.New("import validation failed") + return nil, errors.New("bind validation failed") } b.Plan = plan @@ -212,18 +212,18 @@ func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks dbentry, hasEntry := b.StateDB.GetResourceEntry(resourceKey) - // Handle import block: if ImportID is set, this resource should be imported - if entry.ImportID != "" { + // Handle bind block: if BindID is set, this resource should be bound + if entry.BindID != "" { if hasEntry { // Resource is already in state - check if IDs match - if dbentry.ID != entry.ImportID { - logdiag.LogError(ctx, fmt.Errorf("%s: resource already bound to ID %q, cannot import as %q; remove the import block or unbind the existing resource", errorPrefix, dbentry.ID, entry.ImportID)) + if dbentry.ID != entry.BindID { + logdiag.LogError(ctx, fmt.Errorf("%s: resource already bound to ID %q, cannot bind as %q; remove the bind block or unbind the existing resource", errorPrefix, dbentry.ID, entry.BindID)) return false } - // IDs match - proceed with normal planning (resource was previously imported) + // IDs match - proceed with normal planning (resource was previously bound) } else { - // Not in state - this is a new import - return b.handleImportPlan(ctx, resourceKey, entry, adapter, errorPrefix) + // Not in state - this is a new bind + return b.handleBindPlan(ctx, resourceKey, entry, adapter, errorPrefix) } } @@ -740,7 +740,7 @@ func 
(b *DeploymentBundle) resolveReferences(ctx context.Context, resourceKey st return true } -func (b *DeploymentBundle) makePlan(ctx context.Context, configRoot *config.Root, db *dstate.Database, importConfig *config.Import) (*deployplan.Plan, error) { +func (b *DeploymentBundle) makePlan(ctx context.Context, configRoot *config.Root, db *dstate.Database, bindConfig config.Bind) (*deployplan.Plan, error) { p := deployplan.NewPlanDirect() // Copy state metadata to plan for validation during apply @@ -902,17 +902,14 @@ func (b *DeploymentBundle) makePlan(ctx context.Context, configRoot *config.Root return nil, fmt.Errorf("%s: cannot serialize state: %w", node, err) } - // Check if this resource has an import block defined + // Check if this resource has a bind block defined resourceType, resourceName := getResourceTypeAndName(node) - importID := "" - if importConfig != nil { - importID = importConfig.GetImportID(resourceType, resourceName) - } + bindID := bindConfig.GetBindID(resourceType, resourceName) e := deployplan.PlanEntry{ DependsOn: dependsOn, NewState: newStateJSON, - ImportID: importID, + BindID: bindID, } p.Plan[node] = &e @@ -984,18 +981,18 @@ func (b *DeploymentBundle) getAdapterForKey(resourceKey string) (*dresources.Ada return adapter, nil } -// handleImportPlan handles planning for resources that should be imported from the workspace. -// This is called when a resource has an import block defined and is not yet in the state. -func (b *DeploymentBundle) handleImportPlan(ctx context.Context, resourceKey string, entry *deployplan.PlanEntry, adapter *dresources.Adapter, errorPrefix string) bool { - importID := entry.ImportID +// handleBindPlan handles planning for resources that should be bound from the workspace. +// This is called when a resource has a bind block defined and is not yet in the state. 
+func (b *DeploymentBundle) handleBindPlan(ctx context.Context, resourceKey string, entry *deployplan.PlanEntry, adapter *dresources.Adapter, errorPrefix string) bool { + bindID := entry.BindID // Read the remote resource to verify it exists and get current state - remoteState, err := adapter.DoRead(ctx, importID) + remoteState, err := adapter.DoRead(ctx, bindID) if err != nil { if isResourceGone(err) { - logdiag.LogError(ctx, fmt.Errorf("%s: resource with ID %q does not exist in workspace", errorPrefix, importID)) + logdiag.LogError(ctx, fmt.Errorf("%s: resource with ID %q does not exist in workspace", errorPrefix, bindID)) } else { - logdiag.LogError(ctx, fmt.Errorf("%s: reading remote resource id=%q: %w", errorPrefix, importID, err)) + logdiag.LogError(ctx, fmt.Errorf("%s: reading remote resource id=%q: %w", errorPrefix, bindID, err)) } return false } @@ -1023,7 +1020,7 @@ func (b *DeploymentBundle) handleImportPlan(ctx context.Context, resourceKey str return false } - // For imports, there's no "saved state" so we compare remote directly with config + // For binds, there's no "saved state" so we compare remote directly with config entry.Changes, err = prepareChanges(ctx, adapter, nil, remoteDiff, nil, remoteStateComparable) if err != nil { logdiag.LogError(ctx, fmt.Errorf("%s: %w", errorPrefix, err)) @@ -1039,16 +1036,16 @@ func (b *DeploymentBundle) handleImportPlan(ctx context.Context, resourceKey str // Determine action based on changes maxAction := getMaxAction(entry.Changes) - // Block recreate action for imported resources + // Block recreate action for bound resources if maxAction == deployplan.Recreate { - logdiag.LogError(ctx, fmt.Errorf("%s: cannot recreate resource with import block; this would destroy the existing workspace resource. Remove the import block to allow recreation", errorPrefix)) + logdiag.LogError(ctx, fmt.Errorf("%s: cannot recreate resource with bind block; this would destroy the existing workspace resource. 
Remove the bind block to allow recreation", errorPrefix)) return false } if maxAction == deployplan.Skip || maxAction == deployplan.Undefined { - entry.Action = deployplan.Import + entry.Action = deployplan.Bind } else { - entry.Action = deployplan.ImportAndUpdate + entry.Action = deployplan.BindAndUpdate } return true diff --git a/bundle/internal/schema/annotations.yml b/bundle/internal/schema/annotations.yml index ab11009bea..547e1c3426 100644 --- a/bundle/internal/schema/annotations.yml +++ b/bundle/internal/schema/annotations.yml @@ -23,6 +23,10 @@ github.com/databricks/cli/bundle/config.ArtifactFile: "source": "description": |- Required. The artifact source file. +github.com/databricks/cli/bundle/config.BindResource: + "id": + "description": |- + The ID of the existing workspace resource to bind. github.com/databricks/cli/bundle/config.Bundle: "cluster_id": "description": |- @@ -96,74 +100,6 @@ github.com/databricks/cli/bundle/config.Git: The origin URL of the repository. "markdown_description": |- The origin URL of the repository. See [\_](/dev-tools/bundles/settings.md#git). -github.com/databricks/cli/bundle/config.Import: - "alerts": - "description": |- - The alert resources to import, where each key is the resource name in the bundle. - "apps": - "description": |- - The app resources to import, where each key is the resource name in the bundle. - "clusters": - "description": |- - The cluster resources to import, where each key is the resource name in the bundle. - "dashboards": - "description": |- - The dashboard resources to import, where each key is the resource name in the bundle. - "database_catalogs": - "description": |- - The database catalog resources to import, where each key is the resource name in the bundle. - "database_instances": - "description": |- - The database instance resources to import, where each key is the resource name in the bundle. 
- "experiments": - "description": |- - The experiment resources to import, where each key is the resource name in the bundle. - "jobs": - "description": |- - The job resources to import, where each key is the resource name in the bundle. - "model_serving_endpoints": - "description": |- - The model serving endpoint resources to import, where each key is the resource name in the bundle. - "models": - "description": |- - The model resources to import, where each key is the resource name in the bundle. - "pipelines": - "description": |- - The pipeline resources to import, where each key is the resource name in the bundle. - "postgres_branches": - "description": |- - The Postgres branch resources to import, where each key is the resource name in the bundle. - "postgres_endpoints": - "description": |- - The Postgres endpoint resources to import, where each key is the resource name in the bundle. - "postgres_projects": - "description": |- - The Postgres project resources to import, where each key is the resource name in the bundle. - "quality_monitors": - "description": |- - The quality monitor resources to import, where each key is the resource name in the bundle. - "registered_models": - "description": |- - The registered model resources to import, where each key is the resource name in the bundle. - "schemas": - "description": |- - The schema resources to import, where each key is the resource name in the bundle. - "secret_scopes": - "description": |- - The secret scope resources to import, where each key is the resource name in the bundle. - "sql_warehouses": - "description": |- - The SQL warehouse resources to import, where each key is the resource name in the bundle. - "synced_database_tables": - "description": |- - The synced database table resources to import, where each key is the resource name in the bundle. - "volumes": - "description": |- - The volume resources to import, where each key is the resource name in the bundle. 
-github.com/databricks/cli/bundle/config.ImportResource: - "id": - "description": |- - The ID of the existing workspace resource to import. github.com/databricks/cli/bundle/config.Lock: "enabled": "description": |- @@ -431,6 +367,9 @@ github.com/databricks/cli/bundle/config.Target: "artifacts": "description": |- The artifacts to include in the target deployment. + "bind": + "description": |- + The existing workspace resources to bind into bundle management for this target. "bundle": "description": |- The bundle attributes when deploying to this target. @@ -448,9 +387,6 @@ github.com/databricks/cli/bundle/config.Target: "git": "description": |- The Git version control settings for the target. - "import": - "description": |- - The existing workspace resources to import into bundle management for this target. "mode": "description": |- The deployment mode for the target. diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 403d180c7c..1afdf046ed 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -213,19 +213,19 @@ func Deploy(ctx context.Context, b *bundle.Bundle, outputHandler sync.OutputHand } func RunPlan(ctx context.Context, b *bundle.Bundle, engine engine.EngineType) *deployplan.Plan { - // Validate import blocks are only used with the direct deployment engine. - if !engine.IsDirect() && b.Target != nil && b.Target.Import != nil && !b.Target.Import.IsEmpty() { - logdiag.LogError(ctx, errors.New("import blocks in the target configuration are only supported with the direct deployment engine; set DATABRICKS_BUNDLE_ENGINE=direct or remove the import blocks")) + // Validate bind blocks are only used with the direct deployment engine. 
+ if !engine.IsDirect() && b.Target != nil && !b.Target.Bind.IsEmpty() { + logdiag.LogError(ctx, errors.New("bind blocks in the target configuration are only supported with the direct deployment engine; set DATABRICKS_BUNDLE_ENGINE=direct or remove the bind blocks")) return nil } if engine.IsDirect() { _, localPath := b.StateFilenameDirect(ctx) - var importConfig *config.Import + var bindConfig config.Bind if b.Target != nil { - importConfig = b.Target.Import + bindConfig = b.Target.Bind } - plan, err := b.DeploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, localPath, importConfig) + plan, err := b.DeploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), &b.Config, localPath, bindConfig) if err != nil { logdiag.LogError(ctx, err) return nil diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index f9a06b5cda..fc7c6bf183 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -2490,6 +2490,27 @@ "config.ArtifactType": { "type": "string" }, + "config.BindResource": { + "oneOf": [ + { + "type": "object", + "properties": { + "id": { + "description": "The ID of the existing workspace resource to bind.", + "$ref": "#/$defs/string" + } + }, + "additionalProperties": false, + "required": [ + "id" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, "config.Bundle": { "oneOf": [ { @@ -2632,125 +2653,6 @@ } ] }, - "config.Import": { - "oneOf": [ - { - "type": "object", - "properties": { - "alerts": { - "description": "The alert resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "apps": { - "description": "The app resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "clusters": { - "description": "The cluster resources to 
import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "dashboards": { - "description": "The dashboard resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "database_catalogs": { - "description": "The database catalog resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "database_instances": { - "description": "The database instance resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "experiments": { - "description": "The experiment resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "jobs": { - "description": "The job resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "model_serving_endpoints": { - "description": "The model serving endpoint resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "models": { - "description": "The model resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "pipelines": { - "description": "The pipeline resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "postgres_branches": { - "description": "The Postgres branch resources to import, where each key is the resource name in the bundle.", - "$ref": 
"#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "postgres_endpoints": { - "description": "The Postgres endpoint resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "postgres_projects": { - "description": "The Postgres project resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "quality_monitors": { - "description": "The quality monitor resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "registered_models": { - "description": "The registered model resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "schemas": { - "description": "The schema resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "secret_scopes": { - "description": "The secret scope resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "sql_warehouses": { - "description": "The SQL warehouse resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "synced_database_tables": { - "description": "The synced database table resources to import, where each key is the resource name in the bundle.", - "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - }, - "volumes": { - "description": "The volume resources to import, where each key is the resource name in the bundle.", - "$ref": 
"#/$defs/map/github.com/databricks/cli/bundle/config.ImportResource" - } - }, - "additionalProperties": false - }, - { - "type": "string", - "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" - } - ] - }, - "config.ImportResource": { - "oneOf": [ - { - "type": "object", - "properties": { - "id": { - "description": "The ID of the existing workspace resource to import.", - "$ref": "#/$defs/string" - } - }, - "additionalProperties": false, - "required": [ - "id" - ] - }, - { - "type": "string", - "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" - } - ] - }, "config.Lock": { "oneOf": [ { @@ -3025,6 +2927,10 @@ "description": "The artifacts to include in the target deployment.", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact" }, + "bind": { + "description": "The existing workspace resources to bind into bundle management for this target.", + "$ref": "#/$defs/map/map/github.com/databricks/cli/bundle/config.BindResource" + }, "bundle": { "description": "The bundle attributes when deploying to this target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle" @@ -3047,10 +2953,6 @@ "description": "The Git version control settings for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git" }, - "import": { - "description": "The existing workspace resources to import into bundle management for this target.", - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Import" - }, "mode": { "description": "The deployment mode for the target.", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Mode", @@ -10746,12 +10648,12 @@ } ] }, - "config.Command": { + "config.BindResource": { "oneOf": [ { "type": "object", "additionalProperties": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Command" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.BindResource" } }, { @@ -10760,12 +10662,12 @@ } ] }, - "config.ImportResource": { + 
"config.Command": { "oneOf": [ { "type": "object", "additionalProperties": { - "$ref": "#/$defs/github.com/databricks/cli/bundle/config.ImportResource" + "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Command" } }, { @@ -10806,6 +10708,30 @@ } } }, + "map": { + "github.com": { + "databricks": { + "cli": { + "bundle": { + "config.BindResource": { + "oneOf": [ + { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.BindResource" + } + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + } + } + } + } + } + }, "string": { "oneOf": [ { diff --git a/cmd/bundle/deployment/migrate.go b/cmd/bundle/deployment/migrate.go index d81c6aecfe..d2a0ff4629 100644 --- a/cmd/bundle/deployment/migrate.go +++ b/cmd/bundle/deployment/migrate.go @@ -154,9 +154,9 @@ WARNING: Both direct deployment engine and this command are experimental and not } ctx := cmd.Context() - // Check for import blocks - migration is not allowed with import blocks defined - if b.Target != nil && b.Target.Import != nil && !b.Target.Import.IsEmpty() { - return errors.New("cannot run 'bundle deployment migrate' when import blocks are defined in the target configuration; import blocks are only supported with the direct deployment engine") + // Check for bind blocks - migration is not allowed with bind blocks defined + if b.Target != nil && !b.Target.Bind.IsEmpty() { + return errors.New("cannot run 'bundle deployment migrate' when bind blocks are defined in the target configuration; bind blocks are only supported with the direct deployment engine") } if stateDesc.Lineage == "" { diff --git a/cmd/bundle/plan.go b/cmd/bundle/plan.go index 0bd6e0ea57..a61e939e87 100644 --- a/cmd/bundle/plan.go +++ b/cmd/bundle/plan.go @@ -72,7 +72,7 @@ It is useful for previewing changes before running 'bundle deploy'.`, updateCount := 0 deleteCount := 0 unchangedCount := 0 - importCount := 0 + bindCount 
:= 0 for _, change := range plan.GetActions() { switch change.ActionType { @@ -88,10 +88,10 @@ It is useful for previewing changes before running 'bundle deploy'.`, createCount++ case deployplan.Skip, deployplan.Undefined: unchangedCount++ - case deployplan.Import: - importCount++ - case deployplan.ImportAndUpdate: - importCount++ + case deployplan.Bind: + bindCount++ + case deployplan.BindAndUpdate: + bindCount++ updateCount++ } } @@ -101,7 +101,7 @@ It is useful for previewing changes before running 'bundle deploy'.`, switch root.OutputType(cmd) { case flags.OutputText: // Print summary line and actions to stdout - totalChanges := createCount + updateCount + deleteCount + importCount + totalChanges := createCount + updateCount + deleteCount + bindCount if totalChanges > 0 { // Print all actions in the order they were processed for _, action := range plan.GetActions() { @@ -109,10 +109,10 @@ It is useful for previewing changes before running 'bundle deploy'.`, continue } key := strings.TrimPrefix(action.ResourceKey, "resources.") - // For import actions, include the import ID - if action.ActionType.IsImport() { + // For bind actions, include the bind ID + if action.ActionType.IsBind() { entry := plan.Plan[action.ResourceKey] - fmt.Fprintf(out, "%s %s (id: %s)\n", action.ActionType.StringShort(), key, entry.ImportID) + fmt.Fprintf(out, "%s %s (id: %s)\n", action.ActionType.StringShort(), key, entry.BindID) } else { fmt.Fprintf(out, "%s %s\n", action.ActionType.StringShort(), key) } @@ -120,8 +120,8 @@ It is useful for previewing changes before running 'bundle deploy'.`, fmt.Fprintln(out) } // Note, this string should not be changed, "bundle deployment migrate" depends on this format: - if importCount > 0 { - fmt.Fprintf(out, "Plan: %d to add, %d to change, %d to delete, %d unchanged, %d to import\n", createCount, updateCount, deleteCount, unchangedCount, importCount) + if bindCount > 0 { + fmt.Fprintf(out, "Plan: %d to add, %d to change, %d to delete, %d unchanged, 
%d to bind\n", createCount, updateCount, deleteCount, unchangedCount, bindCount) } else { fmt.Fprintf(out, "Plan: %d to add, %d to change, %d to delete, %d unchanged\n", createCount, updateCount, deleteCount, unchangedCount) } From a0eda17cd01f39a6b2d771d77b82320c11bb3dce Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 6 Mar 2026 00:08:02 +0100 Subject: [PATCH 06/13] Fix gofmt alignment in action.go const block Co-Authored-By: Claude Opus 4.6 --- bundle/deployplan/action.go | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/bundle/deployplan/action.go b/bundle/deployplan/action.go index f26a6819ee..85b9a2d174 100644 --- a/bundle/deployplan/action.go +++ b/bundle/deployplan/action.go @@ -28,16 +28,16 @@ type ActionType string // If case of several options, action with highest severity wins. // Note, Create/Delete are handled explicitly and never compared. const ( - Undefined ActionType = "" - Skip ActionType = "skip" - Resize ActionType = "resize" - Update ActionType = "update" - UpdateWithID ActionType = "update_id" - Create ActionType = "create" - Recreate ActionType = "recreate" - Delete ActionType = "delete" - Bind ActionType = "bind" - BindAndUpdate ActionType = "bind_and_update" + Undefined ActionType = "" + Skip ActionType = "skip" + Resize ActionType = "resize" + Update ActionType = "update" + UpdateWithID ActionType = "update_id" + Create ActionType = "create" + Recreate ActionType = "recreate" + Delete ActionType = "delete" + Bind ActionType = "bind" + BindAndUpdate ActionType = "bind_and_update" ) var actionOrder = map[ActionType]int{ From 89e54352af86cd038c27a736da7230ba83831de6 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 6 Mar 2026 00:27:51 +0100 Subject: [PATCH 07/13] Add bind validation, reorder action severity, and error case tests - Move Bind/BindAndUpdate below UpdateWithID but before Create/Recreate/Delete in the action severity order - Add validation that prevents bind and delete 
conflicts on the same resource ID - Add acceptance tests: - invalid-resource-type: bind references a non-existent resource type - top-level-bind: bind placed at root level instead of under target - delete-and-bind-conflict: bind ID matches a resource being deleted Co-Authored-By: Claude Opus 4.6 --- .../delete-and-bind-conflict/databricks.yml | 19 +++++++++++++++ .../databricks_conflict.yml | 23 +++++++++++++++++++ .../bind/delete-and-bind-conflict/hello.py | 1 + .../delete-and-bind-conflict/out.test.toml | 5 ++++ .../bind/delete-and-bind-conflict/output.txt | 21 +++++++++++++++++ .../bind/delete-and-bind-conflict/script | 18 +++++++++++++++ .../bind/delete-and-bind-conflict/test.toml | 1 + .../bind/invalid-resource-type/databricks.yml | 23 +++++++++++++++++++ .../bind/invalid-resource-type/hello.py | 1 + .../bind/invalid-resource-type/out.test.toml | 5 ++++ .../bind/invalid-resource-type/output.txt | 6 +++++ .../deploy/bind/invalid-resource-type/script | 1 + .../deploy/bind/top-level-bind/databricks.yml | 21 +++++++++++++++++ .../deploy/bind/top-level-bind/hello.py | 1 + .../deploy/bind/top-level-bind/out.test.toml | 5 ++++ .../deploy/bind/top-level-bind/output.txt | 12 ++++++++++ .../bundle/deploy/bind/top-level-bind/script | 1 + .../deploy/bind/top-level-bind/test.toml | 1 + bundle/deployplan/action.go | 10 ++++---- bundle/direct/bundle_plan.go | 20 ++++++++++++++++ 20 files changed, 190 insertions(+), 5 deletions(-) create mode 100644 acceptance/bundle/deploy/bind/delete-and-bind-conflict/databricks.yml create mode 100644 acceptance/bundle/deploy/bind/delete-and-bind-conflict/databricks_conflict.yml create mode 100644 acceptance/bundle/deploy/bind/delete-and-bind-conflict/hello.py create mode 100644 acceptance/bundle/deploy/bind/delete-and-bind-conflict/out.test.toml create mode 100644 acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt create mode 100644 acceptance/bundle/deploy/bind/delete-and-bind-conflict/script create mode 100644 
acceptance/bundle/deploy/bind/delete-and-bind-conflict/test.toml create mode 100644 acceptance/bundle/deploy/bind/invalid-resource-type/databricks.yml create mode 100644 acceptance/bundle/deploy/bind/invalid-resource-type/hello.py create mode 100644 acceptance/bundle/deploy/bind/invalid-resource-type/out.test.toml create mode 100644 acceptance/bundle/deploy/bind/invalid-resource-type/output.txt create mode 100644 acceptance/bundle/deploy/bind/invalid-resource-type/script create mode 100644 acceptance/bundle/deploy/bind/top-level-bind/databricks.yml create mode 100644 acceptance/bundle/deploy/bind/top-level-bind/hello.py create mode 100644 acceptance/bundle/deploy/bind/top-level-bind/out.test.toml create mode 100644 acceptance/bundle/deploy/bind/top-level-bind/output.txt create mode 100644 acceptance/bundle/deploy/bind/top-level-bind/script create mode 100644 acceptance/bundle/deploy/bind/top-level-bind/test.toml diff --git a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/databricks.yml b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/databricks.yml new file mode 100644 index 0000000000..cbb7da54af --- /dev/null +++ b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/databricks.yml @@ -0,0 +1,19 @@ +bundle: + name: test-bind-delete-conflict + +resources: + jobs: + foo: + name: test-bind-delete-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: diff --git a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/databricks_conflict.yml b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/databricks_conflict.yml new file mode 100644 index 0000000000..5975881706 --- /dev/null +++ b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/databricks_conflict.yml @@ -0,0 +1,23 @@ +bundle: + name: test-bind-delete-conflict + +resources: + jobs: + bar: + name: test-bind-delete-job + environments: + - 
environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + bind: + jobs: + bar: + id: "PLACEHOLDER_JOB_ID" diff --git a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/hello.py b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/hello.py @@ -0,0 +1 @@ +print("hello") diff --git a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/out.test.toml b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/out.test.toml new file mode 100644 index 0000000000..19b2c349a3 --- /dev/null +++ b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt new file mode 100644 index 0000000000..98700546e0 --- /dev/null +++ b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt @@ -0,0 +1,21 @@ + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bind-delete-conflict/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> musterr [CLI] bundle plan +Error: bind block for "resources.jobs.bar" has the same ID "[FOO_ID]" as "resources.jobs.foo" which is being deleted; remove the bind block or the delete + +Error: bind validation failed + + +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete resources.jobs.foo + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bind-delete-conflict/default + +Deleting files... +Destroy complete! 
diff --git a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/script b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/script new file mode 100644 index 0000000000..6547472f40 --- /dev/null +++ b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/script @@ -0,0 +1,18 @@ +# Deploy foo to create it in state +trace $CLI bundle deploy --auto-approve + +# Get the job ID from state +JOB_ID=$(read_id.py foo) + +# Switch to a config that renames foo->bar and adds a bind block for bar +# with the same job ID. This creates a conflict: foo is being deleted +# (still in state) while bar is being bound with the same ID. +cp databricks.yml databricks.yml.bak +cp databricks_conflict.yml databricks.yml +update_file.py databricks.yml 'PLACEHOLDER_JOB_ID' "$JOB_ID" + +trace musterr $CLI bundle plan + +# Cleanup: restore original config and destroy +cp databricks.yml.bak databricks.yml +trace $CLI bundle destroy --auto-approve diff --git a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/test.toml b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/test.toml new file mode 100644 index 0000000000..a07a767561 --- /dev/null +++ b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/test.toml @@ -0,0 +1 @@ +Ignore = [".databricks", "databricks.yml.bak", "databricks_conflict.yml"] diff --git a/acceptance/bundle/deploy/bind/invalid-resource-type/databricks.yml b/acceptance/bundle/deploy/bind/invalid-resource-type/databricks.yml new file mode 100644 index 0000000000..84e59cd7f5 --- /dev/null +++ b/acceptance/bundle/deploy/bind/invalid-resource-type/databricks.yml @@ -0,0 +1,23 @@ +bundle: + name: test-bind-invalid-resource-type + +resources: + jobs: + foo: + name: test-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + bind: + foobar: + foo: + id: "123" diff --git 
a/acceptance/bundle/deploy/bind/invalid-resource-type/hello.py b/acceptance/bundle/deploy/bind/invalid-resource-type/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/bind/invalid-resource-type/hello.py @@ -0,0 +1 @@ +print("hello") diff --git a/acceptance/bundle/deploy/bind/invalid-resource-type/out.test.toml b/acceptance/bundle/deploy/bind/invalid-resource-type/out.test.toml new file mode 100644 index 0000000000..19b2c349a3 --- /dev/null +++ b/acceptance/bundle/deploy/bind/invalid-resource-type/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/bind/invalid-resource-type/output.txt b/acceptance/bundle/deploy/bind/invalid-resource-type/output.txt new file mode 100644 index 0000000000..4228c2380d --- /dev/null +++ b/acceptance/bundle/deploy/bind/invalid-resource-type/output.txt @@ -0,0 +1,6 @@ + +>>> musterr [CLI] bundle plan +Error: bind block references undefined resource "resources.foobar.foo"; define it in the resources section or remove the bind block + +Error: bind validation failed + diff --git a/acceptance/bundle/deploy/bind/invalid-resource-type/script b/acceptance/bundle/deploy/bind/invalid-resource-type/script new file mode 100644 index 0000000000..9d9604578f --- /dev/null +++ b/acceptance/bundle/deploy/bind/invalid-resource-type/script @@ -0,0 +1 @@ +trace musterr $CLI bundle plan diff --git a/acceptance/bundle/deploy/bind/top-level-bind/databricks.yml b/acceptance/bundle/deploy/bind/top-level-bind/databricks.yml new file mode 100644 index 0000000000..6783311697 --- /dev/null +++ b/acceptance/bundle/deploy/bind/top-level-bind/databricks.yml @@ -0,0 +1,21 @@ +bundle: + name: test-bind-top-level + +bind: + jobs: + foo: + id: "123" + +resources: + jobs: + foo: + name: test-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + 
spark_python_task: + python_file: ./hello.py diff --git a/acceptance/bundle/deploy/bind/top-level-bind/hello.py b/acceptance/bundle/deploy/bind/top-level-bind/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/bind/top-level-bind/hello.py @@ -0,0 +1 @@ +print("hello") diff --git a/acceptance/bundle/deploy/bind/top-level-bind/out.test.toml b/acceptance/bundle/deploy/bind/top-level-bind/out.test.toml new file mode 100644 index 0000000000..54146af564 --- /dev/null +++ b/acceptance/bundle/deploy/bind/top-level-bind/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = false + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/bind/top-level-bind/output.txt b/acceptance/bundle/deploy/bind/top-level-bind/output.txt new file mode 100644 index 0000000000..ee8fd54ee7 --- /dev/null +++ b/acceptance/bundle/deploy/bind/top-level-bind/output.txt @@ -0,0 +1,12 @@ + +>>> [CLI] bundle validate +Warning: unknown field: bind + in databricks.yml:4:1 + +Name: test-bind-top-level +Target: default +Workspace: + User: [USERNAME] + Path: /Workspace/Users/[USERNAME]/.bundle/test-bind-top-level/default + +Found 1 warning diff --git a/acceptance/bundle/deploy/bind/top-level-bind/script b/acceptance/bundle/deploy/bind/top-level-bind/script new file mode 100644 index 0000000000..5350876150 --- /dev/null +++ b/acceptance/bundle/deploy/bind/top-level-bind/script @@ -0,0 +1 @@ +trace $CLI bundle validate diff --git a/acceptance/bundle/deploy/bind/top-level-bind/test.toml b/acceptance/bundle/deploy/bind/top-level-bind/test.toml new file mode 100644 index 0000000000..18b1a88417 --- /dev/null +++ b/acceptance/bundle/deploy/bind/top-level-bind/test.toml @@ -0,0 +1 @@ +Cloud = false diff --git a/bundle/deployplan/action.go b/bundle/deployplan/action.go index 85b9a2d174..1975714081 100644 --- a/bundle/deployplan/action.go +++ b/bundle/deployplan/action.go @@ -46,11 +46,11 @@ var actionOrder = map[ActionType]int{ 
Resize: 2, Update: 3, UpdateWithID: 4, - Create: 5, - Recreate: 6, - Delete: 7, - Bind: 8, - BindAndUpdate: 9, + Bind: 5, + BindAndUpdate: 6, + Create: 7, + Recreate: 8, + Delete: 9, } func (a ActionType) KeepsID() bool { diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index b39b794d93..1b499cb30b 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -133,6 +133,26 @@ func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks bindErrors = append(bindErrors, fmt.Sprintf("bind block references undefined resource %q; define it in the resources section or remove the bind block", key)) } }) + + // Validate that no bind ID conflicts with a resource being deleted. + // This catches the case where a resource was previously deployed and a different + // resource name now has a bind block with the same ID. + for key, entry := range plan.Plan { + if entry.Action != deployplan.Delete { + continue + } + dbEntry, ok := b.StateDB.GetResourceEntry(key) + if !ok || dbEntry.ID == "" { + continue + } + bindConfig.ForEach(func(resourceType, resourceName, bindID string) { + if bindID == dbEntry.ID { + bindKey := "resources." + resourceType + "." + resourceName + bindErrors = append(bindErrors, fmt.Sprintf("bind block for %q has the same ID %q as %q which is being deleted; remove the bind block or the delete", bindKey, bindID, key)) + } + }) + } + if len(bindErrors) > 0 { for _, msg := range bindErrors { logdiag.LogError(ctx, errors.New(msg)) From 34f8a49d369b599a416537faa90108e51882bc61 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 6 Mar 2026 00:31:45 +0100 Subject: [PATCH 08/13] Use diagnostics with location info for bind validation errors Replace plain error messages with diag.Diagnostic that includes Locations and Paths pointing to the bind block in the YAML config. Guard against nil configRoot when called from destroy path. 
Co-Authored-By: Claude Opus 4.6 --- .../bind/delete-and-bind-conflict/output.txt | 1 + .../bind/invalid-resource-type/output.txt | 1 + .../deploy/bind/orphaned-bind/output.txt | 1 + bundle/deployplan/action.go | 4 +- bundle/direct/bundle_plan.go | 71 ++++++++++++------- 5 files changed, 49 insertions(+), 29 deletions(-) diff --git a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt index 98700546e0..2474d98afa 100644 --- a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt +++ b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt @@ -7,6 +7,7 @@ Deployment complete! >>> musterr [CLI] bundle plan Error: bind block for "resources.jobs.bar" has the same ID "[FOO_ID]" as "resources.jobs.foo" which is being deleted; remove the bind block or the delete + at targets.default.bind.jobs.bar Error: bind validation failed diff --git a/acceptance/bundle/deploy/bind/invalid-resource-type/output.txt b/acceptance/bundle/deploy/bind/invalid-resource-type/output.txt index 4228c2380d..d06b74c8f1 100644 --- a/acceptance/bundle/deploy/bind/invalid-resource-type/output.txt +++ b/acceptance/bundle/deploy/bind/invalid-resource-type/output.txt @@ -1,6 +1,7 @@ >>> musterr [CLI] bundle plan Error: bind block references undefined resource "resources.foobar.foo"; define it in the resources section or remove the bind block + at targets.default.bind.foobar.foo Error: bind validation failed diff --git a/acceptance/bundle/deploy/bind/orphaned-bind/output.txt b/acceptance/bundle/deploy/bind/orphaned-bind/output.txt index e7a32938ea..6ae6816b73 100644 --- a/acceptance/bundle/deploy/bind/orphaned-bind/output.txt +++ b/acceptance/bundle/deploy/bind/orphaned-bind/output.txt @@ -1,6 +1,7 @@ >>> musterr [CLI] bundle plan Error: bind block references undefined resource "resources.jobs.bar"; define it in the resources section or remove the bind block + at targets.default.bind.jobs.bar Error: 
bind validation failed diff --git a/bundle/deployplan/action.go b/bundle/deployplan/action.go index 1975714081..87e144dfa0 100644 --- a/bundle/deployplan/action.go +++ b/bundle/deployplan/action.go @@ -33,11 +33,11 @@ const ( Resize ActionType = "resize" Update ActionType = "update" UpdateWithID ActionType = "update_id" + Bind ActionType = "bind" + BindAndUpdate ActionType = "bind_and_update" Create ActionType = "create" Recreate ActionType = "recreate" Delete ActionType = "delete" - Bind ActionType = "bind" - BindAndUpdate ActionType = "bind_and_update" ) var actionOrder = map[ActionType]int{ diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index 1b499cb30b..d87bef089b 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -15,6 +15,7 @@ import ( "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/bundle/direct/dresources" "github.com/databricks/cli/bundle/direct/dstate" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn/dynvar" "github.com/databricks/cli/libs/log" @@ -125,39 +126,55 @@ func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks return nil, fmt.Errorf("reading config: %w", err) } - // Validate all bind entries reference resources defined in config. - var bindErrors []string - bindConfig.ForEach(func(resourceType, resourceName, bindID string) { - key := "resources." + resourceType + "." + resourceName - if _, ok := plan.Plan[key]; !ok { - bindErrors = append(bindErrors, fmt.Sprintf("bind block references undefined resource %q; define it in the resources section or remove the bind block", key)) - } - }) + // Validate bind entries when a bind config is provided. + if !bindConfig.IsEmpty() { + var hasBindErrors bool + targetName := configRoot.Bundle.Target - // Validate that no bind ID conflicts with a resource being deleted. 
- // This catches the case where a resource was previously deployed and a different - // resource name now has a bind block with the same ID. - for key, entry := range plan.Plan { - if entry.Action != deployplan.Delete { - continue - } - dbEntry, ok := b.StateDB.GetResourceEntry(key) - if !ok || dbEntry.ID == "" { - continue - } + // Validate all bind entries reference resources defined in config. bindConfig.ForEach(func(resourceType, resourceName, bindID string) { - if bindID == dbEntry.ID { - bindKey := "resources." + resourceType + "." + resourceName - bindErrors = append(bindErrors, fmt.Sprintf("bind block for %q has the same ID %q as %q which is being deleted; remove the bind block or the delete", bindKey, bindID, key)) + key := "resources." + resourceType + "." + resourceName + if _, ok := plan.Plan[key]; !ok { + bindPath := dyn.NewPath(dyn.Key("targets"), dyn.Key(targetName), dyn.Key("bind"), dyn.Key(resourceType), dyn.Key(resourceName)) + logdiag.LogDiag(ctx, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("bind block references undefined resource %q; define it in the resources section or remove the bind block", key), + Locations: configRoot.GetLocations(bindPath.String()), + Paths: []dyn.Path{bindPath}, + }) + hasBindErrors = true } }) - } - if len(bindErrors) > 0 { - for _, msg := range bindErrors { - logdiag.LogError(ctx, errors.New(msg)) + // Validate that no bind ID conflicts with a resource being deleted. + // This catches the case where a resource was previously deployed and a different + // resource name now has a bind block with the same ID. + for key, entry := range plan.Plan { + if entry.Action != deployplan.Delete { + continue + } + dbEntry, ok := b.StateDB.GetResourceEntry(key) + if !ok || dbEntry.ID == "" { + continue + } + bindConfig.ForEach(func(resourceType, resourceName, bindID string) { + if bindID == dbEntry.ID { + bindKey := "resources." + resourceType + "." 
+ resourceName + bindPath := dyn.NewPath(dyn.Key("targets"), dyn.Key(targetName), dyn.Key("bind"), dyn.Key(resourceType), dyn.Key(resourceName)) + logdiag.LogDiag(ctx, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("bind block for %q has the same ID %q as %q which is being deleted; remove the bind block or the delete", bindKey, bindID, key), + Locations: configRoot.GetLocations(bindPath.String()), + Paths: []dyn.Path{bindPath}, + }) + hasBindErrors = true + } + }) + } + + if hasBindErrors { + return nil, errors.New("bind validation failed") } - return nil, errors.New("bind validation failed") } b.Plan = plan From e55b3526192ce0d81e352a3352f6047e5df85d00 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 6 Mar 2026 00:40:18 +0100 Subject: [PATCH 09/13] Check bind ID conflicts against all state entries, not just deletes Bind IDs that conflict with existing resources in state are invalid regardless of the action type (delete, recreate, update_id, etc.). Check all state entries directly instead of only delete plan entries. Co-Authored-By: Claude Opus 4.6 --- .../bind/delete-and-bind-conflict/output.txt | 2 +- bundle/direct/bundle_plan.go | 42 ++++++++----------- 2 files changed, 19 insertions(+), 25 deletions(-) diff --git a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt index 2474d98afa..57e63fae48 100644 --- a/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt +++ b/acceptance/bundle/deploy/bind/delete-and-bind-conflict/output.txt @@ -6,7 +6,7 @@ Updating deployment state... Deployment complete! 
>>> musterr [CLI] bundle plan -Error: bind block for "resources.jobs.bar" has the same ID "[FOO_ID]" as "resources.jobs.foo" which is being deleted; remove the bind block or the delete +Error: bind block for "resources.jobs.bar" has the same ID "[FOO_ID]" as existing resource "resources.jobs.foo"; remove the bind block or the conflicting resource at targets.default.bind.jobs.bar Error: bind validation failed diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index d87bef089b..6608eeca8f 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -146,31 +146,25 @@ func (b *DeploymentBundle) CalculatePlan(ctx context.Context, client *databricks } }) - // Validate that no bind ID conflicts with a resource being deleted. - // This catches the case where a resource was previously deployed and a different - // resource name now has a bind block with the same ID. - for key, entry := range plan.Plan { - if entry.Action != deployplan.Delete { - continue - } - dbEntry, ok := b.StateDB.GetResourceEntry(key) - if !ok || dbEntry.ID == "" { - continue - } - bindConfig.ForEach(func(resourceType, resourceName, bindID string) { - if bindID == dbEntry.ID { - bindKey := "resources." + resourceType + "." + resourceName - bindPath := dyn.NewPath(dyn.Key("targets"), dyn.Key(targetName), dyn.Key("bind"), dyn.Key(resourceType), dyn.Key(resourceName)) - logdiag.LogDiag(ctx, diag.Diagnostic{ - Severity: diag.Error, - Summary: fmt.Sprintf("bind block for %q has the same ID %q as %q which is being deleted; remove the bind block or the delete", bindKey, bindID, key), - Locations: configRoot.GetLocations(bindPath.String()), - Paths: []dyn.Path{bindPath}, - }) - hasBindErrors = true + // Validate that no bind ID conflicts with an existing resource in state. + // Deletes, recreates, and update_ids are not allowed when a bind block + // references the same resource ID under a different resource key. 
+ bindConfig.ForEach(func(resourceType, resourceName, bindID string) { + bindKey := "resources." + resourceType + "." + resourceName + for stateKey, stateEntry := range b.StateDB.Data.State { + if stateKey == bindKey || stateEntry.ID != bindID { + continue } - }) - } + bindPath := dyn.NewPath(dyn.Key("targets"), dyn.Key(targetName), dyn.Key("bind"), dyn.Key(resourceType), dyn.Key(resourceName)) + logdiag.LogDiag(ctx, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("bind block for %q has the same ID %q as existing resource %q; remove the bind block or the conflicting resource", bindKey, bindID, stateKey), + Locations: configRoot.GetLocations(bindPath.String()), + Paths: []dyn.Path{bindPath}, + }) + hasBindErrors = true + } + }) if hasBindErrors { return nil, errors.New("bind validation failed") From afbc6ee174619c9a8aacbea803b75713fc7ee633 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Fri, 6 Mar 2026 00:52:33 +0100 Subject: [PATCH 10/13] Add acceptance test for bind ID conflicting with non-deleted resource Test that binding a resource ID already managed by another resource in state is caught, even when the existing resource is not being deleted. This covers recreate and update_id conflict scenarios. 
Co-Authored-By: Claude Opus 4.6 --- .../bind/duplicate-bind-id/databricks.yml | 19 +++++++++++ .../databricks_with_bind.yml | 34 +++++++++++++++++++ .../deploy/bind/duplicate-bind-id/hello.py | 1 + .../bind/duplicate-bind-id/out.test.toml | 5 +++ .../deploy/bind/duplicate-bind-id/output.txt | 22 ++++++++++++ .../deploy/bind/duplicate-bind-id/script | 18 ++++++++++ .../deploy/bind/duplicate-bind-id/test.toml | 1 + 7 files changed, 100 insertions(+) create mode 100644 acceptance/bundle/deploy/bind/duplicate-bind-id/databricks.yml create mode 100644 acceptance/bundle/deploy/bind/duplicate-bind-id/databricks_with_bind.yml create mode 100644 acceptance/bundle/deploy/bind/duplicate-bind-id/hello.py create mode 100644 acceptance/bundle/deploy/bind/duplicate-bind-id/out.test.toml create mode 100644 acceptance/bundle/deploy/bind/duplicate-bind-id/output.txt create mode 100644 acceptance/bundle/deploy/bind/duplicate-bind-id/script create mode 100644 acceptance/bundle/deploy/bind/duplicate-bind-id/test.toml diff --git a/acceptance/bundle/deploy/bind/duplicate-bind-id/databricks.yml b/acceptance/bundle/deploy/bind/duplicate-bind-id/databricks.yml new file mode 100644 index 0000000000..9ae092f190 --- /dev/null +++ b/acceptance/bundle/deploy/bind/duplicate-bind-id/databricks.yml @@ -0,0 +1,19 @@ +bundle: + name: test-bind-duplicate-id + +resources: + jobs: + foo: + name: test-bind-dup-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: diff --git a/acceptance/bundle/deploy/bind/duplicate-bind-id/databricks_with_bind.yml b/acceptance/bundle/deploy/bind/duplicate-bind-id/databricks_with_bind.yml new file mode 100644 index 0000000000..0b3abd007f --- /dev/null +++ b/acceptance/bundle/deploy/bind/duplicate-bind-id/databricks_with_bind.yml @@ -0,0 +1,34 @@ +bundle: + name: test-bind-duplicate-id + +resources: + jobs: + foo: + name: 
test-bind-dup-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + bar: + name: test-bind-dup-bar + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + bind: + jobs: + bar: + id: "PLACEHOLDER_JOB_ID" diff --git a/acceptance/bundle/deploy/bind/duplicate-bind-id/hello.py b/acceptance/bundle/deploy/bind/duplicate-bind-id/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/bind/duplicate-bind-id/hello.py @@ -0,0 +1 @@ +print("hello") diff --git a/acceptance/bundle/deploy/bind/duplicate-bind-id/out.test.toml b/acceptance/bundle/deploy/bind/duplicate-bind-id/out.test.toml new file mode 100644 index 0000000000..19b2c349a3 --- /dev/null +++ b/acceptance/bundle/deploy/bind/duplicate-bind-id/out.test.toml @@ -0,0 +1,5 @@ +Local = true +Cloud = true + +[EnvMatrix] + DATABRICKS_BUNDLE_ENGINE = ["direct"] diff --git a/acceptance/bundle/deploy/bind/duplicate-bind-id/output.txt b/acceptance/bundle/deploy/bind/duplicate-bind-id/output.txt new file mode 100644 index 0000000000..60a188806b --- /dev/null +++ b/acceptance/bundle/deploy/bind/duplicate-bind-id/output.txt @@ -0,0 +1,22 @@ + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bind-duplicate-id/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+ +>>> musterr [CLI] bundle plan +Error: bind block for "resources.jobs.bar" has the same ID "[FOO_ID]" as existing resource "resources.jobs.foo"; remove the bind block or the conflicting resource + at targets.default.bind.jobs.bar + +Error: bind validation failed + + +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete resources.jobs.foo + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bind-duplicate-id/default + +Deleting files... +Destroy complete! diff --git a/acceptance/bundle/deploy/bind/duplicate-bind-id/script b/acceptance/bundle/deploy/bind/duplicate-bind-id/script new file mode 100644 index 0000000000..57ab78fcf0 --- /dev/null +++ b/acceptance/bundle/deploy/bind/duplicate-bind-id/script @@ -0,0 +1,18 @@ +# Deploy foo to create it in state +trace $CLI bundle deploy --auto-approve + +# Get foo's job ID from state +JOB_ID=$(read_id.py foo) + +# Switch to a config that keeps foo AND adds bar with a bind block +# pointing to foo's ID. This is a conflict: foo is still managed in +# state and bar tries to bind the same resource ID. 
+cp databricks.yml databricks.yml.bak +cp databricks_with_bind.yml databricks.yml +update_file.py databricks.yml 'PLACEHOLDER_JOB_ID' "$JOB_ID" + +trace musterr $CLI bundle plan + +# Cleanup +cp databricks.yml.bak databricks.yml +trace $CLI bundle destroy --auto-approve diff --git a/acceptance/bundle/deploy/bind/duplicate-bind-id/test.toml b/acceptance/bundle/deploy/bind/duplicate-bind-id/test.toml new file mode 100644 index 0000000000..f70b5c78ff --- /dev/null +++ b/acceptance/bundle/deploy/bind/duplicate-bind-id/test.toml @@ -0,0 +1 @@ +Ignore = [".databricks", "databricks.yml.bak", "databricks_with_bind.yml"] From d54e3214017ac06d3dbde782f77dc473c5b9211d Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 11 Mar 2026 23:58:27 +0100 Subject: [PATCH 11/13] Add design docs for variable interpolation parser and lookup suggestions Two proposals: 1. Replace regex-based ${...} parsing with a proper two-mode character scanner 2. Add "did you mean?" suggestions for invalid variable references Co-Authored-By: Claude Opus 4.6 --- design/interpolation-parser.md | 473 ++++++++++++++++++++ design/variable-lookup-suggestions.md | 605 ++++++++++++++++++++++++++ 2 files changed, 1078 insertions(+) create mode 100644 design/interpolation-parser.md create mode 100644 design/variable-lookup-suggestions.md diff --git a/design/interpolation-parser.md b/design/interpolation-parser.md new file mode 100644 index 0000000000..2495949adf --- /dev/null +++ b/design/interpolation-parser.md @@ -0,0 +1,473 @@ +# Proposal: Replace Regex-Based Variable Interpolation with a Proper Parser + +## Status: Draft + +## Background + +DABs variable interpolation (`${...}` syntax) is currently implemented via regex +matching in `libs/dyn/dynvar/ref.go`: + +```go +baseVarDef = `[a-zA-Z]+([-_]*[a-zA-Z0-9]+)*` +re = regexp.MustCompile( + fmt.Sprintf(`\$\{(%s(\.%s(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`, baseVarDef, baseVarDef), +) +``` + +This regex is used in `NewRef()` via `FindAllStringSubmatch` to extract 
all +`${...}` references from a string. The matched substrings are then resolved by +the pipeline in `resolve.go` (collect → resolve → replace). + +### Problems with the Regex Approach + +1. **No error reporting.** A non-match produces zero information — the user + gets no feedback about *why* `${foo.bar-}` or `${foo..bar}` is silently + ignored. Invalid references are indistinguishable from literal text. + +2. **No position information.** Errors cannot point to the character where + parsing fails. When resolution *does* fail, the error messages refer to the + matched string but not its location within the original value. + +3. **Hard to extend.** Adding new syntax (e.g., default values like + `${var.name:-default}`, or function calls like `${upper(var.name)}`) requires + modifying a regex that is already at the edge of readability. + +4. **No escape mechanism.** There is no way to produce a literal `${` in a + string. Users who need `${` in their output (e.g., shell scripts, ARM + templates) have no workaround. + +5. **Dual maintenance burden.** The regex must be kept in sync with a Python + regex in `python/databricks/bundles/core/_transform.py` — a fragile + arrangement with no automated enforcement. + +6. **Silent acceptance of ambiguous input.** The regex approach cannot + distinguish between "this string has no variable references" and "this string + has a malformed variable reference that should be reported." 
+ +## Research: How Other Systems Parse `${...}` + +| System | Strategy | Escape | Nesting | Error Quality | +|--------|----------|--------|---------|---------------| +| Go `text/template` | State-function lexer | None | Paren depth | Line + template name | +| HCL2 (Terraform) | Ragel FSM + recursive descent | `$${` → literal `${` | Brace depth stack | Source range + suggestions | +| Python f-strings (PEP 701) | Mode-stack tokenizer | `{{` → `{` | Mode stack | Line/column | +| Rust `format!` | Iterator-based descent | `{{`/`}}` | N/A | Spans + suggestions | +| Bash | Char-by-char + depth tracking | `\$` | Full recursive | Line number | + +**Key insight from the research:** For a syntax as simple as `${path.to.var[0]}` +(no nested expressions, no function calls, no operators), a full recursive +descent parser is overkill. The right tool is a **two-mode character scanner** — +the same pattern used by Go's `text/template` lexer and HCL's scanner at their +core. This gives us proper error reporting, escape support, and extensibility +without the complexity of a full parser generator. + +## Proposed Design + +### Architecture: Two-Phase Scanner + +Replace the regex with a small, explicit scanner that operates in two modes: + +``` +Mode 1: TEXT — accumulate literal characters +Mode 2: REFERENCE — accumulate variable path characters inside ${...} +``` + +The scanner produces a flat list of tokens. No AST, no recursive descent — just +a linear scan that cleanly separates literal text from variable references. + +### Token Types + +```go +// TokenKind represents the type of a parsed token. +type TokenKind int + +const ( + TokenLiteral TokenKind = iota // Literal text (no interpolation) + TokenRef // Variable reference: content between ${ and } +) +``` + +### Core Data Structure + +```go +// Token represents a parsed segment of an interpolation string. +type Token struct { + Kind TokenKind + Value string // For Literal: the text. 
For Ref: the variable path (e.g., "a.b[0].c"). + Start int // Byte offset of the start of this token in the original string. + End int // Byte offset past the end of this token. +} +``` + +### Scanner Implementation + +```go +// Parse parses a string that may contain ${...} variable references. +// It returns a slice of tokens representing literal text and variable references. +// +// Escape sequences: +// - "$$" produces a literal "$" (only when followed by "{") +// +// Examples: +// - "hello" → [Literal("hello")] +// - "${a.b}" → [Ref("a.b")] +// - "pre ${a.b} post" → [Literal("pre "), Ref("a.b"), Literal(" post")] +// - "$${a.b}" → [Literal("${a.b}")] +// - "${a.b} ${c[0]}" → [Ref("a.b"), Literal(" "), Ref("c[0]")] +func Parse(s string) ([]Token, error) { + var tokens []Token + i := 0 + buf := strings.Builder{} // accumulates literal text + + flushLiteral := func(end int) { + if buf.Len() > 0 { + tokens = append(tokens, Token{ + Kind: TokenLiteral, + Value: buf.String(), + Start: end - buf.Len(), + End: end, + }) + buf.Reset() + } + } + + for i < len(s) { + if s[i] != '$' { + buf.WriteByte(s[i]) + i++ + continue + } + + // We see '$'. Look ahead. + if i+1 >= len(s) { + // Trailing '$' — treat as literal. + buf.WriteByte('$') + i++ + continue + } + + switch s[i+1] { + case '$': + // Escape: "$$" → literal "$". + buf.WriteByte('$') + i += 2 + + case '{': + // Start of variable reference. + flushLiteral(i) + refStart := i + i += 2 // skip "${" + + // Scan the variable path until we find '}'. + pathStart := i + for i < len(s) && s[i] != '}' { + i++ + } + + if i >= len(s) { + return nil, fmt.Errorf( + "unterminated variable reference at position %d", + refStart, + ) + } + + path := s[pathStart:i] + i++ // skip '}' + + if path == "" { + return nil, fmt.Errorf( + "empty variable reference at position %d", + refStart, + ) + } + + // Validate the path content. 
+ if err := validatePath(path, refStart); err != nil { + return nil, err + } + + tokens = append(tokens, Token{ + Kind: TokenRef, + Value: path, + Start: refStart, + End: i, + }) + + default: + // '$' not followed by '$' or '{' — treat as literal. + buf.WriteByte('$') + i++ + } + } + + flushLiteral(i) + return tokens, nil +} +``` + +### Path Validation + +Rather than encoding path rules in the regex, validate path contents explicitly +after extraction. This function reuses the existing `dyn.NewPathFromString` but +adds character-level error reporting: + +```go +// validatePath checks that a variable path is well-formed. +// It wraps dyn.NewPathFromString with position-aware error messages. +func validatePath(path string, refStart int) error { + _, err := dyn.NewPathFromString(path) + if err != nil { + return fmt.Errorf( + "invalid variable reference ${%s} at position %d: %w", + path, refStart, err, + ) + } + return nil +} +``` + +We should also add validation for the character set used in path segments. The +current regex implicitly enforces `[a-zA-Z]` start and `[a-zA-Z0-9_-]` +continuation. This should move to an explicit check inside path validation: + +```go +func validatePathSegment(seg string) error { + if len(seg) == 0 { + return fmt.Errorf("empty path segment") + } + if seg[0] < 'A' || (seg[0] > 'Z' && seg[0] < 'a') || seg[0] > 'z' { + return fmt.Errorf("path segment must start with a letter, got %q", seg[0]) + } + // ... check continuation characters ... +} +``` + +### Updated Ref Type + +The `Ref` struct changes from storing raw regex match groups to storing parsed +tokens: + +```go +type Ref struct { + Value dyn.Value // Original dyn.Value. + Str string // Original string content. + Tokens []Token // Parsed tokens (literals and references). 
+} + +func NewRef(v dyn.Value) (Ref, bool) { + s, ok := v.AsString() + if !ok { + return Ref{}, false + } + + tokens, err := Parse(s) + if err != nil { + // Return error through a new error-aware API (see Migration section). + return Ref{}, false + } + + // Check if any token is a reference. + hasRef := false + for _, t := range tokens { + if t.Kind == TokenRef { + hasRef = true + break + } + } + if !hasRef { + return Ref{}, false + } + + return Ref{Value: v, Str: s, Tokens: tokens}, true +} +``` + +### Updated Resolution Logic + +The string interpolation in `resolveRef` simplifies from a regex-replacement +loop to a token-concatenation loop: + +```go +func (r *resolver) resolveRef(ref Ref, seen []string) (dyn.Value, error) { + deps := ref.References() + resolved := make([]dyn.Value, len(deps)) + complete := true + + // ... resolve deps (unchanged) ... + + // Pure substitution (single ref, no literals). + if ref.IsPure() && complete { + return dyn.NewValue(resolved[0].Value(), ref.Value.Locations()), nil + } + + // String interpolation: concatenate tokens. + var buf strings.Builder + refIdx := 0 + for _, tok := range ref.Tokens { + switch tok.Kind { + case TokenLiteral: + buf.WriteString(tok.Value) + case TokenRef: + if !resolved[refIdx].IsValid() { + // Skipped — write original ${...} back. + buf.WriteString("${") + buf.WriteString(tok.Value) + buf.WriteByte('}') + } else { + s, err := valueToString(resolved[refIdx]) + if err != nil { + return dyn.InvalidValue, err + } + buf.WriteString(s) + } + refIdx++ + } + } + + return dyn.NewValue(buf.String(), ref.Value.Locations()), nil +} +``` + +This is cleaner than the current approach which uses `strings.Replace` with a +count of 1 — a trick needed to avoid double-replacing when the same variable +appears multiple times. + +### Helper Methods on Ref + +```go +// IsPure returns true if the string is a single variable reference with no +// surrounding text (e.g., "${a.b}" but not "x ${a.b}" or "${a} ${b}"). 
+func (v Ref) IsPure() bool {
+ return len(v.Tokens) == 1 && v.Tokens[0].Kind == TokenRef
+}
+
+// References returns the variable paths referenced in this string.
+func (v Ref) References() []string {
+ var out []string
+ for _, t := range v.Tokens {
+ if t.Kind == TokenRef {
+ out = append(out, t.Value)
+ }
+ }
+ return out
+}
+```
+
+### Escape Sequence: `$$`
+
+Following HCL2's precedent, `$$` before `{` produces a literal `$`. This is the
+most natural escape for users already familiar with Terraform/HCL:
+
+| Input | Output |
+|-------|--------|
+| `${a.b}` | *(resolved value of a.b)* |
+| `$${a.b}` | `${a.b}` (literal) |
+| `$$notbrace` | `$$notbrace` (literal, unchanged — escape applies only before `{`) |
+| `$notbrace` | `$notbrace` (literal) |
+
+This is backward compatible: `$$` is not a valid prefix today (the regex
+requires `${`), so no existing config uses `$$` in a way that would change
+meaning.
+
+## File Changes
+
+| File | Change |
+|------|--------|
+| `libs/dyn/dynvar/ref.go` | Replace regex + `Matches` with `Parse()` + `[]Token` |
+| `libs/dyn/dynvar/ref_test.go` | Update tests: add parser tests, keep behavioral tests |
+| `libs/dyn/dynvar/resolve.go` | Update `resolveRef` to use token concatenation |
+| `libs/dyn/dynvar/resolve_test.go` | Add tests for escape sequences, error messages |
+| `libs/dyn/dynvar/parse.go` | **New file**: scanner + token types |
+| `libs/dyn/dynvar/parse_test.go` | **New file**: scanner unit tests |
+| `python/databricks/bundles/core/_transform.py` | Update Python side to match (separate PR) |
+
+## Migration Strategy
+
+### Phase 1: Add Parser, Keep Regex
+
+1. Implement `Parse()` in a new `parse.go` file with full test coverage.
+2. Add a `NewRefWithDiagnostics(v dyn.Value) (Ref, diag.Diagnostics)` that
+   uses the parser and can return warnings for malformed references.
+3. Keep the existing `NewRef` as-is, calling the parser internally but falling
+   back to the regex for any parse errors (belt-and-suspenders).
+4. 
Add logging when the parser and regex disagree, to catch discrepancies. + +### Phase 2: Switch Over + +1. Remove the regex fallback — `NewRef` uses only the parser. +2. Update `Ref` to store `[]Token` instead of `[][]string`. +3. Update `resolveRef` to use token concatenation. +4. Remove the `re` variable. + +### Phase 3: Add Escape Support + +1. Enable `$$` escape handling in the parser. +2. Document the escape sequence. +3. Update the Python implementation. + +## Compatibility + +- **Forward compatible:** All strings that currently contain valid `${...}` + references will parse identically. The parser accepts a strict superset of + the regex (it can also report errors for malformed references). + +- **Backward compatible escape:** `$$` before `{` is a new feature, not a + breaking change. No existing valid config contains `$${` (the regex would not + match it, and a literal `$${` in YAML has no special meaning today). + +- **Error reporting is additive:** Strings that silently failed to match the + regex will now produce actionable error messages. This is a UX improvement, + not a breaking change, though it could surface new warnings for configs that + previously "worked" by accident (e.g., a typo like `${foo.bar-}` was silently + treated as literal text). + +## Testing Plan + +1. **Parser unit tests** (`parse_test.go`): + - Valid references: single, multiple, with indices, with hyphens/underscores + - Escape sequences: `$$`, `$` at end of string, `$` before non-`{` + - Error cases: unterminated `${`, empty `${}`, invalid characters in path + - Position tracking: verify `Start`/`End` offsets are correct + +2. **Ref behavioral tests** (`ref_test.go`): + - All existing tests pass unchanged + - New tests for `IsPure()` and `References()` using token-based `Ref` + +3. **Resolution tests** (`resolve_test.go`): + - All existing tests pass unchanged + - New tests for escape sequences in interpolation + - New tests verifying improved error messages + +4. 
**Acceptance tests**: + - Add acceptance test with `$$` escape in `databricks.yml` + - Verify existing acceptance tests pass without output changes + +## Why Not a More Powerful Parser? + +A recursive descent parser or parser combinator would allow richer syntax (nested +expressions, function calls, filters). We deliberately avoid this because: + +1. **YAGNI.** The current `${path.to.var[0]}` syntax covers all use cases. There + are no open feature requests for computed expressions inside `${...}`. + +2. **Two implementations.** Any syntax change must be mirrored in the Python + implementation. A simple scanner is easy to port; a recursive descent parser + is not. + +3. **Terraform alignment.** DABs variable references are conceptually similar to + HCL variable references. Keeping the syntax simple avoids user confusion + about what expressions are supported. + +If we ever need richer expressions, the token-based architecture makes it easy to +add a parser layer on top of the scanner without changing the `Ref`/`Token` types +or the resolution pipeline. + +## Summary + +Replace the regex in `dynvar/ref.go` with a ~80-line character scanner that: +- Produces the same results for all valid inputs +- Reports actionable errors for invalid inputs (with byte positions) +- Supports `$$` escape for literal `${` output +- Is straightforward to read, test, and extend +- Simplifies the interpolation logic in `resolve.go` from regex-replacement to + token concatenation diff --git a/design/variable-lookup-suggestions.md b/design/variable-lookup-suggestions.md new file mode 100644 index 0000000000..8e8fa221a4 --- /dev/null +++ b/design/variable-lookup-suggestions.md @@ -0,0 +1,605 @@ +# Proposal: "Did You Mean?" 
Suggestions for Invalid Variable References + +## Status: Draft + +## Problem + +When a user writes a variable reference like `${bundle.git.origin_urlx}` (typo) +or `${var.my_clster_id}` (misspelling), the error message today is: + +``` +reference does not exist: ${bundle.git.origin_urlx} +``` + +That's it. No suggestions, no list of valid keys, no indication of what went +wrong. The user has to go back to the docs or mentally diff against the schema +to figure out the correct name. + +This is a common source of frustration: a single character typo in a long path +like `${resources.jobs.my_pipeline.tasks[0].task_key}` can take minutes to spot. + +Variable references are multi-level paths (e.g., `${resources.jobs.my_job.id}`). +A typo can occur in **any** component — or even in **multiple** components at +once. A good suggestion system must handle all of these cases. + +## What We Have Today + +### The Error Path + +When resolution fails, the call chain is: + +``` +resolve_variable_references.go: dynvar.Resolve(v, lookupFn) + resolve.go: r.resolveKey(dep, seen) + resolve.go: r.fn(p) // calls the lookup function + resolve_variable_references.go: m.lookupFn(normalized, path, b) + resolve_variable_references.go: dyn.GetByPath(v, path) + visit.go: m.GetByString(c.key) // FAILS HERE + return noSuchKeyError{path} +``` + +At the point of failure in `visit.go:135-137`: +```go +m := v.MustMap() +ev, ok := m.GetByString(c.key) +if !ok { + return InvalidValue, noSuchKeyError{path} +} +``` + +The map `m` contains **all sibling keys** — i.e., the valid alternatives. But +that information is discarded. The error only carries the failed `path`. + +Back in `resolve.go:200-201`, the error is rewrapped into a flat string: +```go +if dyn.IsNoSuchKeyError(err) { + err = fmt.Errorf("reference does not exist: ${%s}", key) +} +``` + +**Crucially**, `visit.go` stops at the **first** non-existent key. For a path +like `${resources.jbs.my_jb.id}` with typos in both `jbs` and `my_jb`: +1. 
`resources` — exists, traverse into it +2. `jbs` — **does not exist** → `NoSuchKeyError` immediately + +The traversal never reaches `my_jb`, so we can only suggest fixing `jbs`. The +user has to fix that, re-run, and discover the second typo. This round-trip +is exactly the frustration we want to avoid. + +### The Normalized Config Tree + +In `resolve_variable_references.go:203`, before resolution begins: +```go +normalized, _ := convert.Normalize(b.Config, root, convert.IncludeMissingFields) +``` + +This `normalized` tree includes: +- All struct-defined fields (from Go types), even if unset (via `IncludeMissingFields`) +- All user-defined map keys (resource names, variable names, etc.) + +This is the right source of truth for suggestions. It contains everything the +user could validly reference. + +## Design + +### Approach: Fuzzy Path Walk Against the Config Tree + +Rather than relying on the error from `visit.go` (which only tells us about the +first failing component), we do a **separate fuzzy walk** of the config tree when +a lookup fails. This walk processes every component in the reference path and +can fix typos in **multiple** components simultaneously. + +The flow: +1. Lookup fails with `NoSuchKeyError` +2. We walk the reference path component by component against the normalized tree +3. At each component, if the exact key exists, we follow it +4. If not, we fuzzy-match against sibling keys and follow the best match +5. If all components are resolved (some via fuzzy matching), we suggest the + corrected full path +6. If any component can't be fuzzy-matched (too far from all candidates), we + give up on the suggestion + +### Implementation + +#### 1. Levenshtein Distance Utility + +```go +// File: libs/dyn/dynvar/suggest.go + +package dynvar + +// levenshtein computes the edit distance between two strings. 
+func levenshtein(a, b string) int { + if len(a) == 0 { + return len(b) + } + if len(b) == 0 { + return len(a) + } + + // Use single-row DP to save memory. + prev := make([]int, len(b)+1) + for j := range prev { + prev[j] = j + } + + for i := range len(a) { + curr := make([]int, len(b)+1) + curr[0] = i + 1 + for j := range len(b) { + cost := 1 + if a[i] == b[j] { + cost = 0 + } + curr[j+1] = min( + curr[j]+1, // insertion + prev[j+1]+1, // deletion + prev[j]+cost, // substitution + ) + } + prev = curr + } + + return prev[len(b)] +} +``` + +#### 2. Single-Key Match Function + +```go +// closestKeyMatch finds the closest matching key from a list of candidates. +// Returns the best match and its edit distance. +// Returns ("", -1) if no candidate is within the distance threshold. +func closestKeyMatch(key string, candidates []string) (string, int) { + // Threshold: allow up to 3 edits, but no more than half the key length. + // This avoids suggesting wildly different strings for short keys. + maxDist := min(3, max(1, len(key)/2)) + + bestMatch := "" + bestDist := maxDist + 1 + + for _, c := range candidates { + d := levenshtein(key, c) + if d < bestDist { + bestDist = d + bestMatch = c + } + } + + if bestMatch == "" { + return "", -1 + } + return bestMatch, bestDist +} +``` + +#### 3. Fuzzy Path Walk + +This is the core new function. It walks the reference path against the config +tree, fuzzy-matching at each level: + +```go +// suggestPath walks the reference path against root and tries to find the +// closest valid path. At each component, it first tries an exact match; if +// that fails, it fuzzy-matches against available keys. +// +// Returns the suggested path as a string, or "" if no reasonable suggestion +// can be made. 
+func suggestPath(root dyn.Value, refPath dyn.Path) string { + current := root + suggested := make(dyn.Path, 0, len(refPath)) + + for _, component := range refPath { + if component.IsIndex() { + // For index components (e.g., [0]), we can't fuzzy-match. + // Just check if the index is valid and pass through. + s, ok := current.AsSequence() + if !ok || component.Index() >= len(s) { + return "" + } + suggested = append(suggested, component) + current = s[component.Index()] + continue + } + + key := component.Key() + m, ok := current.AsMap() + if !ok { + // Expected a map but got something else — can't suggest. + return "" + } + + // Try exact match first. + if v, exists := m.GetByString(key); exists { + suggested = append(suggested, component) + current = v + continue + } + + // Exact match failed — try fuzzy match. + candidates := m.StringKeys() + match, _ := closestKeyMatch(key, candidates) + if match == "" { + // No close match — can't suggest beyond this point. + return "" + } + + suggested = append(suggested, dyn.Key(match)) + v, _ := m.GetByString(match) + current = v + } + + return suggested.String() +} +``` + +**Key properties:** +- Handles typos at **any** level: first, middle, last, or multiple levels +- Index components (`[0]`) are passed through verbatim — no fuzzy matching +- Stops suggesting as soon as any component can't be matched (no partial guesses) +- Each component is matched independently, so two typos in different components + are both corrected + +#### 4. Wire It Into Resolution + +The suggestion logic needs access to the normalized config tree that the lookup +function uses. Today, the `Lookup` function type is: + +```go +type Lookup func(path dyn.Path) (dyn.Value, error) +``` + +The `resolve.go` resolver doesn't have direct access to the underlying tree — +it only has the lookup function. We add the suggestion logic at the layer above, +in `resolve_variable_references.go`, which has access to the `normalized` tree. 
+ +**Option A: Pass a suggest function into the resolver** + +Add an optional suggest callback to the resolver: + +```go +// SuggestFn takes a failed reference path and returns a suggested correction, +// or "" if no suggestion can be made. +type SuggestFn func(path dyn.Path) string + +func Resolve(in dyn.Value, fn Lookup, opts ...ResolveOption) (out dyn.Value, err error) { + r := resolver{in: in, fn: fn} + for _, opt := range opts { + opt(&r) + } + return r.run() +} + +type ResolveOption func(*resolver) + +func WithSuggestFn(fn SuggestFn) ResolveOption { + return func(r *resolver) { + r.suggestFn = fn + } +} +``` + +Then in `resolveKey`: +```go +v, err := r.fn(p) +if err != nil { + if dyn.IsNoSuchKeyError(err) { + msg := fmt.Sprintf("reference does not exist: ${%s}", key) + + if r.suggestFn != nil { + if suggestion := r.suggestFn(p); suggestion != "" { + msg += fmt.Sprintf("; did you mean ${%s}?", suggestion) + } + } + + err = fmt.Errorf(msg) + } + + r.lookups[key] = lookupResult{v: dyn.InvalidValue, err: err} + return dyn.InvalidValue, err +} +``` + +And in `resolve_variable_references.go`, pass the suggest function: + +```go +return dynvar.Resolve(v, lookupFn, dynvar.WithSuggestFn(func(p dyn.Path) string { + return dynvar.SuggestPath(normalized, p) +})) +``` + +**Option B: Suggest at the `resolve_variable_references.go` level** + +Instead of modifying `Resolve`'s signature, wrap the error after `Resolve` +returns. This is simpler but less clean: + +```go +out, err := dynvar.Resolve(v, lookupFn) +if err != nil && dyn.IsNoSuchKeyError(err) { + // Extract the failed path and suggest... +} +``` + +The problem with Option B is that by the time `Resolve` returns, the original +`dyn.Path` is lost — it's been formatted into the error string. We'd have to +re-parse it or change the error type. 
**Option A is cleaner.**
+
+### Example Error Messages
+
+| Reference | Typos | Error After |
+|-----------|-------|-------------|
+| `${bundle.git.origin_urlx}` | 1 (leaf) | `did you mean ${bundle.git.origin_url}?` |
+| `${resources.jbs.my_job.id}` | 1 (middle) | `did you mean ${resources.jobs.my_job.id}?` |
+| `${resources.jbs.my_jb.id}` | 2 (middle + middle) | `did you mean ${resources.jobs.my_job.id}?` |
+| `${bundel.git.origin_urlx}` | 2 (root + leaf) | `did you mean ${bundle.git.origin_url}?` |
+| `${workspace.root_paht}` | 1 (leaf) | `did you mean ${workspace.root_path}?` |
+| `${var.my_clster_id}` | 1 (leaf) | `did you mean ${var.my_cluster_id}?` |
+| `${completely.wrong.path}` | all | *(no suggestion — too far at first component)* |
+| `${resources.jobs.my_jb.idd}` | 2 (deep) | `did you mean ${resources.jobs.my_job.id}?` |
+
+### Walk-Through: Multi-Level Typo
+
+For `${resources.jbs.my_jb.id}`, the fuzzy walk proceeds:
+
+```
+Component Tree at this level Exact? Fuzzy match
+───────── ────────────────── ────── ───────────
+resources {bundle, resources, ...} yes —
+jbs {jobs, pipelines, ...} no "jobs" (dist=1)
+my_jb {my_job, other_job, ...} no "my_job" (dist=1)
+id {id, name, ...} yes —
+
+Suggested path: resources.jobs.my_job.id
+```
+
+All four components resolved, so we suggest `${resources.jobs.my_job.id}`.
+
+### Walk-Through: Unfixable Path
+
+For `${zzz.yyy.xxx}`:
+
+```
+Component Tree at this level Exact? Fuzzy match
+───────── ────────────────── ────── ───────────
+zzz {bundle, resources, ...} no none (all dist>3)
+
+Suggested path: "" (give up)
+```
+
+No suggestion produced.
+ +## Scope + +### What This Covers + +- Typos in struct field names: `${bundle.git.origin_urlx}` (keys from Go types) +- Typos in user-defined names: `${var.my_clster_id}` (keys from user config) +- Typos in resource type names: `${resources.jbs.my_job.id}` +- Typos in resource instance names: `${resources.jobs.my_jb.id}` +- **Multi-level typos**: `${resources.jbs.my_jb.id}` (typos at two levels) + +### What This Does NOT Cover + +- **Invalid path structure** (e.g., `${a..b}` or `${a[x]}`) — this is a parse + error, not a lookup error, and would be handled by the parser proposal. +- **References to the wrong section** (e.g., user writes `${bundle.cluster_id}` + when they mean `${var.cluster_id}`) — the prefix is valid so we'd only + suggest keys within `bundle.*`. Cross-section suggestions would require + searching the entire tree, which is a separate feature. +- **Array index out of bounds** (e.g., `${resources.jobs.foo.tasks[99]}`) — this + is an `indexOutOfBoundsError`, not a `noSuchKeyError`. No suggestions apply. + +## `var` Shorthand + +The `${var.foo}` shorthand is rewritten to `${variables.foo.value}` before +lookup (in `resolve_variable_references.go:209-222`). The suggestion function +receives the **rewritten** path. If we suggest a corrected path, we should +convert it back to the shorthand form for the user-facing message. + +For example: +- User writes: `${var.my_clster_id}` +- Rewritten to: `${variables.my_clster_id.value}` +- Suggestion from fuzzy walk: `variables.my_cluster_id.value` +- User-facing message: `did you mean ${var.my_cluster_id}?` + +This reverse mapping is straightforward: if the suggested path starts with +`variables.` and ends with `.value`, strip those and prefix with `var.`. 
+ +## File Changes + +| File | Change | +|------|--------| +| `libs/dyn/mapping.go` | Add `StringKeys()` helper | +| `libs/dyn/dynvar/suggest.go` | **New**: `levenshtein()`, `closestKeyMatch()`, `SuggestPath()` | +| `libs/dyn/dynvar/suggest_test.go` | **New**: tests for distance, matching, and path suggestion | +| `libs/dyn/dynvar/resolve.go` | Add `SuggestFn` field, use it in `resolveKey` | +| `libs/dyn/dynvar/resolve_test.go` | Add tests for suggestion error messages | +| `bundle/config/mutator/resolve_variable_references.go` | Pass `WithSuggestFn` to `Resolve` | + +Note: no changes to `libs/dyn/visit.go` — the suggestion logic is entirely +separate from the traversal error path. + +## Testing + +### Unit Tests for Levenshtein + Suggestions + +```go +func TestLevenshtein(t *testing.T) { + assert.Equal(t, 0, levenshtein("abc", "abc")) + assert.Equal(t, 1, levenshtein("abc", "ab")) // deletion + assert.Equal(t, 1, levenshtein("abc", "abcd")) // insertion + assert.Equal(t, 1, levenshtein("abc", "adc")) // substitution + assert.Equal(t, 3, levenshtein("abc", "xyz")) // all different + assert.Equal(t, 3, levenshtein("", "abc")) // empty vs non-empty +} + +func TestClosestKeyMatch(t *testing.T) { + candidates := []string{"origin_url", "branch", "commit"} + + match, dist := closestKeyMatch("origin_urlx", candidates) + assert.Equal(t, "origin_url", match) + assert.Equal(t, 1, dist) + + match, _ = closestKeyMatch("zzzzzzz", candidates) + assert.Equal(t, "", match) +} +``` + +### Fuzzy Path Walk Tests + +```go +func TestSuggestPathSingleTypo(t *testing.T) { + root := dyn.V(map[string]dyn.Value{ + "bundle": dyn.V(map[string]dyn.Value{ + "git": dyn.V(map[string]dyn.Value{ + "origin_url": dyn.V(""), + "branch": dyn.V(""), + }), + }), + }) + + p := dyn.MustPathFromString("bundle.git.origin_urlx") + assert.Equal(t, "bundle.git.origin_url", SuggestPath(root, p)) +} + +func TestSuggestPathMultiLevelTypo(t *testing.T) { + root := dyn.V(map[string]dyn.Value{ + "resources": 
dyn.V(map[string]dyn.Value{ + "jobs": dyn.V(map[string]dyn.Value{ + "my_job": dyn.V(map[string]dyn.Value{ + "id": dyn.V(""), + }), + }), + }), + }) + + p := dyn.MustPathFromString("resources.jbs.my_jb.id") + assert.Equal(t, "resources.jobs.my_job.id", SuggestPath(root, p)) +} + +func TestSuggestPathNoMatch(t *testing.T) { + root := dyn.V(map[string]dyn.Value{ + "bundle": dyn.V(map[string]dyn.Value{ + "name": dyn.V(""), + }), + }) + + p := dyn.MustPathFromString("zzzzz.yyyyy") + assert.Equal(t, "", SuggestPath(root, p)) +} + +func TestSuggestPathWithIndex(t *testing.T) { + root := dyn.V(map[string]dyn.Value{ + "resources": dyn.V(map[string]dyn.Value{ + "jobs": dyn.V(map[string]dyn.Value{ + "my_job": dyn.V(map[string]dyn.Value{ + "tasks": dyn.V([]dyn.Value{ + dyn.V(map[string]dyn.Value{ + "task_key": dyn.V(""), + }), + }), + }), + }), + }), + }) + + p := dyn.MustPathFromString("resources.jobs.my_job.tasks[0].tsk_key") + assert.Equal(t, "resources.jobs.my_job.tasks[0].task_key", SuggestPath(root, p)) +} +``` + +### Integration-Level Tests + +```go +func TestResolveNotFoundWithSuggestion(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "bundle": dyn.V(map[string]dyn.Value{ + "name": dyn.V("my-bundle"), + "target": dyn.V("dev"), + }), + "ref": dyn.V("${bundle.nme}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in), + dynvar.WithSuggestFn(func(p dyn.Path) string { + return dynvar.SuggestPath(in, p) + }), + ) + assert.ErrorContains(t, err, "reference does not exist: ${bundle.nme}") + assert.ErrorContains(t, err, "did you mean ${bundle.name}?") +} + +func TestResolveNotFoundMultiLevelTypo(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "resources": dyn.V(map[string]dyn.Value{ + "jobs": dyn.V(map[string]dyn.Value{ + "my_job": dyn.V(map[string]dyn.Value{ + "id": dyn.V("123"), + }), + }), + }), + "ref": dyn.V("${resources.jbs.my_jb.id}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in), + dynvar.WithSuggestFn(func(p dyn.Path) string { 
+ return dynvar.SuggestPath(in, p) + }), + ) + assert.ErrorContains(t, err, "did you mean ${resources.jobs.my_job.id}?") +} +``` + +## Alternatives Considered + +### A. Fix one component at a time (original proposal) + +Only suggest a fix for the first failing component. After the user fixes that, +they re-run and discover the next typo. + +**Rejected** because: +- Requires multiple round-trips for multi-level typos +- The fuzzy walk approach is barely more complex but gives a much better UX + +### B. Enumerate all valid paths in the error + +List all valid sibling keys: + +``` +reference does not exist: ${bundle.nme}; valid keys at "bundle" are: name, target, git +``` + +**Rejected** because for large maps (e.g., `resources.jobs` with dozens of jobs) +this would produce very noisy output. A single close match is more actionable. + +### C. Search the entire tree for the closest leaf path + +Walk the entire normalized tree and compute edit distance for every possible +leaf path against the full reference string. + +**Rejected** because: +- Expensive for large configs (every leaf × string distance) +- Could suggest paths in completely unrelated sections +- The per-component walk is more predictable and faster (bounded by path depth) + +### D. Do nothing — rely on docs/IDE support + +**Rejected** because: +- Many users don't use an IDE for YAML editing +- The error happens at `databricks bundle validate` time, which is the right + place for actionable feedback +- This is low-effort, high-value + +## Relationship to Parser Proposal + +This proposal is **independent** of the regex-to-parser migration. It can be +implemented with the current regex-based `NewRef` — the suggestion logic operates +at the resolution level, not the parsing level. 
+ +However, the two proposals complement each other: +- The parser proposal improves error reporting for **malformed** references + (e.g., `${a..b}`, unterminated `${`) +- This proposal improves error reporting for **well-formed but invalid** + references (e.g., `${bundle.nme}`) + +Both can be implemented and shipped independently. From 4219a02ad46dcbac0b9acc243758a35303dca6bb Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 12 Mar 2026 01:45:42 +0100 Subject: [PATCH 12/13] Add test coverage for bind-then-update action types Extends the bind-and-update acceptance test to verify that the action type transitions from "bind" on the first deploy to "update" on subsequent deploys after the resource is already bound in state. Co-Authored-By: Claude Opus 4.6 --- .../bundle/deploy/bind/bind-and-update/output.txt | 14 ++++++++++++++ .../bundle/deploy/bind/bind-and-update/script | 12 +++++++++++- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/acceptance/bundle/deploy/bind/bind-and-update/output.txt b/acceptance/bundle/deploy/bind/bind-and-update/output.txt index b45e2607b8..92b414476c 100644 --- a/acceptance/bundle/deploy/bind/bind-and-update/output.txt +++ b/acceptance/bundle/deploy/bind/bind-and-update/output.txt @@ -13,6 +13,20 @@ Deployment complete! >>> [CLI] jobs get [NEW_JOB_ID] updated-job-name +>>> [CLI] bundle plan +update jobs.foo + +Plan: 0 to add, 1 to change, 0 to delete, 0 unchanged + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bind-update/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+ +>>> [CLI] jobs get [NEW_JOB_ID] +second-update-name + >>> [CLI] bundle destroy --auto-approve The following resources will be deleted: delete resources.jobs.foo diff --git a/acceptance/bundle/deploy/bind/bind-and-update/script b/acceptance/bundle/deploy/bind/bind-and-update/script index b0a27071c8..0c2c3431f6 100644 --- a/acceptance/bundle/deploy/bind/bind-and-update/script +++ b/acceptance/bundle/deploy/bind/bind-and-update/script @@ -5,7 +5,7 @@ add_repl.py $NEW_JOB_ID NEW_JOB_ID # Update the databricks.yml with the actual job ID update_file.py databricks.yml 'PLACEHOLDER_JOB_ID' "$NEW_JOB_ID" -# Run plan - should show import_and_update action (name differs from config) +# Run plan - should show bind action (name differs from config) trace $CLI bundle plan # Deploy with auto-approve @@ -14,5 +14,15 @@ trace $CLI bundle deploy --auto-approve # Verify the job was updated trace $CLI jobs get $NEW_JOB_ID | jq -r .settings.name +# Now update the job name again in the config and deploy again. +# This time the action should be "update", not "bind", since the resource +# is already bound in state. +update_file.py databricks.yml 'updated-job-name' 'second-update-name' +trace $CLI bundle plan +trace $CLI bundle deploy --auto-approve + +# Verify the job was updated with the second name +trace $CLI jobs get $NEW_JOB_ID | jq -r .settings.name + # Cleanup trace $CLI bundle destroy --auto-approve From 01469517bd218df578453501ef8599c44e747b9a Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Thu, 12 Mar 2026 02:08:40 +0100 Subject: [PATCH 13/13] Block destroy with active bind blocks and tighten bind plan logic Two changes: 1. Destroy validation: bundle destroy now errors when bind blocks in the target configuration reference resources tracked in the deployment state. This prevents accidentally deleting pre-existing workspace resources that were imported via bind. Users must remove the bind blocks or run 'bundle deployment unbind' before destroying. 2. 
Bind plan logic: use an explicit switch on the max action type during bind planning. Only Update and Resize map to BindAndUpdate. Recreate and UpdateWithID produce user-facing errors. Any other action type (e.g. Create, Delete) is treated as an internal error. Co-Authored-By: Claude Opus 4.6 --- acceptance/bundle/deploy/bind/basic/script | 3 ++ .../bundle/deploy/bind/bind-and-update/script | 3 ++ .../bind/destroy-blocked/databricks.yml | 23 +++++++++ .../deploy/bind/destroy-blocked/hello.py | 1 + .../deploy/bind/destroy-blocked/output.txt | 18 +++++++ .../bundle/deploy/bind/destroy-blocked/script | 16 ++++++ bundle/direct/bundle_plan.go | 20 +++++--- bundle/phases/destroy.go | 49 +++++++++++++++++++ 8 files changed, 125 insertions(+), 8 deletions(-) create mode 100644 acceptance/bundle/deploy/bind/destroy-blocked/databricks.yml create mode 100644 acceptance/bundle/deploy/bind/destroy-blocked/hello.py create mode 100644 acceptance/bundle/deploy/bind/destroy-blocked/output.txt create mode 100644 acceptance/bundle/deploy/bind/destroy-blocked/script diff --git a/acceptance/bundle/deploy/bind/basic/script b/acceptance/bundle/deploy/bind/basic/script index 2252f3a630..33038ea51b 100644 --- a/acceptance/bundle/deploy/bind/basic/script +++ b/acceptance/bundle/deploy/bind/basic/script @@ -17,5 +17,8 @@ trace $CLI bundle plan # Verify state file contains the imported ID trace print_state.py | contains.py "$NEW_JOB_ID" +# Remove bind block before destroy (destroy blocks on active bind blocks) +python3 -c "open('databricks.yml', 'w').write(open('databricks.yml').read().split('\ntargets:')[0] + '\n')" + # Cleanup trace $CLI bundle destroy --auto-approve diff --git a/acceptance/bundle/deploy/bind/bind-and-update/script b/acceptance/bundle/deploy/bind/bind-and-update/script index 0c2c3431f6..029b91b725 100644 --- a/acceptance/bundle/deploy/bind/bind-and-update/script +++ b/acceptance/bundle/deploy/bind/bind-and-update/script @@ -24,5 +24,8 @@ trace $CLI bundle deploy 
--auto-approve # Verify the job was updated with the second name trace $CLI jobs get $NEW_JOB_ID | jq -r .settings.name +# Remove bind block before destroy (destroy blocks on active bind blocks) +python3 -c "open('databricks.yml', 'w').write(open('databricks.yml').read().split('\ntargets:')[0] + '\n')" + # Cleanup trace $CLI bundle destroy --auto-approve diff --git a/acceptance/bundle/deploy/bind/destroy-blocked/databricks.yml b/acceptance/bundle/deploy/bind/destroy-blocked/databricks.yml new file mode 100644 index 0000000000..f60645ff51 --- /dev/null +++ b/acceptance/bundle/deploy/bind/destroy-blocked/databricks.yml @@ -0,0 +1,23 @@ +bundle: + name: test-bind-destroy-blocked + +resources: + jobs: + foo: + name: test-bind-job + environments: + - environment_key: default + spec: + client: "1" + tasks: + - task_key: my_task + environment_key: default + spark_python_task: + python_file: ./hello.py + +targets: + default: + bind: + jobs: + foo: + id: "PLACEHOLDER_JOB_ID" diff --git a/acceptance/bundle/deploy/bind/destroy-blocked/hello.py b/acceptance/bundle/deploy/bind/destroy-blocked/hello.py new file mode 100644 index 0000000000..11b15b1a45 --- /dev/null +++ b/acceptance/bundle/deploy/bind/destroy-blocked/hello.py @@ -0,0 +1 @@ +print("hello") diff --git a/acceptance/bundle/deploy/bind/destroy-blocked/output.txt b/acceptance/bundle/deploy/bind/destroy-blocked/output.txt new file mode 100644 index 0000000000..7c452cf0ae --- /dev/null +++ b/acceptance/bundle/deploy/bind/destroy-blocked/output.txt @@ -0,0 +1,18 @@ + +>>> [CLI] bundle deploy --auto-approve +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bind-destroy-blocked/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+ +>>> musterr [CLI] bundle destroy --auto-approve +Error: cannot destroy with bind blocks that reference resources in the deployment state: resources.jobs.foo; remove the bind blocks from the target configuration or run 'bundle deployment unbind' before destroying + +>>> [CLI] bundle destroy --auto-approve +The following resources will be deleted: + delete resources.jobs.foo + +All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/test-bind-destroy-blocked/default + +Deleting files... +Destroy complete! diff --git a/acceptance/bundle/deploy/bind/destroy-blocked/script b/acceptance/bundle/deploy/bind/destroy-blocked/script new file mode 100644 index 0000000000..559b5fbcff --- /dev/null +++ b/acceptance/bundle/deploy/bind/destroy-blocked/script @@ -0,0 +1,16 @@ +# Create a job in the workspace +NEW_JOB_ID=$($CLI jobs create --json '{"name": "test-bind-job", "environments": [{"environment_key": "default", "spec": {"client": "1"}}], "tasks": [{"task_key": "my_task", "environment_key": "default", "spark_python_task": {"python_file": "/Workspace/test.py"}}]}' | jq -r .job_id) +add_repl.py $NEW_JOB_ID NEW_JOB_ID + +# Update the databricks.yml with the actual job ID +update_file.py databricks.yml 'PLACEHOLDER_JOB_ID' "$NEW_JOB_ID" + +# Deploy to bind the resource into state +trace $CLI bundle deploy --auto-approve + +# Try to destroy - should fail because bind blocks reference resources in state +trace musterr $CLI bundle destroy --auto-approve + +# Remove bind block and destroy to clean up +python3 -c "open('databricks.yml', 'w').write(open('databricks.yml').read().split('\ntargets:')[0] + '\n')" +trace $CLI bundle destroy --auto-approve diff --git a/bundle/direct/bundle_plan.go b/bundle/direct/bundle_plan.go index 542a0adccb..1c7faf4653 100644 --- a/bundle/direct/bundle_plan.go +++ b/bundle/direct/bundle_plan.go @@ -1101,16 +1101,20 @@ func (b *DeploymentBundle) handleBindPlan(ctx context.Context, resourceKey strin // 
Determine action based on changes maxAction := getMaxAction(entry.Changes) - // Block recreate action for bound resources - if maxAction == deployplan.Recreate { - logdiag.LogError(ctx, fmt.Errorf("%s: cannot recreate resource with bind block; this would destroy the existing workspace resource. Remove the bind block to allow recreation", errorPrefix)) - return false - } - - if maxAction == deployplan.Skip || maxAction == deployplan.Undefined { + switch maxAction { + case deployplan.Skip, deployplan.Undefined: entry.Action = deployplan.Bind - } else { + case deployplan.Update, deployplan.Resize: entry.Action = deployplan.BindAndUpdate + case deployplan.Recreate: + logdiag.LogError(ctx, fmt.Errorf("%s: cannot recreate resource with bind block; this would destroy the existing workspace resource. Remove the bind block to allow recreation", errorPrefix)) + return false + case deployplan.UpdateWithID: + logdiag.LogError(ctx, fmt.Errorf("%s: cannot update resource ID with bind block; this would replace the existing workspace resource. 
Remove the bind block to allow ID update", errorPrefix)) + return false + default: + logdiag.LogError(ctx, fmt.Errorf("%s: internal error: unexpected action %q during bind planning", errorPrefix, maxAction)) + return false } return true diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index da6fdace15..e0bf627b83 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -2,10 +2,16 @@ package phases import ( "context" + "encoding/json" "errors" + "fmt" "net/http" + "os" + "slices" + "strings" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/engine" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/deploy/files" @@ -13,6 +19,7 @@ import ( "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/deployplan" "github.com/databricks/cli/bundle/direct" + "github.com/databricks/cli/bundle/direct/dstate" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/logdiag" @@ -158,6 +165,15 @@ func Destroy(ctx context.Context, b *bundle.Bundle, engine engine.EngineType) { var plan *deployplan.Plan if engine.IsDirect() { _, localPath := b.StateFilenameDirect(ctx) + + // Validate: cannot destroy resources managed via bind blocks. + if b.Target != nil && !b.Target.Bind.IsEmpty() { + if err := validateNoBindForDestroy(localPath, b.Target.Bind); err != nil { + logdiag.LogError(ctx, err) + return + } + } + plan, err = b.DeploymentBundle.CalculatePlan(ctx, b.WorkspaceClient(), nil, localPath, nil) if err != nil { logdiag.LogError(ctx, err) @@ -189,3 +205,36 @@ func Destroy(ctx context.Context, b *bundle.Bundle, engine engine.EngineType) { cmdio.LogString(ctx, "Destroy cancelled!") } } + +// validateNoBindForDestroy checks that no bind blocks reference resources +// that are currently tracked in the deployment state. 
Destroying bound resources +// would delete pre-existing workspace resources, which is likely unintended. +func validateNoBindForDestroy(statePath string, bindConfig config.Bind) error { + data, err := os.ReadFile(statePath) + if err != nil { + if os.IsNotExist(err) { + return nil + } + return fmt.Errorf("reading state from %s: %w", statePath, err) + } + + var db dstate.Database + if err := json.Unmarshal(data, &db); err != nil { + return fmt.Errorf("parsing state from %s: %w", statePath, err) + } + + var boundInState []string + bindConfig.ForEach(func(resourceType, resourceName, bindID string) { + key := "resources." + resourceType + "." + resourceName + if entry, ok := db.State[key]; ok && entry.ID == bindID { + boundInState = append(boundInState, key) + } + }) + + if len(boundInState) == 0 { + return nil + } + + slices.Sort(boundInState) + return fmt.Errorf("cannot destroy with bind blocks that reference resources in the deployment state: %s; remove the bind blocks from the target configuration or run 'bundle deployment unbind' before destroying", strings.Join(boundInState, ", ")) +}