Merged
Changes from all commits · 25 commits
ec78345
[DECO-25663] Replace default-python template with improved version
lennartkats-db Oct 13, 2025
f4479fc
Update acceptance test outputs for default-python template
lennartkats-db Oct 13, 2025
e63b542
Minor tweak, update tests
lennartkats-db Oct 14, 2025
6321f5c
Formatting
lennartkats-db Oct 14, 2025
17d6855
Fix whitespace in template acceptance test outputs
lennartkats-db Oct 14, 2025
37df48c
Fix template whitespace: use {{- end}} to strip trailing newline
lennartkats-db Oct 14, 2025
af524bb
Update NEXT_CHANGELOG.md
lennartkats-db Oct 15, 2025
5ed4067
Update libs/template/templates/default-python/databricks_template_sch…
lennartkats-db Oct 16, 2025
bb33f6e
Update libs/template/templates/default-python/databricks_template_sch…
lennartkats-db Oct 16, 2025
7890ddb
Update libs/template/templates/default-python/databricks_template_sch…
lennartkats-db Oct 16, 2025
d588ebe
Update libs/template/templates/default-python/databricks_template_sch…
lennartkats-db Oct 16, 2025
a3673b8
Update libs/template/templates/default-python/databricks_template_sch…
lennartkats-db Oct 16, 2025
e1d4b49
Update libs/template/templates/default-python/databricks_template_sch…
lennartkats-db Oct 16, 2025
03c2853
Update acceptance tests for default-python template
lennartkats-db Oct 16, 2025
0910a80
Fix URL
lennartkats-db Oct 16, 2025
5e7f05e
Remove the project_name_short special casing
lennartkats-db Oct 16, 2025
1e34b95
Avoid using environments in notebook task (PrPr feature)
lennartkats-db Oct 22, 2025
52c67eb
Cleanup
lennartkats-db Oct 22, 2025
24b914b
Update acceptance tests for current state
lennartkats-db Nov 2, 2025
c51bbf2
Merge remote-tracking branch 'origin/main' into replace-default-pytho…
lennartkats-db Nov 2, 2025
27f48d9
Apply fixes for non-UC support, catalog references, and PyDABs compat…
lennartkats-db Nov 3, 2025
fe7fca4
Add terraform state files to .gitignore
lennartkats-db Nov 3, 2025
b5598b4
Cleanup .gitignore, add development entries
lennartkats-db Nov 3, 2025
2d6df7d
Fix serverless DLT catalog for workspaces without default catalog
lennartkats-db Nov 4, 2025
e7daf19
QA
lennartkats-db Nov 4, 2025
1 change: 1 addition & 0 deletions NEXT_CHANGELOG.md
@@ -9,6 +9,7 @@
### Dependency updates

### Bundles
+ * Updated the default-python template to follow the Lakeflow conventions: pipelines as source files, pyproject.toml ([#3712](https://github.com/databricks/cli/pull/3712)).
* Fix a permissions bug adding second IS\_OWNER and causing "The job must have exactly one owner." error. Introduced in 0.274.0. ([#3850](https://github.com/databricks/cli/pull/3850))

### API Changes
@@ -1,68 +1,68 @@
--- [TESTROOT]/bundle/templates/default-python/classic/../serverless/output/my_default_python/databricks.yml
+++ output/my_default_python/databricks.yml
@@ -25,4 +25,11 @@
host: [DATABRICKS_URL]

@@ -34,4 +34,6 @@
catalog: hive_metastore
schema: ${workspace.current_user.short_name}
+ presets:
+ # Set dynamic_version: true on all artifacts of type "whl".
+ # This makes "bundle deploy" add a timestamp to wheel's version before uploading,
+ # new wheel takes over the previous installation even if actual wheel version is unchanged.
+ # See https://docs.databricks.com/aws/en/dev-tools/bundles/settings
+ artifacts_dynamic_version: true
+
prod:
mode: production
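For context on the presets block added above: artifacts_dynamic_version: true is equivalent to setting dynamic_version on each wheel artifact individually. A minimal sketch of the per-artifact form (the artifact name and build command here are assumptions):

```yaml
artifacts:
  default:
    type: whl
    path: .
    build: python -m build --wheel
    # Same effect as the preset, scoped to one artifact: "bundle deploy"
    # appends a timestamp to the wheel version, so a redeploy replaces the
    # previously installed wheel even when the declared version is unchanged.
    dynamic_version: true
```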
--- [TESTROOT]/bundle/templates/default-python/classic/../serverless/output/my_default_python/resources/my_default_python.job.yml
+++ output/my_default_python/resources/my_default_python.job.yml
@@ -17,4 +17,5 @@
tasks:
- task_key: notebook_task
+ job_cluster_key: job_cluster
--- [TESTROOT]/bundle/templates/default-python/classic/../serverless/output/my_default_python/resources/my_default_python_etl.pipeline.yml
+++ output/my_default_python/resources/my_default_python_etl.pipeline.yml
@@ -5,8 +5,7 @@
my_default_python_etl:
name: my_default_python_etl
- # Catalog is required for serverless compute
- catalog: main
+ ## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog:
+ # catalog: ${var.catalog}
schema: ${var.schema}
- serverless: true
root_path: "../src/my_default_python_etl"

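The ${var.catalog} and ${var.schema} references above resolve to bundle variables. A sketch of how these are plausibly declared in databricks.yml, with dev-target values matching the hunk shown earlier (the descriptions are assumptions):

```yaml
variables:
  catalog:
    description: The catalog to use
  schema:
    description: The schema to use

targets:
  dev:
    variables:
      catalog: hive_metastore
      schema: ${workspace.current_user.short_name}
```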
--- [TESTROOT]/bundle/templates/default-python/classic/../serverless/output/my_default_python/resources/sample_job.job.yml
+++ output/my_default_python/resources/sample_job.job.yml
@@ -26,4 +26,10 @@
notebook_task:
notebook_path: ../src/notebook.ipynb
@@ -29,17 +30,21 @@
notebook_path: ../src/sample_notebook.ipynb
+ job_cluster_key: job_cluster
+ libraries:
+ # By default we just include the .whl file generated for the my_default_python package.
+ # See https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
+ # for more information on how to add other libraries.
+ - whl: ../dist/*.whl
- task_key: python_wheel_task
depends_on:
- task_key: refresh_pipeline
@@ -37,5 +43,10 @@
- "--schema"
- "${var.schema}"
- environment_key: default
+ job_cluster_key: job_cluster
python_wheel_task:
package_name: my_default_python
entry_point: main
+ libraries:
+ # By default we just include the .whl file generated for the my_default_python package.
+ # See https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
+ # for more information on how to add other libraries.
+ - whl: ../dist/*.whl
- task_key: refresh_pipeline
depends_on:
@@ -44,11 +55,11 @@
pipeline_id: ${resources.pipelines.my_default_python_etl.id}

- # A list of task execution environment specifications that can be referenced by tasks of this job.
- environments:
- - environment_key: default
-
- # Full documentation of this spec can be found at:
- # https://docs.databricks.com/api/workspace/jobs/create#environments-spec
- spec:
- environment_version: "2"
- dependencies:
- # By default we just include the .whl file generated for the my_default_python package.
- # See https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
- # for more information on how to add other libraries.
- - ../dist/*.whl
+ job_clusters:
+ - job_cluster_key: job_cluster
+ new_cluster:
+ spark_version: 16.4.x-scala2.12
+ node_type_id: [NODE_TYPE_ID]
+ data_security_mode: SINGLE_USER
+ autoscale:
+ min_workers: 1
+ max_workers: 4
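For comparison, the serverless variant replaced by the job_clusters block above declared a shared task environment instead of a cluster; reconstructed from the removed lines with indentation restored:

```yaml
# A list of task execution environment specifications that can be referenced by tasks of this job.
environments:
  - environment_key: default
    # Full documentation of this spec can be found at:
    # https://docs.databricks.com/api/workspace/jobs/create#environments-spec
    spec:
      environment_version: "2"
      dependencies:
        # By default we just include the .whl file generated for the my_default_python package.
        - ../dist/*.whl
```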
--- [TESTROOT]/bundle/templates/default-python/classic/../serverless/output/my_default_python/resources/my_default_python.pipeline.yml
+++ output/my_default_python/resources/my_default_python.pipeline.yml
@@ -4,8 +4,7 @@
my_default_python_pipeline:
name: my_default_python_pipeline
- ## Catalog is required for serverless compute
- catalog: main
+ ## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog:
+ # catalog: catalog_name
schema: my_default_python_${bundle.target}
- serverless: true
libraries:
- notebook:
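Without a catalog field, this classic pipeline publishes to the Hive metastore; uncommenting the field switches it to Unity Catalog. A sketch (the catalog name main is an assumption for workspaces that define it):

```yaml
my_default_python_pipeline:
  name: my_default_python_pipeline
  catalog: main  # enable Unity Catalog output
  schema: my_default_python_${bundle.target}
```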
@@ -1,10 +1,10 @@
{
"plan": {
"resources.jobs.my_default_python_job": {
"resources.jobs.sample_job": {
"depends_on": [
{
"node": "resources.pipelines.my_default_python_pipeline",
"label": "${resources.pipelines.my_default_python_pipeline.id}"
"node": "resources.pipelines.my_default_python_etl",
"label": "${resources.pipelines.my_default_python_etl.id}"
}
],
"action": "create",
@@ -27,43 +27,64 @@
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "15.4.x-scala2.12"
"spark_version": "16.4.x-scala2.12"
}
}
],
"max_concurrent_runs": 4,
"name": "[dev [USERNAME]] my_default_python_job",
"name": "[dev [USERNAME]] sample_job",
"parameters": [
{
"default": "hive_metastore",
"name": "catalog"
},
{
"default": "[USERNAME]",
"name": "schema"
}
],
"queue": {
"enabled": true
},
"tags": {
"dev": "[USERNAME]"
},
"tasks": [
{
"job_cluster_key": "job_cluster",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/artifacts/.internal/my_default_python-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl"
}
],
"notebook_task": {
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/files/src/sample_notebook"
},
"task_key": "notebook_task"
},
{
"depends_on": [
{
"task_key": "refresh_pipeline"
"task_key": "notebook_task"
}
],
"job_cluster_key": "job_cluster",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/artifacts/.internal/my_default_python-0.0.1+[UNIX_TIME_NANOS]-py3-none-any.whl"
"whl": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/artifacts/.internal/my_default_python-0.0.1+[UNIX_TIME_NANOS][0]-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "main",
"package_name": "my_default_python"
},
"task_key": "main_task"
},
{
"job_cluster_key": "job_cluster",
"notebook_task": {
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/files/src/notebook"
"package_name": "my_default_python",
"parameters": [
"--catalog",
"hive_metastore",
"--schema",
"[USERNAME]"
]
},
"task_key": "notebook_task"
"task_key": "python_wheel_task"
},
{
"depends_on": [
@@ -72,7 +72,7 @@
}
],
"pipeline_task": {
"pipeline_id": "${resources.pipelines.my_default_python_pipeline.id}"
"pipeline_id": "${resources.pipelines.my_default_python_etl.id}"
},
"task_key": "refresh_pipeline"
}
@@ -86,33 +86,36 @@
}
},
"vars": {
"tasks[2].pipeline_task.pipeline_id": "${resources.pipelines.my_default_python_pipeline.id}"
"tasks[2].pipeline_task.pipeline_id": "${resources.pipelines.my_default_python_etl.id}"
}
}
},
"resources.pipelines.my_default_python_pipeline": {
"resources.pipelines.my_default_python_etl": {
"action": "create",
"new_state": {
"config": {
"channel": "CURRENT",
"configuration": {
"bundle.sourcePath": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/files/src"
},
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/state/metadata.json"
},
"development": true,
"edition": "ADVANCED",
"environment": {
"dependencies": [
"--editable /Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/files"
]
},
"libraries": [
{
"notebook": {
"path": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/files/src/pipeline"
"glob": {
"include": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/files/src/my_default_python_etl/transformations/**"
}
}
],
"name": "[dev [USERNAME]] my_default_python_pipeline",
"schema": "my_default_python_dev",
"name": "[dev [USERNAME]] my_default_python_etl",
"root_path": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/files/src/my_default_python_etl",
"schema": "[USERNAME]",
"tags": {
"dev": "[USERNAME]"
}
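The plan above shows new job-level catalog/schema parameters that the python_wheel_task forwards as --catalog/--schema. A sketch of the corresponding source YAML in the job resource (defaults referencing the bundle variables are an assumption; the plan renders them already resolved):

```yaml
parameters:
  - name: catalog
    default: ${var.catalog}
  - name: schema
    default: ${var.schema}
```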
@@ -1,9 +1,9 @@
{
"plan": {
"resources.jobs.my_default_python_job": {
"resources.jobs.sample_job": {
"action": "create"
},
"resources.pipelines.my_default_python_pipeline": {
"resources.pipelines.my_default_python_etl": {
"action": "create"
}
}