From c060e85999f09e786f781b2818c6d42cfea3664c Mon Sep 17 00:00:00 2001 From: "databricks-ci-ghec-2[bot]" <184307802+databricks-ci-ghec-2[bot]@users.noreply.github.com> Date: Thu, 1 Jan 2026 09:41:29 +0000 Subject: [PATCH] Update SDK to 8685a7d0216270e9c2b1f66e5917ee272899a315 --- .codegen/_openapi_sha | 2 +- docs/account/iam/workspace_assignment.rst | 8 +++--- docs/workspace/catalog/catalogs.rst | 3 +- docs/workspace/catalog/external_locations.rst | 10 +++---- .../workspace/catalog/storage_credentials.rst | 6 ++-- docs/workspace/catalog/tables.rst | 2 +- docs/workspace/compute/clusters.rst | 3 +- docs/workspace/jobs/jobs.rst | 28 ++++++++++++++++++- docs/workspace/ml/model_registry.rst | 2 ++ docs/workspace/workspace/workspace.rst | 17 +++++++---- 10 files changed, 56 insertions(+), 25 deletions(-) diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index bca179d78..246231fd4 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -64d5b2c102315eef89e9b3b0b3fc960049030d77 \ No newline at end of file +8685a7d0216270e9c2b1f66e5917ee272899a315 \ No newline at end of file diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index ca78b86df..fa9c2ee3e 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -43,9 +43,9 @@ a = AccountClient() - workspace_id = os.environ["TEST_WORKSPACE_ID"] + workspace_id = os.environ["DUMMY_WORKSPACE_ID"] - all = a.workspace_assignment.list(list=workspace_id) + all = a.workspace_assignment.list(workspace_id=workspace_id) Get the permission assignments for the specified Databricks account and Databricks workspace. 
@@ -74,9 +74,9 @@ spn_id = spn.id - workspace_id = os.environ["TEST_WORKSPACE_ID"] + workspace_id = os.environ["DUMMY_WORKSPACE_ID"] - a.workspace_assignment.update( + _ = a.workspace_assignment.update( workspace_id=workspace_id, principal_id=spn_id, permissions=[iam.WorkspacePermission.USER], diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 258f994d3..17297d8dd 100644 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -155,13 +155,12 @@ import time from databricks.sdk import WorkspaceClient - from databricks.sdk.service import catalog w = WorkspaceClient() created = w.catalogs.create(name=f"sdk-{time.time_ns()}") - _ = w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED) + _ = w.catalogs.update(name=created.name, comment="updated") # cleanup w.catalogs.delete(name=created.name, force=True) diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index 5591a7aa7..b87fd3265 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -107,20 +107,20 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) - _ = w.external_locations.get(get=created.name) + _ = w.external_locations.get(name=created.name) # cleanup - w.storage_credentials.delete(delete=credential.name) - w.external_locations.delete(delete=created.name) + w.storage_credentials.delete(name=credential.name) + 
w.external_locations.delete(name=created.name) Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index c3c990e99..c16a8e093 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -173,17 +173,17 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) _ = w.storage_credentials.update( name=created.name, comment=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(delete=created.name) + w.storage_credentials.delete(name=created.name) Updates a storage credential on the metastore. 
diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst index 009b4bbd2..089b1b7f1 100644 --- a/docs/workspace/catalog/tables.rst +++ b/docs/workspace/catalog/tables.rst @@ -156,7 +156,7 @@ created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name) - summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name) + all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name) # cleanup w.schemas.delete(full_name=created_schema.full_name) diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index 8619a5e9a..463e34d0a 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -645,11 +645,10 @@ .. code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import compute w = WorkspaceClient() - all = w.clusters.list(compute.ListClustersRequest()) + nodes = w.clusters.list_node_types() Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index aea2acd46..49eee71d9 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -522,11 +522,37 @@ .. 
code-block:: + import os + import time + from databricks.sdk import WorkspaceClient + from databricks.sdk.service import jobs w = WorkspaceClient() - job_list = w.jobs.list(expand_tasks=False) + notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" + + cluster_id = ( + w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] + ) + + created_job = w.jobs.create( + name=f"sdk-{time.time_ns()}", + tasks=[ + jobs.Task( + description="test", + existing_cluster_id=cluster_id, + notebook_task=jobs.NotebookTask(notebook_path=notebook_path), + task_key="test", + timeout_seconds=0, + ) + ], + ) + + run_list = w.jobs.list_runs(job_id=created_job.job_id) + + # cleanup + w.jobs.delete(job_id=created_job.job_id) List jobs. diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 9a6c8f286..46c3a4565 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -91,6 +91,8 @@ w = WorkspaceClient() model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") + + created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Creates a new registered model with the name specified in the request body. Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index d4136c637..8378eea35 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -148,9 +148,9 @@ w = WorkspaceClient() - notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" + notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" - get_status_response = w.workspace.get_status(path=notebook) + obj = w.workspace.get_status(path=notebook_path) Gets the status of an object or a directory. 
If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. @@ -176,14 +176,19 @@ w = WorkspaceClient() - notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" + notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}" w.workspace.import_( - path=notebook, + path=notebook_path, + overwrite=True, format=workspace.ImportFormat.SOURCE, language=workspace.Language.PYTHON, - content=base64.b64encode(("# Databricks notebook source\nprint('hello from job')").encode()).decode(), - overwrite=True, + content=base64.b64encode( + ( + """print(1) + """ + ).encode() + ).decode(), ) Imports a workspace object (for example, a notebook or file) or the contents of an entire directory.