2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
@@ -1 +1 @@
-64d5b2c102315eef89e9b3b0b3fc960049030d77
+8685a7d0216270e9c2b1f66e5917ee272899a315
8 changes: 4 additions & 4 deletions docs/account/iam/workspace_assignment.rst
@@ -43,9 +43,9 @@

a = AccountClient()

-workspace_id = os.environ["TEST_WORKSPACE_ID"]
+workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

-all = a.workspace_assignment.list(list=workspace_id)
+all = a.workspace_assignment.list(workspace_id=workspace_id)

Get the permission assignments for the specified Databricks account and Databricks workspace.

@@ -74,9 +74,9 @@

spn_id = spn.id

-workspace_id = os.environ["TEST_WORKSPACE_ID"]
+workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

-a.workspace_assignment.update(
+_ = a.workspace_assignment.update(
    workspace_id=workspace_id,
    principal_id=spn_id,
    permissions=[iam.WorkspacePermission.USER],
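Putting the corrected calls together, a minimal end-to-end sketch might look as follows. It assumes a configured `AccountClient`, a `DUMMY_WORKSPACE_ID` environment variable, and it creates a throwaway service principal to receive the assignment (a hypothetical addition, not part of the diff).

.. code-block::

    import os

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    workspace_id = os.environ["DUMMY_WORKSPACE_ID"]

    # List the current permission assignments for the workspace.
    for assignment in a.workspace_assignment.list(workspace_id=workspace_id):
        print(assignment)

    # Create a throwaway service principal and grant it USER permissions.
    spn = a.service_principals.create(display_name="sdk-example-spn")
    _ = a.workspace_assignment.update(
        workspace_id=workspace_id,
        principal_id=spn.id,
        permissions=[iam.WorkspacePermission.USER],
    )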
3 changes: 1 addition & 2 deletions docs/workspace/catalog/catalogs.rst
@@ -155,13 +155,12 @@
import time

from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import catalog

w = WorkspaceClient()

created = w.catalogs.create(name=f"sdk-{time.time_ns()}")

-_ = w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED)
+_ = w.catalogs.update(name=created.name, comment="updated")

# cleanup
w.catalogs.delete(name=created.name, force=True)
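The updated example, assembled into a runnable sketch (the `comment` value is arbitrary):

.. code-block::

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Create a throwaway catalog, update its comment, then remove it.
    created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
    _ = w.catalogs.update(name=created.name, comment="updated")

    # cleanup
    w.catalogs.delete(name=created.name, force=True)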
10 changes: 5 additions & 5 deletions docs/workspace/catalog/external_locations.rst
@@ -107,20 +107,20 @@

credential = w.storage_credentials.create(
    name=f"sdk-{time.time_ns()}",
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

created = w.external_locations.create(
    name=f"sdk-{time.time_ns()}",
    credential_name=credential.name,
-    url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+    url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"),
)

-_ = w.external_locations.get(get=created.name)
+_ = w.external_locations.get(name=created.name)

# cleanup
-w.storage_credentials.delete(delete=credential.name)
-w.external_locations.delete(delete=created.name)
+w.storage_credentials.delete(name=credential.name)
+w.external_locations.delete(name=created.name)

Gets an external location from the metastore. The caller must be either a metastore admin, the owner
of the external location, or a user that has some privilege on the external location.
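Assembled, the corrected external-locations flow reads as the following sketch. It assumes the `TEST_METASTORE_DATA_ACCESS_ARN` and `TEST_BUCKET` environment variables and AWS-backed storage.

.. code-block::

    import os
    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # A storage credential backed by an AWS IAM role; the ARN comes from the environment.
    credential = w.storage_credentials.create(
        name=f"sdk-{time.time_ns()}",
        aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
    )

    # Register an external location that points at an S3 prefix.
    created = w.external_locations.create(
        name=f"sdk-{time.time_ns()}",
        credential_name=credential.name,
        url=f"s3://{os.environ['TEST_BUCKET']}/sdk-{time.time_ns()}",
    )

    # Fetch it back by name.
    loc = w.external_locations.get(name=created.name)

    # cleanup
    w.external_locations.delete(name=created.name)
    w.storage_credentials.delete(name=credential.name)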
6 changes: 3 additions & 3 deletions docs/workspace/catalog/storage_credentials.rst
@@ -173,17 +173,17 @@

created = w.storage_credentials.create(
    name=f"sdk-{time.time_ns()}",
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

_ = w.storage_credentials.update(
    name=created.name,
    comment=f"sdk-{time.time_ns()}",
-    aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+    aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
)

# cleanup
-w.storage_credentials.delete(delete=created.name)
+w.storage_credentials.delete(name=created.name)

Updates a storage credential on the metastore.

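The corrected storage-credential flow as one sketch, under the same `TEST_METASTORE_DATA_ACCESS_ARN` environment-variable assumption:

.. code-block::

    import os
    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    created = w.storage_credentials.create(
        name=f"sdk-{time.time_ns()}",
        aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
    )

    # Update the comment in place; the credential keeps its name.
    _ = w.storage_credentials.update(
        name=created.name,
        comment=f"sdk-{time.time_ns()}",
        aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
    )

    # cleanup
    w.storage_credentials.delete(name=created.name)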
2 changes: 1 addition & 1 deletion docs/workspace/catalog/tables.rst
@@ -156,7 +156,7 @@

created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)

-summaries = w.tables.list_summaries(catalog_name=created_catalog.name, schema_name_pattern=created_schema.name)
+all_tables = w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name)

# cleanup
w.schemas.delete(full_name=created_schema.full_name)
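A minimal sketch of the replacement call; `w.tables.list` returns a lazy iterator, so the example loops over it (printing `full_name` is an illustrative addition):

.. code-block::

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
    created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)

    # Iterate over the tables registered in the new schema.
    for table in w.tables.list(catalog_name=created_catalog.name, schema_name=created_schema.name):
        print(table.full_name)

    # cleanup
    w.schemas.delete(full_name=created_schema.full_name)
    w.catalogs.delete(name=created_catalog.name, force=True)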
3 changes: 1 addition & 2 deletions docs/workspace/compute/clusters.rst
@@ -645,11 +645,10 @@
.. code-block::

from databricks.sdk import WorkspaceClient
-from databricks.sdk.service import compute

w = WorkspaceClient()

-all = w.clusters.list(compute.ListClustersRequest())
+nodes = w.clusters.list_node_types()

Return information about all pinned and active clusters, and all clusters terminated within the last
30 days. Clusters terminated prior to this period are not included.
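For comparison, a short sketch of both listing calls. It assumes `w.clusters.list()` can be called without an explicit request object in current SDK versions, and the printed attributes are illustrative:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Node types available for new clusters in this workspace.
    nodes = w.clusters.list_node_types()

    # Pinned, active, and recently terminated clusters.
    for cluster in w.clusters.list():
        print(cluster.cluster_name, cluster.state)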
28 changes: 27 additions & 1 deletion docs/workspace/jobs/jobs.rst
@@ -522,11 +522,37 @@

.. code-block::

+import os
+import time

from databricks.sdk import WorkspaceClient
+from databricks.sdk.service import jobs

w = WorkspaceClient()

-job_list = w.jobs.list(expand_tasks=False)
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+
+cluster_id = (
+    w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
+)
+
+created_job = w.jobs.create(
+    name=f"sdk-{time.time_ns()}",
+    tasks=[
+        jobs.Task(
+            description="test",
+            existing_cluster_id=cluster_id,
+            notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
+            task_key="test",
+            timeout_seconds=0,
+        )
+    ],
+)
+
+run_list = w.jobs.list_runs(job_id=created_job.job_id)
+
+# cleanup
+w.jobs.delete(job_id=created_job.job_id)

List jobs.

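A minimal listing sketch to go with the description; `expand_tasks=False` keeps responses small, and the printed fields are illustrative assumptions:

.. code-block::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Each item is a job summary; settings carry the human-readable name.
    for job in w.jobs.list(expand_tasks=False):
        print(job.job_id, job.settings.name)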
2 changes: 2 additions & 0 deletions docs/workspace/ml/model_registry.rst
@@ -91,6 +91,8 @@
w = WorkspaceClient()

model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")

+created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")

Creates a new registered model with the name specified in the request body. Throws
`RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
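Assembled into a runnable sketch (the `dbfs:/tmp` source path is the placeholder used above):

.. code-block::

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Register a model, then attach a first version whose artifacts live at a DBFS path.
    model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
    created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")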
17 changes: 11 additions & 6 deletions docs/workspace/workspace/workspace.rst
@@ -148,9 +148,9 @@

w = WorkspaceClient()

-notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

-get_status_response = w.workspace.get_status(path=notebook)
+obj = w.workspace.get_status(path=notebook_path)

Gets the status of an object or a directory. If `path` does not exist, this call returns an error
`RESOURCE_DOES_NOT_EXIST`.
@@ -176,14 +176,19 @@

w = WorkspaceClient()

-notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

w.workspace.import_(
-    path=notebook,
+    path=notebook_path,
-    overwrite=true_,
    format=workspace.ImportFormat.SOURCE,
    language=workspace.Language.PYTHON,
-    content=base64.b64encode(("# Databricks notebook source\nprint('hello from job')").encode()).decode(),
+    overwrite=True,
+    content=base64.b64encode(
+        (
+            """print(1)
+"""
+        ).encode()
+    ).decode(),
)

Imports a workspace object (for example, a notebook or file) or the contents of an entire directory.
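A condensed sketch of the corrected import flow; the notebook body and the final `get_status` check are illustrative:

.. code-block::

    import base64
    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import workspace

    w = WorkspaceClient()

    notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

    # SOURCE format expects the raw file contents, base64-encoded.
    w.workspace.import_(
        path=notebook_path,
        format=workspace.ImportFormat.SOURCE,
        language=workspace.Language.PYTHON,
        overwrite=True,
        content=base64.b64encode(b"print(1)\n").decode(),
    )

    # Confirm the object now exists.
    obj = w.workspace.get_status(path=notebook_path)
    print(obj.object_type)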