diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 85a78ec8f..2e1b26236 100755 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -2e4155a57cb6c406a47d42fdc8264795fe7d7e1b \ No newline at end of file +d7ea36916592a39c3b731ed7717884b2ffe15ebe \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index d1e1623ac..2a7d04a47 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1178,6 +1178,30 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Snapsho databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/TimeSeriesConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/BaseEnvironmentType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentOperation.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DatabricksServiceExceptionWithDetailsProto.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DefaultWorkspaceBaseEnvironment.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DeleteWorkspaceBaseEnvironmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsImpl.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ErrorCode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetDefaultWorkspaceBaseEnvironmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetOperationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetWorkspaceBaseEnvironmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ListWorkspaceBaseEnvironmentsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ListWorkspaceBaseEnvironmentsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/Operation.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentOperation.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateDefaultWorkspaceBaseEnvironmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentOperation.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironment.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironmentCacheStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironmentOperationMetadata.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 0eab3a65f..e4c5aab08 100755 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -16,4 +16,6 @@ * Add `alertOutput` field for `com.databricks.sdk.service.jobs.RunOutput`. * Add `alertTask` field for `com.databricks.sdk.service.jobs.RunTask`. * Add `alertTask` field for `com.databricks.sdk.service.jobs.SubmitTask`. -* Add `alertTask` field for `com.databricks.sdk.service.jobs.Task`. \ No newline at end of file +* Add `alertTask` field for `com.databricks.sdk.service.jobs.Task`. +* Add `com.databricks.sdk.service.environments` package. +* Add `workspaceClient.environments()` service. 
\ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index fae5f8e85..1ff744a47 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -107,6 +107,8 @@ import com.databricks.sdk.service.dataclassification.DataClassificationService; import com.databricks.sdk.service.dataquality.DataQualityAPI; import com.databricks.sdk.service.dataquality.DataQualityService; +import com.databricks.sdk.service.environments.EnvironmentsAPI; +import com.databricks.sdk.service.environments.EnvironmentsService; import com.databricks.sdk.service.files.DbfsService; import com.databricks.sdk.service.files.FilesAPI; import com.databricks.sdk.service.files.FilesService; @@ -303,6 +305,7 @@ public class WorkspaceClient { private DbfsExt dbfsAPI; private DbsqlPermissionsAPI dbsqlPermissionsAPI; private EntityTagAssignmentsAPI entityTagAssignmentsAPI; + private EnvironmentsAPI environmentsAPI; private ExperimentsAPI experimentsAPI; private ExternalLineageAPI externalLineageAPI; private ExternalLocationsAPI externalLocationsAPI; @@ -438,6 +441,7 @@ public WorkspaceClient(DatabricksConfig config) { dbfsAPI = new DbfsExt(apiClient); dbsqlPermissionsAPI = new DbsqlPermissionsAPI(apiClient); entityTagAssignmentsAPI = new EntityTagAssignmentsAPI(apiClient); + environmentsAPI = new EnvironmentsAPI(apiClient); experimentsAPI = new ExperimentsAPI(apiClient); externalLineageAPI = new ExternalLineageAPI(apiClient); externalLocationsAPI = new ExternalLocationsAPI(apiClient); @@ -905,6 +909,17 @@ public EntityTagAssignmentsAPI entityTagAssignments() { return entityTagAssignmentsAPI; } + /** + * APIs to manage environment resources. + * + *

The Environments API provides management capabilities for different types of environments + * including workspace-level base environments that define the environment version and + * dependencies to be used in serverless notebooks and jobs. + */ + public EnvironmentsAPI environments() { + return environmentsAPI; + } + /** * Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an * experiment. Each experiment lets you visualize, search, and compare runs, as well as download @@ -2563,6 +2578,17 @@ public WorkspaceClient withEntityTagAssignmentsAPI(EntityTagAssignmentsAPI entit return this; } + /** Replace the default EnvironmentsService with a custom implementation. */ + public WorkspaceClient withEnvironmentsImpl(EnvironmentsService environments) { + return this.withEnvironmentsAPI(new EnvironmentsAPI(environments)); + } + + /** Replace the default EnvironmentsAPI with a custom implementation. */ + public WorkspaceClient withEnvironmentsAPI(EnvironmentsAPI environments) { + this.environmentsAPI = environments; + return this; + } + /** Replace the default ExperimentsService with a custom implementation. */ public WorkspaceClient withExperimentsImpl(ExperimentsService experiments) { return this.withExperimentsAPI(new ExperimentsAPI(experiments)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/BaseEnvironmentType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/BaseEnvironmentType.java new file mode 100755 index 000000000..a2dc3e06a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/BaseEnvironmentType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; + +/** If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto */ +@Generated +public enum BaseEnvironmentType { + CPU, + GPU, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentOperation.java new file mode 100755 index 000000000..7a164ba04 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentOperation.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.utils.SerDeUtils; +import com.databricks.sdk.service.common.lro.LroOptions; +import com.databricks.sdk.support.Generated; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Duration; +import java.util.Optional; +import java.util.concurrent.TimeoutException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Wrapper for interacting with a long-running createWorkspaceBaseEnvironment operation. Provides + * methods to wait for completion, check status, cancel, and access metadata. 
+ */ +@Generated +public class CreateWorkspaceBaseEnvironmentOperation { + private static final Logger LOG = + LoggerFactory.getLogger(CreateWorkspaceBaseEnvironmentOperation.class); + + private final EnvironmentsService impl; + private Operation operation; + private final ObjectMapper objectMapper; + + public CreateWorkspaceBaseEnvironmentOperation(EnvironmentsService impl, Operation operation) { + this.impl = impl; + this.operation = operation; + this.objectMapper = SerDeUtils.createMapper(); + } + + /** + * Wait for the operation to complete and return the resulting WorkspaceBaseEnvironment. Waits + * indefinitely if no timeout is specified. + * + * @return the created WorkspaceBaseEnvironment + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public WorkspaceBaseEnvironment waitForCompletion() throws TimeoutException { + return waitForCompletion(Optional.empty()); + } + + /** + * Wait for the operation to complete and return the resulting WorkspaceBaseEnvironment. + * + * @param options the options for configuring the wait behavior, can be empty for defaults + * @return the created WorkspaceBaseEnvironment + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public WorkspaceBaseEnvironment waitForCompletion(Optional options) + throws TimeoutException { + Optional timeout = options.flatMap(LroOptions::getTimeout); + long deadline = + timeout.isPresent() + ? 
System.currentTimeMillis() + timeout.get().toMillis() + : Long.MAX_VALUE; + String statusMessage = "polling operation..."; + int attempt = 1; + + while (System.currentTimeMillis() < deadline) { + // Refresh the operation state + refreshOperation(); + + if (operation.getDone() != null && operation.getDone()) { + // Operation completed, check for success or failure + if (operation.getError() != null) { + String errorMsg = "unknown error"; + if (operation.getError().getMessage() != null + && !operation.getError().getMessage().isEmpty()) { + errorMsg = operation.getError().getMessage(); + } + + if (operation.getError().getErrorCode() != null) { + errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg); + } + + throw new DatabricksException("Operation failed: " + errorMsg); + } + + // Operation completed successfully, unmarshal response + if (operation.getResponse() == null) { + throw new DatabricksException("Operation completed but no response available"); + } + + try { + JsonNode responseJson = objectMapper.valueToTree(operation.getResponse()); + return objectMapper.treeToValue(responseJson, WorkspaceBaseEnvironment.class); + } catch (JsonProcessingException e) { + throw new DatabricksException( + "Failed to unmarshal workspaceBaseEnvironment response: " + e.getMessage(), e); + } + } + + // Operation still in progress, wait before polling again + String prefix = String.format("operation=%s", operation.getName()); + int sleep = Math.min(attempt, 10); // sleep 10s max per attempt + LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep); + + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + + String timeoutMessage = + timeout.isPresent() + ? 
String.format("Operation timed out after %s: %s", timeout.get(), statusMessage) + : String.format("Operation timed out: %s", statusMessage); + throw new TimeoutException(timeoutMessage); + } + + /** + * Get the operation name. + * + * @return the operation name + */ + public String getName() { + return operation.getName(); + } + + /** + * Get the operation metadata. + * + * @return the operation metadata, or null if not available + * @throws DatabricksException if the metadata cannot be deserialized + */ + public WorkspaceBaseEnvironmentOperationMetadata getMetadata() { + if (operation.getMetadata() == null) { + return null; + } + + try { + JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata()); + return objectMapper.treeToValue( + metadataJson, WorkspaceBaseEnvironmentOperationMetadata.class); + } catch (JsonProcessingException e) { + throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e); + } + } + + /** + * Check if the operation is done. This method refreshes the operation state before checking. + * + * @return true if the operation is complete, false otherwise + * @throws DatabricksException if the status check fails + */ + public boolean isDone() { + refreshOperation(); + return operation.getDone() != null && operation.getDone(); + } + + /** Refresh the operation state by polling the server. 
*/ + private void refreshOperation() { + operation = impl.getOperation(new GetOperationRequest().setName(operation.getName())); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentRequest.java new file mode 100755 index 000000000..fa5de1d9b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/CreateWorkspaceBaseEnvironmentRequest.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateWorkspaceBaseEnvironmentRequest { + /** + * A unique identifier for this request. A random UUID is recommended. This request is only + * idempotent if a request_id is provided. + */ + @JsonIgnore + @QueryParam("request_id") + private String requestId; + + /** Required. The workspace base environment to create. */ + @JsonProperty("workspace_base_environment") + private WorkspaceBaseEnvironment workspaceBaseEnvironment; + + /** + * The ID to use for the workspace base environment, which will become the final component of the + * resource name. This value should be 4-63 characters, and valid characters are /[a-z][0-9]-/. 
+ */ + @JsonIgnore + @QueryParam("workspace_base_environment_id") + private String workspaceBaseEnvironmentId; + + public CreateWorkspaceBaseEnvironmentRequest setRequestId(String requestId) { + this.requestId = requestId; + return this; + } + + public String getRequestId() { + return requestId; + } + + public CreateWorkspaceBaseEnvironmentRequest setWorkspaceBaseEnvironment( + WorkspaceBaseEnvironment workspaceBaseEnvironment) { + this.workspaceBaseEnvironment = workspaceBaseEnvironment; + return this; + } + + public WorkspaceBaseEnvironment getWorkspaceBaseEnvironment() { + return workspaceBaseEnvironment; + } + + public CreateWorkspaceBaseEnvironmentRequest setWorkspaceBaseEnvironmentId( + String workspaceBaseEnvironmentId) { + this.workspaceBaseEnvironmentId = workspaceBaseEnvironmentId; + return this; + } + + public String getWorkspaceBaseEnvironmentId() { + return workspaceBaseEnvironmentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateWorkspaceBaseEnvironmentRequest that = (CreateWorkspaceBaseEnvironmentRequest) o; + return Objects.equals(requestId, that.requestId) + && Objects.equals(workspaceBaseEnvironment, that.workspaceBaseEnvironment) + && Objects.equals(workspaceBaseEnvironmentId, that.workspaceBaseEnvironmentId); + } + + @Override + public int hashCode() { + return Objects.hash(requestId, workspaceBaseEnvironment, workspaceBaseEnvironmentId); + } + + @Override + public String toString() { + return new ToStringer(CreateWorkspaceBaseEnvironmentRequest.class) + .add("requestId", requestId) + .add("workspaceBaseEnvironment", workspaceBaseEnvironment) + .add("workspaceBaseEnvironmentId", workspaceBaseEnvironmentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DatabricksServiceExceptionWithDetailsProto.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DatabricksServiceExceptionWithDetailsProto.java new file mode 100755 index 000000000..b7e259c6e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DatabricksServiceExceptionWithDetailsProto.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Databricks Error that is returned by all Databricks APIs. */ +@Generated +public class DatabricksServiceExceptionWithDetailsProto { + /** */ + @JsonProperty("details") + private Collection details; + + /** */ + @JsonProperty("error_code") + private ErrorCode errorCode; + + /** */ + @JsonProperty("message") + private String message; + + /** */ + @JsonProperty("stack_trace") + private String stackTrace; + + public DatabricksServiceExceptionWithDetailsProto setDetails(Collection details) { + this.details = details; + return this; + } + + public Collection getDetails() { + return details; + } + + public DatabricksServiceExceptionWithDetailsProto setErrorCode(ErrorCode errorCode) { + this.errorCode = errorCode; + return this; + } + + public ErrorCode getErrorCode() { + return errorCode; + } + + public DatabricksServiceExceptionWithDetailsProto setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public DatabricksServiceExceptionWithDetailsProto setStackTrace(String stackTrace) { + this.stackTrace = stackTrace; + return this; + } + + public String getStackTrace() { + return stackTrace; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; 
+ DatabricksServiceExceptionWithDetailsProto that = + (DatabricksServiceExceptionWithDetailsProto) o; + return Objects.equals(details, that.details) + && Objects.equals(errorCode, that.errorCode) + && Objects.equals(message, that.message) + && Objects.equals(stackTrace, that.stackTrace); + } + + @Override + public int hashCode() { + return Objects.hash(details, errorCode, message, stackTrace); + } + + @Override + public String toString() { + return new ToStringer(DatabricksServiceExceptionWithDetailsProto.class) + .add("details", details) + .add("errorCode", errorCode) + .add("message", message) + .add("stackTrace", stackTrace) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DefaultWorkspaceBaseEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DefaultWorkspaceBaseEnvironment.java new file mode 100755 index 000000000..57cecf59a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DefaultWorkspaceBaseEnvironment.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A singleton resource representing the default workspace base environment configuration. This + * resource contains the workspace base environments that are used as defaults for serverless + * notebooks and jobs in the workspace, for both CPU and GPU compute types. + */ +@Generated +public class DefaultWorkspaceBaseEnvironment { + /** + * The default workspace base environment for CPU compute. 
Format: + * workspace-base-environments/{workspace_base_environment} + */ + @JsonProperty("cpu_workspace_base_environment") + private String cpuWorkspaceBaseEnvironment; + + /** + * The default workspace base environment for GPU compute. Format: + * workspace-base-environments/{workspace_base_environment} + */ + @JsonProperty("gpu_workspace_base_environment") + private String gpuWorkspaceBaseEnvironment; + + /** The resource name of this singleton resource. Format: default-workspace-base-environment */ + @JsonProperty("name") + private String name; + + public DefaultWorkspaceBaseEnvironment setCpuWorkspaceBaseEnvironment( + String cpuWorkspaceBaseEnvironment) { + this.cpuWorkspaceBaseEnvironment = cpuWorkspaceBaseEnvironment; + return this; + } + + public String getCpuWorkspaceBaseEnvironment() { + return cpuWorkspaceBaseEnvironment; + } + + public DefaultWorkspaceBaseEnvironment setGpuWorkspaceBaseEnvironment( + String gpuWorkspaceBaseEnvironment) { + this.gpuWorkspaceBaseEnvironment = gpuWorkspaceBaseEnvironment; + return this; + } + + public String getGpuWorkspaceBaseEnvironment() { + return gpuWorkspaceBaseEnvironment; + } + + public DefaultWorkspaceBaseEnvironment setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DefaultWorkspaceBaseEnvironment that = (DefaultWorkspaceBaseEnvironment) o; + return Objects.equals(cpuWorkspaceBaseEnvironment, that.cpuWorkspaceBaseEnvironment) + && Objects.equals(gpuWorkspaceBaseEnvironment, that.gpuWorkspaceBaseEnvironment) + && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(cpuWorkspaceBaseEnvironment, gpuWorkspaceBaseEnvironment, name); + } + + @Override + public String toString() { + return new ToStringer(DefaultWorkspaceBaseEnvironment.class) + 
.add("cpuWorkspaceBaseEnvironment", cpuWorkspaceBaseEnvironment) + .add("gpuWorkspaceBaseEnvironment", gpuWorkspaceBaseEnvironment) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DeleteWorkspaceBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DeleteWorkspaceBaseEnvironmentRequest.java new file mode 100755 index 000000000..256c14b86 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/DeleteWorkspaceBaseEnvironmentRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteWorkspaceBaseEnvironmentRequest { + /** + * Required. The resource name of the workspace base environment to delete. 
Format: + * workspace-base-environments/{workspace_base_environment} + */ + @JsonIgnore private String name; + + public DeleteWorkspaceBaseEnvironmentRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteWorkspaceBaseEnvironmentRequest that = (DeleteWorkspaceBaseEnvironmentRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteWorkspaceBaseEnvironmentRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsAPI.java new file mode 100755 index 000000000..14355e0b1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsAPI.java @@ -0,0 +1,146 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * APIs to manage environment resources. + * + *

The Environments API provides management capabilities for different types of environments + * including workspace-level base environments that define the environment version and dependencies + * to be used in serverless notebooks and jobs. + */ +@Generated +public class EnvironmentsAPI { + private static final Logger LOG = LoggerFactory.getLogger(EnvironmentsAPI.class); + + private final EnvironmentsService impl; + + /** Regular-use constructor */ + public EnvironmentsAPI(ApiClient apiClient) { + impl = new EnvironmentsImpl(apiClient); + } + + /** Constructor for mocks */ + public EnvironmentsAPI(EnvironmentsService mock) { + impl = mock; + } + + /** + * Creates a new WorkspaceBaseEnvironment. This is a long-running operation. The operation will + * asynchronously generate a materialized environment to optimize dependency resolution and is + * only marked as done when the materialized environment has been successfully generated or has + * failed. + */ + public CreateWorkspaceBaseEnvironmentOperation createWorkspaceBaseEnvironment( + CreateWorkspaceBaseEnvironmentRequest request) { + Operation operation = impl.createWorkspaceBaseEnvironment(request); + return new CreateWorkspaceBaseEnvironmentOperation(impl, operation); + } + + public void deleteWorkspaceBaseEnvironment(String name) { + deleteWorkspaceBaseEnvironment(new DeleteWorkspaceBaseEnvironmentRequest().setName(name)); + } + + /** + * Deletes a WorkspaceBaseEnvironment. Deleting a base environment may impact linked notebooks and + * jobs. This operation is irreversible and should be performed only when you are certain the + * environment is no longer needed. 
+ */ + public void deleteWorkspaceBaseEnvironment(DeleteWorkspaceBaseEnvironmentRequest request) { + impl.deleteWorkspaceBaseEnvironment(request); + } + + public DefaultWorkspaceBaseEnvironment getDefaultWorkspaceBaseEnvironment(String name) { + return getDefaultWorkspaceBaseEnvironment( + new GetDefaultWorkspaceBaseEnvironmentRequest().setName(name)); + } + + /** + * Gets the default WorkspaceBaseEnvironment configuration for the workspace. Returns the current + * default base environment settings for both CPU and GPU compute. + */ + public DefaultWorkspaceBaseEnvironment getDefaultWorkspaceBaseEnvironment( + GetDefaultWorkspaceBaseEnvironmentRequest request) { + return impl.getDefaultWorkspaceBaseEnvironment(request); + } + + public Operation getOperation(String name) { + return getOperation(new GetOperationRequest().setName(name)); + } + + /** + * Gets the status of a long-running operation. Clients can use this method to poll the operation + * result. + */ + public Operation getOperation(GetOperationRequest request) { + return impl.getOperation(request); + } + + public WorkspaceBaseEnvironment getWorkspaceBaseEnvironment(String name) { + return getWorkspaceBaseEnvironment(new GetWorkspaceBaseEnvironmentRequest().setName(name)); + } + + /** Retrieves a WorkspaceBaseEnvironment by its name. */ + public WorkspaceBaseEnvironment getWorkspaceBaseEnvironment( + GetWorkspaceBaseEnvironmentRequest request) { + return impl.getWorkspaceBaseEnvironment(request); + } + + /** Lists all WorkspaceBaseEnvironments in the workspace. 
*/ + public Iterable listWorkspaceBaseEnvironments( + ListWorkspaceBaseEnvironmentsRequest request) { + return new Paginator<>( + request, + impl::listWorkspaceBaseEnvironments, + ListWorkspaceBaseEnvironmentsResponse::getWorkspaceBaseEnvironments, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + + /** + * Refreshes the materialized environment for a WorkspaceBaseEnvironment. This is a long-running + * operation. The operation will asynchronously regenerate the materialized environment and is + * only marked as done when the materialized environment has been successfully generated or has + * failed. The existing materialized environment remains available until it expires. + */ + public RefreshWorkspaceBaseEnvironmentOperation refreshWorkspaceBaseEnvironment( + RefreshWorkspaceBaseEnvironmentRequest request) { + Operation operation = impl.refreshWorkspaceBaseEnvironment(request); + return new RefreshWorkspaceBaseEnvironmentOperation(impl, operation); + } + + /** + * Updates the default WorkspaceBaseEnvironment configuration for the workspace. Sets the + * specified base environments as the workspace defaults for CPU and/or GPU compute. + */ + public DefaultWorkspaceBaseEnvironment updateDefaultWorkspaceBaseEnvironment( + UpdateDefaultWorkspaceBaseEnvironmentRequest request) { + return impl.updateDefaultWorkspaceBaseEnvironment(request); + } + + /** + * Updates an existing WorkspaceBaseEnvironment. This is a long-running operation. The operation + * will asynchronously regenerate the materialized environment and is only marked as done when the + * materialized environment has been successfully generated or has failed. The existing + * materialized environment remains available until it expires. 
+ */ + public UpdateWorkspaceBaseEnvironmentOperation updateWorkspaceBaseEnvironment( + UpdateWorkspaceBaseEnvironmentRequest request) { + Operation operation = impl.updateWorkspaceBaseEnvironment(request); + return new UpdateWorkspaceBaseEnvironmentOperation(impl, operation); + } + + public EnvironmentsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsImpl.java new file mode 100755 index 000000000..a3dcf16e0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsImpl.java @@ -0,0 +1,187 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.support.Generated; +import java.io.IOException; +import java.util.UUID; + +/** Package-local implementation of Environments */ +@Generated +class EnvironmentsImpl implements EnvironmentsService { + private final ApiClient apiClient; + + public EnvironmentsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public Operation createWorkspaceBaseEnvironment(CreateWorkspaceBaseEnvironmentRequest request) { + String path = "/api/environments/v1/workspace-base-environments"; + try { + Request req = + new Request("POST", path, apiClient.serialize(request.getWorkspaceBaseEnvironment())); + + if (request.getRequestId() == null || request.getRequestId().isEmpty()) { + request.setRequestId(UUID.randomUUID().toString()); + } + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + 
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public void deleteWorkspaceBaseEnvironment(DeleteWorkspaceBaseEnvironmentRequest request) { + String path = String.format("/api/environments/v1/%s", request.getName()); + try { + Request req = new Request("DELETE", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DefaultWorkspaceBaseEnvironment getDefaultWorkspaceBaseEnvironment( + GetDefaultWorkspaceBaseEnvironmentRequest request) { + String path = String.format("/api/environments/v1/%s", request.getName()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, DefaultWorkspaceBaseEnvironment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation getOperation(GetOperationRequest request) { + String path = String.format("/api/environments/v1/%s", request.getName()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + 
@Override + public WorkspaceBaseEnvironment getWorkspaceBaseEnvironment( + GetWorkspaceBaseEnvironmentRequest request) { + String path = String.format("/api/environments/v1/%s", request.getName()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, WorkspaceBaseEnvironment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public ListWorkspaceBaseEnvironmentsResponse listWorkspaceBaseEnvironments( + ListWorkspaceBaseEnvironmentsRequest request) { + String path = "/api/environments/v1/workspace-base-environments"; + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, ListWorkspaceBaseEnvironmentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation refreshWorkspaceBaseEnvironment(RefreshWorkspaceBaseEnvironmentRequest request) { + String path = String.format("/api/environments/v1/%s/refresh", request.getName()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public DefaultWorkspaceBaseEnvironment 
updateDefaultWorkspaceBaseEnvironment( + UpdateDefaultWorkspaceBaseEnvironmentRequest request) { + String path = String.format("/api/environments/v1/%s", request.getName()); + try { + Request req = + new Request( + "PATCH", path, apiClient.serialize(request.getDefaultWorkspaceBaseEnvironment())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, DefaultWorkspaceBaseEnvironment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + + @Override + public Operation updateWorkspaceBaseEnvironment(UpdateWorkspaceBaseEnvironmentRequest request) { + String path = String.format("/api/environments/v1/%s", request.getName()); + try { + Request req = + new Request("PATCH", path, apiClient.serialize(request.getWorkspaceBaseEnvironment())); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsService.java new file mode 100755 index 000000000..3016ef103 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/EnvironmentsService.java @@ -0,0 +1,81 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; + +/** + * APIs to manage environment resources. + * + *
<p>
The Environments API provides management capabilities for different types of environments + * including workspace-level base environments that define the environment version and dependencies + * to be used in serverless notebooks and jobs. + * + *
<p>
This is the high-level interface, that contains generated methods. + * + *
<p>
Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface EnvironmentsService { + /** + * Creates a new WorkspaceBaseEnvironment. This is a long-running operation. The operation will + * asynchronously generate a materialized environment to optimize dependency resolution and is + * only marked as done when the materialized environment has been successfully generated or has + * failed. + */ + Operation createWorkspaceBaseEnvironment( + CreateWorkspaceBaseEnvironmentRequest createWorkspaceBaseEnvironmentRequest); + + /** + * Deletes a WorkspaceBaseEnvironment. Deleting a base environment may impact linked notebooks and + * jobs. This operation is irreversible and should be performed only when you are certain the + * environment is no longer needed. + */ + void deleteWorkspaceBaseEnvironment( + DeleteWorkspaceBaseEnvironmentRequest deleteWorkspaceBaseEnvironmentRequest); + + /** + * Gets the default WorkspaceBaseEnvironment configuration for the workspace. Returns the current + * default base environment settings for both CPU and GPU compute. + */ + DefaultWorkspaceBaseEnvironment getDefaultWorkspaceBaseEnvironment( + GetDefaultWorkspaceBaseEnvironmentRequest getDefaultWorkspaceBaseEnvironmentRequest); + + /** + * Gets the status of a long-running operation. Clients can use this method to poll the operation + * result. + */ + Operation getOperation(GetOperationRequest getOperationRequest); + + /** Retrieves a WorkspaceBaseEnvironment by its name. */ + WorkspaceBaseEnvironment getWorkspaceBaseEnvironment( + GetWorkspaceBaseEnvironmentRequest getWorkspaceBaseEnvironmentRequest); + + /** Lists all WorkspaceBaseEnvironments in the workspace. */ + ListWorkspaceBaseEnvironmentsResponse listWorkspaceBaseEnvironments( + ListWorkspaceBaseEnvironmentsRequest listWorkspaceBaseEnvironmentsRequest); + + /** + * Refreshes the materialized environment for a WorkspaceBaseEnvironment. This is a long-running + * operation. 
The operation will asynchronously regenerate the materialized environment and is + * only marked as done when the materialized environment has been successfully generated or has + * failed. The existing materialized environment remains available until it expires. + */ + Operation refreshWorkspaceBaseEnvironment( + RefreshWorkspaceBaseEnvironmentRequest refreshWorkspaceBaseEnvironmentRequest); + + /** + * Updates the default WorkspaceBaseEnvironment configuration for the workspace. Sets the + * specified base environments as the workspace defaults for CPU and/or GPU compute. + */ + DefaultWorkspaceBaseEnvironment updateDefaultWorkspaceBaseEnvironment( + UpdateDefaultWorkspaceBaseEnvironmentRequest updateDefaultWorkspaceBaseEnvironmentRequest); + + /** + * Updates an existing WorkspaceBaseEnvironment. This is a long-running operation. The operation + * will asynchronously regenerate the materialized environment and is only marked as done when the + * materialized environment has been successfully generated or has failed. The existing + * materialized environment remains available until it expires. + */ + Operation updateWorkspaceBaseEnvironment( + UpdateWorkspaceBaseEnvironmentRequest updateWorkspaceBaseEnvironmentRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ErrorCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ErrorCode.java new file mode 100755 index 000000000..4a748b7c2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ErrorCode.java @@ -0,0 +1,91 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; + +/** Error codes returned by Databricks APIs to indicate specific failure conditions. 
*/ +@Generated +public enum ErrorCode { + ABORTED, + ALREADY_EXISTS, + BAD_REQUEST, + CANCELLED, + CATALOG_ALREADY_EXISTS, + CATALOG_DOES_NOT_EXIST, + CATALOG_NOT_EMPTY, + COULD_NOT_ACQUIRE_LOCK, + CUSTOMER_UNAUTHORIZED, + DAC_ALREADY_EXISTS, + DAC_DOES_NOT_EXIST, + DATA_LOSS, + DEADLINE_EXCEEDED, + DEPLOYMENT_TIMEOUT, + DIRECTORY_NOT_EMPTY, + DIRECTORY_PROTECTED, + DRY_RUN_FAILED, + ENDPOINT_NOT_FOUND, + EXTERNAL_LOCATION_ALREADY_EXISTS, + EXTERNAL_LOCATION_DOES_NOT_EXIST, + FEATURE_DISABLED, + GIT_CONFLICT, + GIT_REMOTE_ERROR, + GIT_SENSITIVE_TOKEN_DETECTED, + GIT_UNKNOWN_REF, + GIT_URL_NOT_ON_ALLOW_LIST, + INSECURE_PARTNER_RESPONSE, + INTERNAL_ERROR, + INVALID_PARAMETER_VALUE, + INVALID_STATE, + INVALID_STATE_TRANSITION, + IO_ERROR, + IPYNB_FILE_IN_REPO, + MALFORMED_PARTNER_RESPONSE, + MALFORMED_REQUEST, + MANAGED_RESOURCE_GROUP_DOES_NOT_EXIST, + MAX_BLOCK_SIZE_EXCEEDED, + MAX_CHILD_NODE_SIZE_EXCEEDED, + MAX_LIST_SIZE_EXCEEDED, + MAX_NOTEBOOK_SIZE_EXCEEDED, + MAX_READ_SIZE_EXCEEDED, + METASTORE_ALREADY_EXISTS, + METASTORE_DOES_NOT_EXIST, + METASTORE_NOT_EMPTY, + NOT_FOUND, + NOT_IMPLEMENTED, + PARTIAL_DELETE, + PERMISSION_DENIED, + PERMISSION_NOT_PROPAGATED, + PRINCIPAL_DOES_NOT_EXIST, + PROJECTS_OPERATION_TIMEOUT, + PROVIDER_ALREADY_EXISTS, + PROVIDER_DOES_NOT_EXIST, + PROVIDER_SHARE_NOT_ACCESSIBLE, + QUOTA_EXCEEDED, + RECIPIENT_ALREADY_EXISTS, + RECIPIENT_DOES_NOT_EXIST, + REQUEST_LIMIT_EXCEEDED, + RESOURCE_ALREADY_EXISTS, + RESOURCE_CONFLICT, + RESOURCE_DOES_NOT_EXIST, + RESOURCE_EXHAUSTED, + RESOURCE_LIMIT_EXCEEDED, + SCHEMA_ALREADY_EXISTS, + SCHEMA_DOES_NOT_EXIST, + SCHEMA_NOT_EMPTY, + SEARCH_QUERY_TOO_LONG, + SEARCH_QUERY_TOO_SHORT, + SERVICE_UNDER_MAINTENANCE, + SHARE_ALREADY_EXISTS, + SHARE_DOES_NOT_EXIST, + STORAGE_CREDENTIAL_ALREADY_EXISTS, + STORAGE_CREDENTIAL_DOES_NOT_EXIST, + TABLE_ALREADY_EXISTS, + TABLE_DOES_NOT_EXIST, + TEMPORARILY_UNAVAILABLE, + UNAUTHENTICATED, + UNAVAILABLE, + UNKNOWN, + UNPARSEABLE_HTTP_ERROR, + 
WORKSPACE_TEMPORARILY_UNAVAILABLE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetDefaultWorkspaceBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetDefaultWorkspaceBaseEnvironmentRequest.java new file mode 100755 index 000000000..8d72396b7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetDefaultWorkspaceBaseEnvironmentRequest.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetDefaultWorkspaceBaseEnvironmentRequest { + /** + * A static resource name of the default workspace base environment. Format: + * default-workspace-base-environment + */ + @JsonIgnore private String name; + + public GetDefaultWorkspaceBaseEnvironmentRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDefaultWorkspaceBaseEnvironmentRequest that = (GetDefaultWorkspaceBaseEnvironmentRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetDefaultWorkspaceBaseEnvironmentRequest.class) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetOperationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetOperationRequest.java new file mode 100755 index 000000000..1e83af316 --- /dev/null 
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetOperationRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetOperationRequest { + /** The name of the operation resource. */ + @JsonIgnore private String name; + + public GetOperationRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetOperationRequest that = (GetOperationRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetOperationRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetWorkspaceBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetWorkspaceBaseEnvironmentRequest.java new file mode 100755 index 000000000..c0810418a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/GetWorkspaceBaseEnvironmentRequest.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetWorkspaceBaseEnvironmentRequest { + /** + * Required. 
The resource name of the workspace base environment to retrieve. Format: + * workspace-base-environments/{workspace_base_environment} + */ + @JsonIgnore private String name; + + public GetWorkspaceBaseEnvironmentRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetWorkspaceBaseEnvironmentRequest that = (GetWorkspaceBaseEnvironmentRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(GetWorkspaceBaseEnvironmentRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ListWorkspaceBaseEnvironmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ListWorkspaceBaseEnvironmentsRequest.java new file mode 100755 index 000000000..ce3dec07a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ListWorkspaceBaseEnvironmentsRequest.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListWorkspaceBaseEnvironmentsRequest { + /** The maximum number of environments to return per page. Default is 1000. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** Page token for pagination. Received from a previous ListWorkspaceBaseEnvironments call. 
*/ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListWorkspaceBaseEnvironmentsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListWorkspaceBaseEnvironmentsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWorkspaceBaseEnvironmentsRequest that = (ListWorkspaceBaseEnvironmentsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListWorkspaceBaseEnvironmentsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ListWorkspaceBaseEnvironmentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ListWorkspaceBaseEnvironmentsResponse.java new file mode 100755 index 000000000..a2b15352a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/ListWorkspaceBaseEnvironmentsResponse.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** Response message for ListWorkspaceBaseEnvironments. 
*/ +@Generated +public class ListWorkspaceBaseEnvironmentsResponse { + /** Token to retrieve the next page of results. Empty if there are no more results. */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** The list of workspace base environments. */ + @JsonProperty("workspace_base_environments") + private Collection workspaceBaseEnvironments; + + public ListWorkspaceBaseEnvironmentsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListWorkspaceBaseEnvironmentsResponse setWorkspaceBaseEnvironments( + Collection workspaceBaseEnvironments) { + this.workspaceBaseEnvironments = workspaceBaseEnvironments; + return this; + } + + public Collection getWorkspaceBaseEnvironments() { + return workspaceBaseEnvironments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListWorkspaceBaseEnvironmentsResponse that = (ListWorkspaceBaseEnvironmentsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(workspaceBaseEnvironments, that.workspaceBaseEnvironments); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, workspaceBaseEnvironments); + } + + @Override + public String toString() { + return new ToStringer(ListWorkspaceBaseEnvironmentsResponse.class) + .add("nextPageToken", nextPageToken) + .add("workspaceBaseEnvironments", workspaceBaseEnvironments) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/Operation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/Operation.java new file mode 100755 index 000000000..bb01510f4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/Operation.java @@ -0,0 +1,116 @@ +// Code generated from 
OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** This resource represents a long-running operation that is the result of a network API call. */ +@Generated +public class Operation { + /** + * If the value is `false`, it means the operation is still in progress. If `true`, the operation + * is completed, and either `error` or `response` is available. + */ + @JsonProperty("done") + private Boolean done; + + /** The error result of the operation in case of failure or cancellation. */ + @JsonProperty("error") + private DatabricksServiceExceptionWithDetailsProto error; + + /** + * Service-specific metadata associated with the operation. It typically contains progress + * information and common metadata such as create time. Some services might not provide such + * metadata. + */ + @JsonProperty("metadata") + private Object metadata; + + /** + * The server-assigned name, which is only unique within the same service that originally returns + * it. If you use the default HTTP mapping, the `name` should be a resource name ending with + * `operations/{unique_id}`. + */ + @JsonProperty("name") + private String name; + + /** The normal, successful response of the operation. 
*/ + @JsonProperty("response") + private Object response; + + public Operation setDone(Boolean done) { + this.done = done; + return this; + } + + public Boolean getDone() { + return done; + } + + public Operation setError(DatabricksServiceExceptionWithDetailsProto error) { + this.error = error; + return this; + } + + public DatabricksServiceExceptionWithDetailsProto getError() { + return error; + } + + public Operation setMetadata(Object metadata) { + this.metadata = metadata; + return this; + } + + public Object getMetadata() { + return metadata; + } + + public Operation setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Operation setResponse(Object response) { + this.response = response; + return this; + } + + public Object getResponse() { + return response; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Operation that = (Operation) o; + return Objects.equals(done, that.done) + && Objects.equals(error, that.error) + && Objects.equals(metadata, that.metadata) + && Objects.equals(name, that.name) + && Objects.equals(response, that.response); + } + + @Override + public int hashCode() { + return Objects.hash(done, error, metadata, name, response); + } + + @Override + public String toString() { + return new ToStringer(Operation.class) + .add("done", done) + .add("error", error) + .add("metadata", metadata) + .add("name", name) + .add("response", response) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentOperation.java new file mode 100755 index 000000000..52034f985 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentOperation.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.utils.SerDeUtils; +import com.databricks.sdk.service.common.lro.LroOptions; +import com.databricks.sdk.support.Generated; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Duration; +import java.util.Optional; +import java.util.concurrent.TimeoutException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Wrapper for interacting with a long-running refreshWorkspaceBaseEnvironment operation. Provides + * methods to wait for completion, check status, cancel, and access metadata. + */ +@Generated +public class RefreshWorkspaceBaseEnvironmentOperation { + private static final Logger LOG = + LoggerFactory.getLogger(RefreshWorkspaceBaseEnvironmentOperation.class); + + private final EnvironmentsService impl; + private Operation operation; + private final ObjectMapper objectMapper; + + public RefreshWorkspaceBaseEnvironmentOperation(EnvironmentsService impl, Operation operation) { + this.impl = impl; + this.operation = operation; + this.objectMapper = SerDeUtils.createMapper(); + } + + /** + * Wait for the operation to complete and return the resulting WorkspaceBaseEnvironment. Waits + * indefinitely if no timeout is specified. 
+ * + * @return the created WorkspaceBaseEnvironment + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public WorkspaceBaseEnvironment waitForCompletion() throws TimeoutException { + return waitForCompletion(Optional.empty()); + } + + /** + * Wait for the operation to complete and return the resulting WorkspaceBaseEnvironment. + * + * @param options the options for configuring the wait behavior, can be empty for defaults + * @return the created WorkspaceBaseEnvironment + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public WorkspaceBaseEnvironment waitForCompletion(Optional options) + throws TimeoutException { + Optional timeout = options.flatMap(LroOptions::getTimeout); + long deadline = + timeout.isPresent() + ? System.currentTimeMillis() + timeout.get().toMillis() + : Long.MAX_VALUE; + String statusMessage = "polling operation..."; + int attempt = 1; + + while (System.currentTimeMillis() < deadline) { + // Refresh the operation state + refreshOperation(); + + if (operation.getDone() != null && operation.getDone()) { + // Operation completed, check for success or failure + if (operation.getError() != null) { + String errorMsg = "unknown error"; + if (operation.getError().getMessage() != null + && !operation.getError().getMessage().isEmpty()) { + errorMsg = operation.getError().getMessage(); + } + + if (operation.getError().getErrorCode() != null) { + errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg); + } + + throw new DatabricksException("Operation failed: " + errorMsg); + } + + // Operation completed successfully, unmarshal response + if (operation.getResponse() == null) { + throw new DatabricksException("Operation completed but no response available"); + } + + try { + JsonNode responseJson = objectMapper.valueToTree(operation.getResponse()); 
+ return objectMapper.treeToValue(responseJson, WorkspaceBaseEnvironment.class); + } catch (JsonProcessingException e) { + throw new DatabricksException( + "Failed to unmarshal workspaceBaseEnvironment response: " + e.getMessage(), e); + } + } + + // Operation still in progress, wait before polling again + String prefix = String.format("operation=%s", operation.getName()); + int sleep = Math.min(attempt, 10); // sleep 10s max per attempt + LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep); + + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + + String timeoutMessage = + timeout.isPresent() + ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage) + : String.format("Operation timed out: %s", statusMessage); + throw new TimeoutException(timeoutMessage); + } + + /** + * Get the operation name. + * + * @return the operation name + */ + public String getName() { + return operation.getName(); + } + + /** + * Get the operation metadata. + * + * @return the operation metadata, or null if not available + * @throws DatabricksException if the metadata cannot be deserialized + */ + public WorkspaceBaseEnvironmentOperationMetadata getMetadata() { + if (operation.getMetadata() == null) { + return null; + } + + try { + JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata()); + return objectMapper.treeToValue( + metadataJson, WorkspaceBaseEnvironmentOperationMetadata.class); + } catch (JsonProcessingException e) { + throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e); + } + } + + /** + * Check if the operation is done. This method refreshes the operation state before checking. 
+ * + * @return true if the operation is complete, false otherwise + * @throws DatabricksException if the status check fails + */ + public boolean isDone() { + refreshOperation(); + return operation.getDone() != null && operation.getDone(); + } + + /** Refresh the operation state by polling the server. */ + private void refreshOperation() { + operation = impl.getOperation(new GetOperationRequest().setName(operation.getName())); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentRequest.java new file mode 100755 index 000000000..bf415eb79 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/RefreshWorkspaceBaseEnvironmentRequest.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Request message for RefreshWorkspaceBaseEnvironments. */ +@Generated +public class RefreshWorkspaceBaseEnvironmentRequest { + /** + * Required. The resource name of the workspace base environment to refresh. 
Format: + * workspace-base-environments/{workspace_base_environment} + */ + @JsonIgnore private String name; + + public RefreshWorkspaceBaseEnvironmentRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RefreshWorkspaceBaseEnvironmentRequest that = (RefreshWorkspaceBaseEnvironmentRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(RefreshWorkspaceBaseEnvironmentRequest.class) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateDefaultWorkspaceBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateDefaultWorkspaceBaseEnvironmentRequest.java new file mode 100755 index 000000000..20ea3edd2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateDefaultWorkspaceBaseEnvironmentRequest.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.FieldMask; +import java.util.Objects; + +@Generated +public class UpdateDefaultWorkspaceBaseEnvironmentRequest { + /** Required. The default workspace base environment configuration to update. 
*/ + @JsonProperty("default_workspace_base_environment") + private DefaultWorkspaceBaseEnvironment defaultWorkspaceBaseEnvironment; + + /** The resource name of this singleton resource. Format: default-workspace-base-environment */ + @JsonIgnore private String name; + + /** + * Field mask specifying which fields to update. Use comma as the separator for multiple fields + * (no space). The special value '*' indicates that all fields should be updated (full + * replacement). Valid field paths: cpu_workspace_base_environment, gpu_workspace_base_environment + * + *

To unset one or both defaults, include the field path(s) in the mask and omit them from the + * request body. To unset both, you must list both paths explicitly — the wildcard '*' cannot be + * used to unset fields. + */ + @JsonIgnore + @QueryParam("update_mask") + private FieldMask updateMask; + + public UpdateDefaultWorkspaceBaseEnvironmentRequest setDefaultWorkspaceBaseEnvironment( + DefaultWorkspaceBaseEnvironment defaultWorkspaceBaseEnvironment) { + this.defaultWorkspaceBaseEnvironment = defaultWorkspaceBaseEnvironment; + return this; + } + + public DefaultWorkspaceBaseEnvironment getDefaultWorkspaceBaseEnvironment() { + return defaultWorkspaceBaseEnvironment; + } + + public UpdateDefaultWorkspaceBaseEnvironmentRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateDefaultWorkspaceBaseEnvironmentRequest setUpdateMask(FieldMask updateMask) { + this.updateMask = updateMask; + return this; + } + + public FieldMask getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDefaultWorkspaceBaseEnvironmentRequest that = + (UpdateDefaultWorkspaceBaseEnvironmentRequest) o; + return Objects.equals(defaultWorkspaceBaseEnvironment, that.defaultWorkspaceBaseEnvironment) + && Objects.equals(name, that.name) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(defaultWorkspaceBaseEnvironment, name, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateDefaultWorkspaceBaseEnvironmentRequest.class) + .add("defaultWorkspaceBaseEnvironment", defaultWorkspaceBaseEnvironment) + .add("name", name) + .add("updateMask", updateMask) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentOperation.java new file mode 100755 index 000000000..19125a238 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentOperation.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.utils.SerDeUtils; +import com.databricks.sdk.service.common.lro.LroOptions; +import com.databricks.sdk.support.Generated; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Duration; +import java.util.Optional; +import java.util.concurrent.TimeoutException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Wrapper for interacting with a long-running updateWorkspaceBaseEnvironment operation. Provides + * methods to wait for completion, check status, cancel, and access metadata. + */ +@Generated +public class UpdateWorkspaceBaseEnvironmentOperation { + private static final Logger LOG = + LoggerFactory.getLogger(UpdateWorkspaceBaseEnvironmentOperation.class); + + private final EnvironmentsService impl; + private Operation operation; + private final ObjectMapper objectMapper; + + public UpdateWorkspaceBaseEnvironmentOperation(EnvironmentsService impl, Operation operation) { + this.impl = impl; + this.operation = operation; + this.objectMapper = SerDeUtils.createMapper(); + } + + /** + * Wait for the operation to complete and return the resulting WorkspaceBaseEnvironment. Waits + * indefinitely if no timeout is specified. 
+ * + * @return the updated WorkspaceBaseEnvironment + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public WorkspaceBaseEnvironment waitForCompletion() throws TimeoutException { + return waitForCompletion(Optional.empty()); + } + + /** + * Wait for the operation to complete and return the resulting WorkspaceBaseEnvironment. + * + * @param options the options for configuring the wait behavior, can be empty for defaults + * @return the updated WorkspaceBaseEnvironment + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public WorkspaceBaseEnvironment waitForCompletion(Optional options) + throws TimeoutException { + Optional timeout = options.flatMap(LroOptions::getTimeout); + long deadline = + timeout.isPresent() + ? System.currentTimeMillis() + timeout.get().toMillis() + : Long.MAX_VALUE; + String statusMessage = "polling operation..."; + int attempt = 1; + + while (System.currentTimeMillis() < deadline) { + // Refresh the operation state + refreshOperation(); + + if (operation.getDone() != null && operation.getDone()) { + // Operation completed, check for success or failure + if (operation.getError() != null) { + String errorMsg = "unknown error"; + if (operation.getError().getMessage() != null + && !operation.getError().getMessage().isEmpty()) { + errorMsg = operation.getError().getMessage(); + } + + if (operation.getError().getErrorCode() != null) { + errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg); + } + + throw new DatabricksException("Operation failed: " + errorMsg); + } + + // Operation completed successfully, unmarshal response + if (operation.getResponse() == null) { + throw new DatabricksException("Operation completed but no response available"); + } + + try { + JsonNode responseJson = objectMapper.valueToTree(operation.getResponse()); 
+ return objectMapper.treeToValue(responseJson, WorkspaceBaseEnvironment.class); + } catch (JsonProcessingException e) { + throw new DatabricksException( + "Failed to unmarshal workspaceBaseEnvironment response: " + e.getMessage(), e); + } + } + + // Operation still in progress, wait before polling again + String prefix = String.format("operation=%s", operation.getName()); + int sleep = Math.min(attempt, 10); // sleep 10s max per attempt + LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep); + + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + + String timeoutMessage = + timeout.isPresent() + ? String.format("Operation timed out after %s: %s", timeout.get(), statusMessage) + : String.format("Operation timed out: %s", statusMessage); + throw new TimeoutException(timeoutMessage); + } + + /** + * Get the operation name. + * + * @return the operation name + */ + public String getName() { + return operation.getName(); + } + + /** + * Get the operation metadata. + * + * @return the operation metadata, or null if not available + * @throws DatabricksException if the metadata cannot be deserialized + */ + public WorkspaceBaseEnvironmentOperationMetadata getMetadata() { + if (operation.getMetadata() == null) { + return null; + } + + try { + JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata()); + return objectMapper.treeToValue( + metadataJson, WorkspaceBaseEnvironmentOperationMetadata.class); + } catch (JsonProcessingException e) { + throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e); + } + } + + /** + * Check if the operation is done. This method refreshes the operation state before checking. 
+ * + * @return true if the operation is complete, false otherwise + * @throws DatabricksException if the status check fails + */ + public boolean isDone() { + refreshOperation(); + return operation.getDone() != null && operation.getDone(); + } + + /** Refresh the operation state by polling the server. */ + private void refreshOperation() { + operation = impl.getOperation(new GetOperationRequest().setName(operation.getName())); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentRequest.java new file mode 100755 index 000000000..7cdc5a7eb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/UpdateWorkspaceBaseEnvironmentRequest.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateWorkspaceBaseEnvironmentRequest { + /** */ + @JsonIgnore private String name; + + /** + * Required. The workspace base environment with updated fields. The name field is used to + * identify the environment to update. 
+ */ + @JsonProperty("workspace_base_environment") + private WorkspaceBaseEnvironment workspaceBaseEnvironment; + + public UpdateWorkspaceBaseEnvironmentRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public UpdateWorkspaceBaseEnvironmentRequest setWorkspaceBaseEnvironment( + WorkspaceBaseEnvironment workspaceBaseEnvironment) { + this.workspaceBaseEnvironment = workspaceBaseEnvironment; + return this; + } + + public WorkspaceBaseEnvironment getWorkspaceBaseEnvironment() { + return workspaceBaseEnvironment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateWorkspaceBaseEnvironmentRequest that = (UpdateWorkspaceBaseEnvironmentRequest) o; + return Objects.equals(name, that.name) + && Objects.equals(workspaceBaseEnvironment, that.workspaceBaseEnvironment); + } + + @Override + public int hashCode() { + return Objects.hash(name, workspaceBaseEnvironment); + } + + @Override + public String toString() { + return new ToStringer(UpdateWorkspaceBaseEnvironmentRequest.class) + .add("name", name) + .add("workspaceBaseEnvironment", workspaceBaseEnvironment) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironment.java new file mode 100755 index 000000000..7c21fccb6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironment.java @@ -0,0 +1,213 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.protobuf.Timestamp; +import java.util.Objects; + +/** + * A WorkspaceBaseEnvironment defines a workspace-level environment configuration consisting of an + * environment version and a list of dependencies. + */ +@Generated +public class WorkspaceBaseEnvironment { + /** The type of base environment (CPU or GPU). */ + @JsonProperty("base_environment_type") + private BaseEnvironmentType baseEnvironmentType; + + /** Timestamp when the environment was created. */ + @JsonProperty("create_time") + private Timestamp createTime; + + /** User ID of the creator. */ + @JsonProperty("creator_user_id") + private String creatorUserId; + + /** Human-readable display name for the workspace base environment. */ + @JsonProperty("display_name") + private String displayName; + + /** The WSFS or UC Volumes path to the environment YAML file. */ + @JsonProperty("filepath") + private String filepath; + + /** Whether this is the default environment for the workspace. */ + @JsonProperty("is_default") + private Boolean isDefault; + + /** User ID of the last user who updated the environment. */ + @JsonProperty("last_updated_user_id") + private String lastUpdatedUserId; + + /** Status message providing additional details about the environment status. */ + @JsonProperty("message") + private String message; + + /** + * The resource name of the workspace base environment. Format: + * workspace-base-environments/{workspace-base-environment} + */ + @JsonProperty("name") + private String name; + + /** The status of the materialized workspace base environment. */ + @JsonProperty("status") + private WorkspaceBaseEnvironmentCacheStatus status; + + /** Timestamp when the environment was last updated. 
*/ + @JsonProperty("update_time") + private Timestamp updateTime; + + public WorkspaceBaseEnvironment setBaseEnvironmentType(BaseEnvironmentType baseEnvironmentType) { + this.baseEnvironmentType = baseEnvironmentType; + return this; + } + + public BaseEnvironmentType getBaseEnvironmentType() { + return baseEnvironmentType; + } + + public WorkspaceBaseEnvironment setCreateTime(Timestamp createTime) { + this.createTime = createTime; + return this; + } + + public Timestamp getCreateTime() { + return createTime; + } + + public WorkspaceBaseEnvironment setCreatorUserId(String creatorUserId) { + this.creatorUserId = creatorUserId; + return this; + } + + public String getCreatorUserId() { + return creatorUserId; + } + + public WorkspaceBaseEnvironment setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public WorkspaceBaseEnvironment setFilepath(String filepath) { + this.filepath = filepath; + return this; + } + + public String getFilepath() { + return filepath; + } + + public WorkspaceBaseEnvironment setIsDefault(Boolean isDefault) { + this.isDefault = isDefault; + return this; + } + + public Boolean getIsDefault() { + return isDefault; + } + + public WorkspaceBaseEnvironment setLastUpdatedUserId(String lastUpdatedUserId) { + this.lastUpdatedUserId = lastUpdatedUserId; + return this; + } + + public String getLastUpdatedUserId() { + return lastUpdatedUserId; + } + + public WorkspaceBaseEnvironment setMessage(String message) { + this.message = message; + return this; + } + + public String getMessage() { + return message; + } + + public WorkspaceBaseEnvironment setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public WorkspaceBaseEnvironment setStatus(WorkspaceBaseEnvironmentCacheStatus status) { + this.status = status; + return this; + } + + public WorkspaceBaseEnvironmentCacheStatus getStatus() { + return 
status; + } + + public WorkspaceBaseEnvironment setUpdateTime(Timestamp updateTime) { + this.updateTime = updateTime; + return this; + } + + public Timestamp getUpdateTime() { + return updateTime; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkspaceBaseEnvironment that = (WorkspaceBaseEnvironment) o; + return Objects.equals(baseEnvironmentType, that.baseEnvironmentType) + && Objects.equals(createTime, that.createTime) + && Objects.equals(creatorUserId, that.creatorUserId) + && Objects.equals(displayName, that.displayName) + && Objects.equals(filepath, that.filepath) + && Objects.equals(isDefault, that.isDefault) + && Objects.equals(lastUpdatedUserId, that.lastUpdatedUserId) + && Objects.equals(message, that.message) + && Objects.equals(name, that.name) + && Objects.equals(status, that.status) + && Objects.equals(updateTime, that.updateTime); + } + + @Override + public int hashCode() { + return Objects.hash( + baseEnvironmentType, + createTime, + creatorUserId, + displayName, + filepath, + isDefault, + lastUpdatedUserId, + message, + name, + status, + updateTime); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceBaseEnvironment.class) + .add("baseEnvironmentType", baseEnvironmentType) + .add("createTime", createTime) + .add("creatorUserId", creatorUserId) + .add("displayName", displayName) + .add("filepath", filepath) + .add("isDefault", isDefault) + .add("lastUpdatedUserId", lastUpdatedUserId) + .add("message", message) + .add("name", name) + .add("status", status) + .add("updateTime", updateTime) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironmentCacheStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironmentCacheStatus.java new file mode 100755 index 000000000..6c398d06a --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironmentCacheStatus.java @@ -0,0 +1,16 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; + +/** Status of the environment materialization. */ +@Generated +public enum WorkspaceBaseEnvironmentCacheStatus { + CREATED, + EXPIRED, + FAILED, + INVALID, + PENDING, + REFRESHING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironmentOperationMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironmentOperationMetadata.java new file mode 100755 index 000000000..22e0dfb97 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/environments/WorkspaceBaseEnvironmentOperationMetadata.java @@ -0,0 +1,32 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.environments; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** + * Metadata for the WorkspaceBaseEnvironment long-running operations. This message tracks the + * progress of the workspace base environment long-running process. 
+ */ +@Generated +public class WorkspaceBaseEnvironmentOperationMetadata { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(WorkspaceBaseEnvironmentOperationMetadata.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleRequest.java index 51d128aae..781072aa2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/DeleteRoleRequest.java @@ -22,9 +22,6 @@ public class DeleteRoleRequest { * *

NOTE: setting this requires spinning up a compute to succeed, since it involves running SQL * queries. - * - *

TODO: #LKB-7187 implement reassign_owned_to on LBM side. This might end-up being a - * synchronous query when this parameter is used. */ @JsonIgnore @QueryParam("reassign_owned_to") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java index ea8b27b53..7cd94edc4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/VpcEndpoint.java @@ -59,7 +59,7 @@ public class VpcEndpoint { * This enumeration represents the type of Databricks VPC endpoint service that was used when * creating this VPC endpoint. If the VPC endpoint connects to the Databricks control plane for * either the front-end connection or the back-end REST API connection, the value is - * WORKSPACE_ACCESS. If the VPC endpoint connects to the Databricks workspace for the back-end + * GENERAL_ACCESS. If the VPC endpoint connects to the Databricks workspace for the back-end * secure cluster connectivity relay, the value is DATAPLANE_RELAY_ACCESS. */ @JsonProperty("use_case") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java index 41bb6164a..a71b19a9e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListWarehousesRequest.java @@ -27,8 +27,8 @@ public class ListWarehousesRequest { private String pageToken; /** - * Service Principal which will be used to fetch the list of endpoints. If not specified, SQL - * Gateway will use the user from the session header. + * Deprecated: this field is ignored by the server. 
Service Principal which will be used to fetch + * the list of endpoints. If not specified, SQL Gateway will use the user from the session header. */ @JsonIgnore @QueryParam("run_as_user_id")