From a91001d807a21de6eca4be5c06ab39553f30a8d9 Mon Sep 17 00:00:00 2001 From: "databricks-ci-ghec-1[bot]" <184311507+databricks-ci-ghec-1[bot]@users.noreply.github.com> Date: Wed, 11 Mar 2026 10:35:24 +0000 Subject: [PATCH] Update SDK to a194dd8a4fc0bf0a409db5fd7fbd591f96956cb2 --- .codegen/_openapi_sha | 2 +- .gitattributes | 2 + NEXT_CHANGELOG.md | 3 + .../sdk/service/pipelines/ConnectorType.java | 16 ++++ .../service/pipelines/DataStagingOptions.java | 81 ++++++++++++++++ .../IngestionPipelineDefinition.java | 37 ++++++++ .../sdk/service/pipelines/Origin.java | 92 +++++++++++++++++++ .../serving/ExternalFunctionRequest.java | 24 ++++- 8 files changed, 254 insertions(+), 3 deletions(-) mode change 100644 => 100755 NEXT_CHANGELOG.md create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorType.java create mode 100755 databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataStagingOptions.java diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index d93beb365..99552624c 100755 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -803daa7abba0b78423769fbdce29ba4c66854776 \ No newline at end of file +a194dd8a4fc0bf0a409db5fd7fbd591f96956cb2 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 56ec99730..c5f629c8a 100755 --- a/.gitattributes +++ b/.gitattributes @@ -2122,10 +2122,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CloneMode databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorType.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneId.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataStagingOptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeploymentKind.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md old mode 100644 new mode 100755 index b96f53a7b..46c0db250 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -13,3 +13,6 @@ ### Internal Changes ### API Changes +* Add `connectorType` and `dataStagingOptions` fields for `com.databricks.sdk.service.pipelines.IngestionPipelineDefinition`. +* Add `ingestionSourceCatalogName`, `ingestionSourceConnectionName`, `ingestionSourceSchemaName`, `ingestionSourceTableName` and `ingestionSourceTableVersion` fields for `com.databricks.sdk.service.pipelines.Origin`. +* Add `subDomain` field for `com.databricks.sdk.service.serving.ExternalFunctionRequest`. 
\ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorType.java new file mode 100755 index 000000000..87da36358 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorType.java @@ -0,0 +1,16 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +/** + * For certain database sources LakeFlow Connect offers both query based and cdc ingestion, + * ConnectorType can be used to convey the type of ingestion. If connection_name is provided for + * database sources, we default to Query Based ingestion + */ +@Generated +public enum ConnectorType { + CDC, + QUERY_BASED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataStagingOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataStagingOptions.java new file mode 100755 index 000000000..285da6781 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataStagingOptions.java @@ -0,0 +1,81 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Location of staged data storage */ +@Generated +public class DataStagingOptions { + /** (Required, Immutable) The name of the catalog for the connector's staging storage location. */ + @JsonProperty("catalog_name") + private String catalogName; + + /** (Required, Immutable) The name of the schema for the connector's staging storage location. 
*/ + @JsonProperty("schema_name") + private String schemaName; + + /** + * (Optional) The Unity Catalog-compatible name for the storage location. This is the volume to + * use for the data that is extracted by the connector. Spark Declarative Pipelines system will + * automatically create the volume under the catalog and schema. For Combined Cdc Managed + * Ingestion pipelines default name for the volume would be : + * __databricks_ingestion_gateway_staging_data-$pipelineId + */ + @JsonProperty("volume_name") + private String volumeName; + + public DataStagingOptions setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public DataStagingOptions setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public DataStagingOptions setVolumeName(String volumeName) { + this.volumeName = volumeName; + return this; + } + + public String getVolumeName() { + return volumeName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataStagingOptions that = (DataStagingOptions) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(volumeName, that.volumeName); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, schemaName, volumeName); + } + + @Override + public String toString() { + return new ToStringer(DataStagingOptions.class) + .add("catalogName", catalogName) + .add("schemaName", schemaName) + .add("volumeName", volumeName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java index 
bcaa545ef..d4dbc0e8f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java @@ -23,6 +23,19 @@ public class IngestionPipelineDefinition { @JsonProperty("connection_name") private String connectionName; + /** (Optional) Connector Type for sources. Ex: CDC, Query Based. */ + @JsonProperty("connector_type") + private ConnectorType connectorType; + + /** + * (Optional) Location of staged data storage. This is required for migration from Cdc Managed + * Ingestion Pipeline with Gateway pipeline to Combined Cdc Managed Ingestion Pipeline. If not + * specified, the volume for staged data will be created in catalog and schema/target specified in + * the top level pipeline definition. + */ + @JsonProperty("data_staging_options") + private DataStagingOptions dataStagingOptions; + /** (Optional) A window that specifies a set of time ranges for snapshot queries in CDC. 
*/ @JsonProperty("full_refresh_window") private OperationTimeWindow fullRefreshWindow; @@ -85,6 +98,24 @@ public String getConnectionName() { return connectionName; } + public IngestionPipelineDefinition setConnectorType(ConnectorType connectorType) { + this.connectorType = connectorType; + return this; + } + + public ConnectorType getConnectorType() { + return connectorType; + } + + public IngestionPipelineDefinition setDataStagingOptions(DataStagingOptions dataStagingOptions) { + this.dataStagingOptions = dataStagingOptions; + return this; + } + + public DataStagingOptions getDataStagingOptions() { + return dataStagingOptions; + } + public IngestionPipelineDefinition setFullRefreshWindow(OperationTimeWindow fullRefreshWindow) { this.fullRefreshWindow = fullRefreshWindow; return this; @@ -165,6 +196,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; IngestionPipelineDefinition that = (IngestionPipelineDefinition) o; return Objects.equals(connectionName, that.connectionName) + && Objects.equals(connectorType, that.connectorType) + && Objects.equals(dataStagingOptions, that.dataStagingOptions) && Objects.equals(fullRefreshWindow, that.fullRefreshWindow) && Objects.equals(ingestFromUcForeignCatalog, that.ingestFromUcForeignCatalog) && Objects.equals(ingestionGatewayId, that.ingestionGatewayId) @@ -179,6 +212,8 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( connectionName, + connectorType, + dataStagingOptions, fullRefreshWindow, ingestFromUcForeignCatalog, ingestionGatewayId, @@ -193,6 +228,8 @@ public int hashCode() { public String toString() { return new ToStringer(IngestionPipelineDefinition.class) .add("connectionName", connectionName) + .add("connectorType", connectorType) + .add("dataStagingOptions", dataStagingOptions) .add("fullRefreshWindow", fullRefreshWindow) .add("ingestFromUcForeignCatalog", ingestFromUcForeignCatalog) .add("ingestionGatewayId", ingestionGatewayId) diff 
--git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java index 9e36878d7..06fb0e8a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Origin.java @@ -40,6 +40,38 @@ public class Origin { @JsonProperty("host") private String host; + /** + * The name of the source catalog name (if known) from whose data ingestion is described by this + * event. + */ + @JsonProperty("ingestion_source_catalog_name") + private String ingestionSourceCatalogName; + + /** + * The name of the source UC connection (if known) from whose data ingestion is described by this + * event. + */ + @JsonProperty("ingestion_source_connection_name") + private String ingestionSourceConnectionName; + + /** + * The name of the source schema name (if known) from whose data ingestion is described by this + * event. + */ + @JsonProperty("ingestion_source_schema_name") + private String ingestionSourceSchemaName; + + /** + * The name of the source table name (if known) from whose data ingestion is described by this + * event. + */ + @JsonProperty("ingestion_source_table_name") + private String ingestionSourceTableName; + + /** An optional implementation-defined source table version of a dataset being (re)ingested. */ + @JsonProperty("ingestion_source_table_version") + private String ingestionSourceTableVersion; + /** The id of a maintenance run. Globally unique. 
*/ @JsonProperty("maintenance_id") private String maintenanceId; @@ -143,6 +175,51 @@ public String getHost() { return host; } + public Origin setIngestionSourceCatalogName(String ingestionSourceCatalogName) { + this.ingestionSourceCatalogName = ingestionSourceCatalogName; + return this; + } + + public String getIngestionSourceCatalogName() { + return ingestionSourceCatalogName; + } + + public Origin setIngestionSourceConnectionName(String ingestionSourceConnectionName) { + this.ingestionSourceConnectionName = ingestionSourceConnectionName; + return this; + } + + public String getIngestionSourceConnectionName() { + return ingestionSourceConnectionName; + } + + public Origin setIngestionSourceSchemaName(String ingestionSourceSchemaName) { + this.ingestionSourceSchemaName = ingestionSourceSchemaName; + return this; + } + + public String getIngestionSourceSchemaName() { + return ingestionSourceSchemaName; + } + + public Origin setIngestionSourceTableName(String ingestionSourceTableName) { + this.ingestionSourceTableName = ingestionSourceTableName; + return this; + } + + public String getIngestionSourceTableName() { + return ingestionSourceTableName; + } + + public Origin setIngestionSourceTableVersion(String ingestionSourceTableVersion) { + this.ingestionSourceTableVersion = ingestionSourceTableVersion; + return this; + } + + public String getIngestionSourceTableVersion() { + return ingestionSourceTableVersion; + } + public Origin setMaintenanceId(String maintenanceId) { this.maintenanceId = maintenanceId; return this; @@ -245,6 +322,11 @@ public boolean equals(Object o) { && Objects.equals(flowId, that.flowId) && Objects.equals(flowName, that.flowName) && Objects.equals(host, that.host) + && Objects.equals(ingestionSourceCatalogName, that.ingestionSourceCatalogName) + && Objects.equals(ingestionSourceConnectionName, that.ingestionSourceConnectionName) + && Objects.equals(ingestionSourceSchemaName, that.ingestionSourceSchemaName) + && 
Objects.equals(ingestionSourceTableName, that.ingestionSourceTableName) + && Objects.equals(ingestionSourceTableVersion, that.ingestionSourceTableVersion) && Objects.equals(maintenanceId, that.maintenanceId) && Objects.equals(materializationName, that.materializationName) && Objects.equals(orgId, that.orgId) @@ -267,6 +349,11 @@ public int hashCode() { flowId, flowName, host, + ingestionSourceCatalogName, + ingestionSourceConnectionName, + ingestionSourceSchemaName, + ingestionSourceTableName, + ingestionSourceTableVersion, maintenanceId, materializationName, orgId, @@ -289,6 +376,11 @@ public String toString() { .add("flowId", flowId) .add("flowName", flowName) .add("host", host) + .add("ingestionSourceCatalogName", ingestionSourceCatalogName) + .add("ingestionSourceConnectionName", ingestionSourceConnectionName) + .add("ingestionSourceSchemaName", ingestionSourceSchemaName) + .add("ingestionSourceTableName", ingestionSourceTableName) + .add("ingestionSourceTableVersion", ingestionSourceTableVersion) .add("maintenanceId", maintenanceId) .add("materializationName", materializationName) .add("orgId", orgId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java index ab122974a..6fd727e30 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFunctionRequest.java @@ -37,6 +37,15 @@ public class ExternalFunctionRequest { @JsonProperty("path") private String path; + /** + * Optional subdomain to prepend to the connection URL's host. If provided, this will be added as + * a prefix to the connection URL's host. For example, if the connection URL is + * `https://api.example.com/v1` and `sub_domain` is `"custom"`, the resulting URL will be + * `https://custom.api.example.com/v1`. 
+ */ + @JsonProperty("sub_domain") + private String subDomain; + public ExternalFunctionRequest setConnectionName(String connectionName) { this.connectionName = connectionName; return this; @@ -91,6 +100,15 @@ public String getPath() { return path; } + public ExternalFunctionRequest setSubDomain(String subDomain) { + this.subDomain = subDomain; + return this; + } + + public String getSubDomain() { + return subDomain; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -101,12 +119,13 @@ public boolean equals(Object o) { && Objects.equals(json, that.json) && Objects.equals(method, that.method) && Objects.equals(params, that.params) - && Objects.equals(path, that.path); + && Objects.equals(path, that.path) + && Objects.equals(subDomain, that.subDomain); } @Override public int hashCode() { - return Objects.hash(connectionName, headers, json, method, params, path); + return Objects.hash(connectionName, headers, json, method, params, path, subDomain); } @Override @@ -118,6 +137,7 @@ public String toString() { .add("method", method) .add("params", params) .add("path", path) + .add("subDomain", subDomain) .toString(); } }