diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 19009050a..6d4ada17b 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-59c4c0f3d5f0ef00cd5350b5674e941a7606d91a
\ No newline at end of file
+8f5eedbc991c4f04ce1284406577b0c92d59a224
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index c5f560256..0063251d2 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -185,6 +185,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudge
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetConfigurationResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateBudgetPolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UpdateLogDeliveryConfigurationStatusRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardMajorVersion.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardsImpl.java linguist-generated=true
@@ -951,6 +952,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAtt
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversationSummary.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationMessageRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java linguist-generated=true
@@ -980,6 +982,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSta
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSuggestedQuestionsAttachment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java linguist-generated=true
@@ -1014,6 +1017,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrip
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachmentPurpose.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true
@@ -1411,6 +1415,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsReques
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutput.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java linguist-generated=true
@@ -1672,6 +1678,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.j
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ApproveTransitionRequestResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/AuthConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BatchCreateMaterializedFeaturesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BatchCreateMaterializedFeaturesResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ColumnIdentifier.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentActivityAction.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CommentObject.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ContinuousWindow.java linguist-generated=true
@@ -1683,6 +1693,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateFeatureReq
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateFeatureTagRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateKafkaConfigRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateMaterializedFeatureRequest.java linguist-generated=true
@@ -1704,6 +1715,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteCommentReq
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteExperiment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureTagRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteKafkaConfigRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteMaterializedFeatureRequest.java linguist-generated=true
@@ -1712,6 +1724,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRe
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionTagRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineStoreRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRun.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRuns.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteRunsResponse.java linguist-generated=true
@@ -1768,6 +1781,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetFeatureReques
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetFeatureTagRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetForecastingExperimentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetHistoryRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetKafkaConfigRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java linguist-generated=true
@@ -1789,8 +1803,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetRunResponse.j
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/HttpUrlSpecWithoutSecret.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/InputTag.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobContext.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobSpecWithoutSecret.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaSource.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LineageContext.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LinkedFeature.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListArtifactsResponse.java linguist-generated=true
@@ -1800,6 +1818,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeatureTagsR
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeatureTagsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeaturesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeaturesResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListKafkaConfigsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListKafkaConfigsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java linguist-generated=true
@@ -1843,6 +1863,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionStat
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTag.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OfflineStoreConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStore.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStoreConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStoreState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/PermissionLevel.java linguist-generated=true
@@ -1875,6 +1896,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java lin
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfoStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SchemaConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchLoggedModelsDataset.java linguist-generated=true
@@ -1894,6 +1916,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetModelVersionT
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SetTag.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SlidingWindow.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Status.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SubscriptionMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TestRegistryWebhookResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TimeWindow.java linguist-generated=true
@@ -1906,6 +1929,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentRes
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperiment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateKafkaConfigRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateMaterializedFeatureRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java linguist-generated=true
@@ -1978,6 +2002,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccoun
databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigger.java linguist-generated=true
@@ -2044,6 +2069,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PostgresC
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PostgresSlotConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RewindDatasetSpec.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RewindSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java linguist-generated=true
@@ -3017,6 +3044,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIn
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListValue.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListVectorIndexesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MapStringValueEntry.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Metric.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricLabel.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricValue.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricValues.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponse.java linguist-generated=true
@@ -3028,6 +3059,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Rerank
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RerankerConfigRerankerParameters.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RetrieveUserVisibleMetricsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RetrieveUserVisibleMetricsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Struct.java linguist-generated=true
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 7ab88fe73..c7aeec686 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -13,3 +13,30 @@
### Internal Changes
### API Changes
+* Add `createSpace()` and `updateSpace()` methods for `workspaceClient.genie()` service.
+* Add `batchCreateMaterializedFeatures()`, `createKafkaConfig()`, `deleteKafkaConfig()`, `getKafkaConfig()`, `listKafkaConfigs()` and `updateKafkaConfig()` methods for `workspaceClient.featureEngineering()` service.
+* Add `deleteOnlineTable()` method for `workspaceClient.featureStore()` service.
+* Add `retrieveUserVisibleMetrics()` method for `workspaceClient.vectorSearchEndpoints()` service.
+* Add `majorVersion` field for `com.databricks.sdk.service.billing.CreateBillingUsageDashboardRequest`.
+* Add `includeSerializedSpace` field for `com.databricks.sdk.service.dashboards.GenieGetSpaceRequest`.
+* Add `serializedSpace` field for `com.databricks.sdk.service.dashboards.GenieSpace`.
+* Add `purpose` field for `com.databricks.sdk.service.dashboards.TextAttachment`.
+* Add `budgetPolicyId` field for `com.databricks.sdk.service.database.NewPipelineSpec`.
+* Add `model` field for `com.databricks.sdk.service.jobs.TriggerSettings`.
+* Add `kafkaSource` field for `com.databricks.sdk.service.ml.DataSource`.
+* Add `lineageContext` field for `com.databricks.sdk.service.ml.Feature`.
+* Add `connectionParameters` field for `com.databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`.
+* Add `ingestFromUcForeignCatalog` field for `com.databricks.sdk.service.pipelines.IngestionPipelineDefinition`.
+* Add `rewindSpec` field for `com.databricks.sdk.service.pipelines.StartUpdate`.
+* Add `typeText` field for `com.databricks.sdk.service.vectorsearch.ColumnInfo`.
+* Add `AUTOSCALE_V2` enum value for `com.databricks.sdk.service.compute.EventDetailsCause`.
+* Add `UNSUPPORTED_CONVERSATION_TYPE_EXCEPTION` enum value for `com.databricks.sdk.service.dashboards.MessageErrorType`.
+* Add `FOREIGN_CATALOG` enum value for `com.databricks.sdk.service.pipelines.IngestionSourceType`.
+* Add `CREATING` and `CREATE_FAILED` enum values for `com.databricks.sdk.service.settings.CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState`.
+* Add `CREATING` and `CREATE_FAILED` enum values for `com.databricks.sdk.service.settings.NccAzurePrivateEndpointRuleConnectionState`.
+* Add `RED_STATE` and `YELLOW_STATE` enum values for `com.databricks.sdk.service.vectorsearch.EndpointStatusState`.
+* Change `destinations` field for `com.databricks.sdk.service.catalog.AccessRequestDestinations` to no longer be required.
+* [Breaking] Change `destinations` field for `com.databricks.sdk.service.catalog.AccessRequestDestinations` to no longer be required.
+* Change `tableNames` field for `com.databricks.sdk.service.jobs.TableUpdateTriggerConfiguration` to be required.
+* [Breaking] Change `tableNames` field for `com.databricks.sdk.service.jobs.TableUpdateTriggerConfiguration` to be required.
+* [Breaking] Change `onlineStoreConfig` field for `com.databricks.sdk.service.ml.MaterializedFeature` to type `com.databricks.sdk.service.ml.OnlineStoreConfig` class.
\ No newline at end of file
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index e0c6ccce2..343026d7d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -1116,7 +1116,7 @@ public IpAccessListsAPI ipAccessLists() {
* multi-task workflow with complex dependencies. Databricks manages the task orchestration,
* cluster management, monitoring, and error reporting for all of your jobs. You can run your jobs
* immediately or periodically through an easy-to-use scheduling system. You can implement job
- * tasks using notebooks, JARS, Delta Live Tables pipelines, or Python, Scala, Spark submit, and
+ * tasks using notebooks, JARS, Spark Declarative Pipelines, or Python, Scala, Spark submit, and
* Java applications.
*
* <p>You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to
@@ -1272,19 +1272,19 @@ public PermissionsAPI permissions() {
}
/**
- * The Delta Live Tables API allows you to create, edit, delete, start, and view details about
- * pipelines.
+ * The Lakeflow Spark Declarative Pipelines API allows you to create, edit, delete, start, and
+ * view details about pipelines.
*
- * <p>Delta Live Tables is a framework for building reliable, maintainable, and testable data
- * processing pipelines. You define the transformations to perform on your data, and Delta Live
- * Tables manages task orchestration, cluster management, monitoring, data quality, and error
- * handling.
+ * <p>Spark Declarative Pipelines is a framework for building reliable, maintainable, and testable
+ * data processing pipelines. You define the transformations to perform on your data, and Spark
+ * Declarative Pipelines manages task orchestration, cluster management, monitoring, data quality,
+ * and error handling.
*
- * <p>Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta
- * Live Tables manages how your data is transformed based on a target schema you define for each
- * processing step. You can also enforce data quality with Delta Live Tables expectations.
- * Expectations allow you to define expected data quality and specify how to handle records that
- * fail those expectations.
+ * <p>Instead of defining your data pipelines using a series of separate Apache Spark tasks, Spark
+ * Declarative Pipelines manages how your data is transformed based on a target schema you define
+ * for each processing step. You can also enforce data quality with Spark Declarative Pipelines
+ * expectations. Expectations allow you to define expected data quality and specify how to handle
+ * records that fail those expectations.
*/
public PipelinesAPI pipelines() {
return pipelinesAPI;
@@ -1602,12 +1602,10 @@ public ResourceQuotasAPI resourceQuotas() {
}
/**
- * Request for Access enables customers to request access to and manage access request
- * destinations for Unity Catalog securables.
+ * Request for Access enables users to request access for Unity Catalog securables.
*
- * <p>These APIs provide a standardized way to update, get, and request to access request
- * destinations. Fine-grained authorization ensures that only users with appropriate permissions
- * can manage access request destinations.
+ * <p>These APIs provide a standardized way for securable owners (or users with MANAGE privileges)
+ * to manage access request destinations.
*/
public RfaAPI rfa() {
return rfaAPI;
@@ -1863,11 +1861,14 @@ public TablesAPI tables() {
}
/**
- * The Tag Policy API allows you to manage policies for governed tags in Databricks. Permissions
- * for tag policies can be managed using the [Account Access Control Proxy API].
+ * The Tag Policy API allows you to manage policies for governed tags in Databricks. For Terraform
+ * usage, see the [Tag Policy Terraform documentation]. Permissions for tag policies can be
+ * managed using the [Account Access Control Proxy API].
*
* <p>[Account Access Control Proxy API]:
- * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/tag_policy
*/
public TagPoliciesAPI tagPolicies() {
return tagPoliciesAPI;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java
index ff5b63350..ea90ad401 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeSize.java
@@ -7,6 +7,5 @@
@Generated
public enum ComputeSize {
LARGE,
- LIQUID,
MEDIUM,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequest.java
index 60f8d7e60..329e8b7a8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/CreateBillingUsageDashboardRequest.java
@@ -16,6 +16,10 @@ public class CreateBillingUsageDashboardRequest {
@JsonProperty("dashboard_type")
private UsageDashboardType dashboardType;
+ /** The major version of the usage dashboard template to use. Defaults to VERSION_1. */
+ @JsonProperty("major_version")
+ private UsageDashboardMajorVersion majorVersion;
+
/** The workspace ID of the workspace in which the usage dashboard is created. */
@JsonProperty("workspace_id")
private Long workspaceId;
@@ -29,6 +33,16 @@ public UsageDashboardType getDashboardType() {
return dashboardType;
}
+ public CreateBillingUsageDashboardRequest setMajorVersion(
+ UsageDashboardMajorVersion majorVersion) {
+ this.majorVersion = majorVersion;
+ return this;
+ }
+
+ public UsageDashboardMajorVersion getMajorVersion() {
+ return majorVersion;
+ }
+
public CreateBillingUsageDashboardRequest setWorkspaceId(Long workspaceId) {
this.workspaceId = workspaceId;
return this;
@@ -44,18 +58,20 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
CreateBillingUsageDashboardRequest that = (CreateBillingUsageDashboardRequest) o;
return Objects.equals(dashboardType, that.dashboardType)
+ && Objects.equals(majorVersion, that.majorVersion)
&& Objects.equals(workspaceId, that.workspaceId);
}
@Override
public int hashCode() {
- return Objects.hash(dashboardType, workspaceId);
+ return Objects.hash(dashboardType, majorVersion, workspaceId);
}
@Override
public String toString() {
return new ToStringer(CreateBillingUsageDashboardRequest.class)
.add("dashboardType", dashboardType)
+ .add("majorVersion", majorVersion)
.add("workspaceId", workspaceId)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardMajorVersion.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardMajorVersion.java
new file mode 100755
index 000000000..5affd678e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashboardMajorVersion.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.billing;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum UsageDashboardMajorVersion {
+ USAGE_DASHBOARD_MAJOR_VERSION_1,
+ USAGE_DASHBOARD_MAJOR_VERSION_2,
+}
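
A minimal usage sketch for the new field, assuming the account-level `usageDashboards()` accessor with its existing `create()` method and the `USAGE_DASHBOARD_TYPE_WORKSPACE` enum value; the workspace ID is illustrative:

```java
import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.billing.CreateBillingUsageDashboardRequest;
import com.databricks.sdk.service.billing.UsageDashboardMajorVersion;
import com.databricks.sdk.service.billing.UsageDashboardType;

public class CreateUsageDashboardV2 {
  public static void main(String[] args) {
    AccountClient a = new AccountClient(); // reads credentials from the environment
    // Omitting setMajorVersion keeps today's behavior (defaults to VERSION_1).
    a.usageDashboards()
        .create(
            new CreateBillingUsageDashboardRequest()
                .setDashboardType(UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE) // assumed value
                .setMajorVersion(UsageDashboardMajorVersion.USAGE_DASHBOARD_MAJOR_VERSION_2)
                .setWorkspaceId(1234567890L)); // illustrative workspace ID
  }
}
```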
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
index efeb27019..c97815f48 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java
@@ -79,6 +79,10 @@ public CatalogInfo get(GetCatalogRequest request) {
* reached.
*/
public Iterable<CatalogInfo> list(ListCatalogsRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::list,
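
The guard above (repeated verbatim in the other catalog list methods below) only fills in `maxResults` when the caller left it unset; a sketch of the caller-visible effect:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.CatalogInfo;
import com.databricks.sdk.service.catalog.ListCatalogsRequest;

public class ListAllCatalogs {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // maxResults is left unset; the SDK now substitutes 0, which lets the server
    // pick its default page size, and the Paginator follows page tokens to the end.
    for (CatalogInfo catalog : w.catalogs().list(new ListCatalogsRequest())) {
      System.out.println(catalog.getName());
    }
  }
}
```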
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
index ffbf0fff6..b71e21003 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Next Id: 47 */
+/** Next Id: 48 */
@Generated
public enum ConnectionType {
BIGQUERY,
@@ -15,7 +15,6 @@ public enum ConnectionType {
HTTP,
MYSQL,
ORACLE,
- PALANTIR,
POSTGRESQL,
POWER_BI,
REDSHIFT,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
index a776e44c0..33636ebfa 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java
@@ -74,6 +74,10 @@ public ConnectionInfo get(GetConnectionRequest request) {
* reached.
*/
public Iterable<ConnectionInfo> list(ListConnectionsRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::list,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
index a9f351033..f44fc3f02 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java
@@ -83,6 +83,10 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) {
* reached.
*/
public Iterable<ExternalLocationInfo> list(ListExternalLocationsRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::list,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
index 135730627..8a1a22cd7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java
@@ -96,6 +96,10 @@ public Iterable<FunctionInfo> list(String catalogName, String schemaName) {
* reached.
*/
public Iterable<FunctionInfo> list(ListFunctionsRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::list,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
index 35e643dbf..a12fed217 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java
@@ -95,6 +95,10 @@ public MetastoreInfo get(GetMetastoreRequest request) {
* reached.
*/
public Iterable<MetastoreInfo> list(ListMetastoresRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::list,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaAPI.java
index a37bc8093..e4d4e14f6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaAPI.java
@@ -7,12 +7,10 @@
import org.slf4j.LoggerFactory;
/**
- * Request for Access enables customers to request access to and manage access request destinations
- * for Unity Catalog securables.
+ * Request for Access enables users to request access for Unity Catalog securables.
*
- * <p>These APIs provide a standardized way to update, get, and request to access request
- * destinations. Fine-grained authorization ensures that only users with appropriate permissions can
- * manage access request destinations.
+ * <p>These APIs provide a standardized way for securable owners (or users with MANAGE privileges)
+ * to manage access request destinations.
*/
@Generated
public class RfaAPI {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaService.java
index 15b8dc7c3..cdd8ef843 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RfaService.java
@@ -4,12 +4,10 @@
import com.databricks.sdk.support.Generated;
/**
- * Request for Access enables customers to request access to and manage access request destinations
- * for Unity Catalog securables.
+ * Request for Access enables users to request access for Unity Catalog securables.
*
- * <p>These APIs provide a standardized way to update, get, and request to access request
- * destinations. Fine-grained authorization ensures that only users with appropriate permissions can
- * manage access request destinations.
+ * <p>These APIs provide a standardized way for securable owners (or users with MANAGE privileges)
+ * to manage access request destinations.
*
* <p>This is the high-level interface, that contains generated methods.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
index c69c4b0b9..73a1763b9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java
@@ -80,6 +80,10 @@ public Iterable<SchemaInfo> list(String catalogName) {
* reached.
*/
public Iterable<SchemaInfo> list(ListSchemasRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::list,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
index d947e19aa..56e2712bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Latest kind: CONNECTION_AWS_SECRETS_MANAGER = 270; Next id:271 */
+/** Latest kind: SECRET_EXTERNAL_AWS_SECRETS_MANAGER = 273; Next id:274 */
@Generated
public enum SecurableKind {
TABLE_DB_STORAGE,
@@ -41,7 +41,6 @@ public enum SecurableKind {
TABLE_FOREIGN_MYSQL,
TABLE_FOREIGN_NETSUITE,
TABLE_FOREIGN_ORACLE,
- TABLE_FOREIGN_PALANTIR,
TABLE_FOREIGN_POSTGRESQL,
TABLE_FOREIGN_REDSHIFT,
TABLE_FOREIGN_SALESFORCE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
index 7f0708807..ed1d215f2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java
@@ -85,6 +85,10 @@ public StorageCredentialInfo get(GetStorageCredentialRequest request) {
* reached.
*/
public Iterable<StorageCredentialInfo> list(ListStorageCredentialsRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::list,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
index 56b33ef00..ff6a9ab0d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java
@@ -65,6 +65,10 @@ public Iterable<SystemSchemaInfo> list(String metastoreId) {
* reached.
*/
public Iterable<SystemSchemaInfo> list(ListSystemSchemasRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::list,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
index 3047b6db3..ac41bdf5a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java
@@ -126,6 +126,10 @@ public Iterable<TableInfo> list(String catalogName, String schemaName) {
* reached.
*/
public Iterable<TableInfo> list(ListTablesRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::list,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java
index 7ca47d320..41882d9c3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/WorkspaceBindingsAPI.java
@@ -72,6 +72,10 @@ public Iterable<WorkspaceBinding> getBindings(String securableType, String secur
* reached.
*/
public Iterable<WorkspaceBinding> getBindings(GetBindingsRequest request) {
+
+ if (request.getMaxResults() == null) {
+ request.setMaxResults(0L);
+ }
return new Paginator<>(
request,
impl::getBindings,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java
index 377cbeea7..82690ff99 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java
@@ -101,9 +101,9 @@ public class AwsAttributes {
* be of a form like "us-west-2a". The provided availability zone must be in the same region as
* the Databricks deployment. For example, "us-west-2a" is not a valid zone id if the Databricks
* deployment resides in the "us-east-1" region. This is an optional field at cluster creation,
- * and if not specified, a default zone will be used. If the zone specified is "auto", will try to
- * place cluster in a zone with high availability, and will retry placement in a different AZ if
- * there is not enough capacity.
+ * and if not specified, the zone "auto" will be used. If the zone specified is "auto", will try
+ * to place cluster in a zone with high availability, and will retry placement in a different AZ
+ * if there is not enough capacity.
*
* <p>The list of available zones as well as the default value can be found by using the `List
* Zones` method.
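
Per the revised javadoc, an unset `zoneId` now behaves like `"auto"`; a hedged sketch of pinning that behavior explicitly (cluster name, Spark version, and node type are illustrative):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.AwsAttributes;
import com.databricks.sdk.service.compute.CreateCluster;

public class AutoZonePlacement {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // "auto" asks for a zone with spare capacity and retries other AZs on shortage.
    w.clusters()
        .create(
            new CreateCluster()
                .setClusterName("auto-zone-demo")
                .setSparkVersion("15.4.x-scala2.12")
                .setNodeTypeId("i3.xlarge")
                .setNumWorkers(1L)
                .setAwsAttributes(new AwsAttributes().setZoneId("auto")));
  }
}
```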
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetailsCause.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetailsCause.java
index f4f71b0ec..6c0d6b655 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetailsCause.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetailsCause.java
@@ -9,6 +9,7 @@
public enum EventDetailsCause {
AUTORECOVERY,
AUTOSCALE,
+ AUTOSCALE_V2,
REPLACE_BAD_NODES,
USER_REQUEST,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
index 70f27a572..8d1a96478 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
@@ -71,7 +71,6 @@ public enum TerminationReasonCode {
DOCKER_IMAGE_PULL_FAILURE,
DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION,
DOCKER_INVALID_OS_EXCEPTION,
- DRIVER_DNS_RESOLUTION_FAILURE,
DRIVER_EVICTION,
DRIVER_LAUNCH_TIMEOUT,
DRIVER_NODE_UNREACHABLE,
@@ -146,8 +145,6 @@ public enum TerminationReasonCode {
NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CONFIGURATION_FAILURE,
NFS_MOUNT_FAILURE,
- NO_ACTIVATED_K8S,
- NO_ACTIVATED_K8S_TESTING_TAG,
NO_MATCHED_K8S,
NO_MATCHED_K8S_TESTING_TAG,
NPIP_TUNNEL_SETUP_FAILURE,
@@ -160,7 +157,6 @@ public enum TerminationReasonCode {
SECRET_CREATION_FAILURE,
SECRET_PERMISSION_DENIED,
SECRET_RESOLUTION_ERROR,
- SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION,
SECURITY_DAEMON_REGISTRATION_EXCEPTION,
SELF_BOOTSTRAP_FAILURE,
SERVERLESS_LONG_RUNNING_TERMINATED,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
index 7bd915755..0cc4fdbf2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
@@ -110,6 +110,11 @@ public Wait<GenieMessage> createMessage(
response);
}
+ /** Creates a Genie space from a serialized payload. */
+ public GenieSpace createSpace(GenieCreateSpaceRequest request) {
+ return impl.createSpace(request);
+ }
+
public void deleteConversation(String spaceId, String conversationId) {
deleteConversation(
new GenieDeleteConversationRequest().setSpaceId(spaceId).setConversationId(conversationId));
@@ -286,6 +291,11 @@ public void trashSpace(GenieTrashSpaceRequest request) {
impl.trashSpace(request);
}
+ /** Updates a Genie space with a serialized payload. */
+ public GenieSpace updateSpace(GenieUpdateSpaceRequest request) {
+ return impl.updateSpace(request);
+ }
+
public GenieService impl() {
return impl;
}
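
Taken together with the `GenieCreateSpaceRequest`/`GenieUpdateSpaceRequest` payloads below, a minimal round-trip sketch; the serialized JSON, title, and warehouse ID are placeholders:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GenieCreateSpaceRequest;
import com.databricks.sdk.service.dashboards.GenieSpace;
import com.databricks.sdk.service.dashboards.GenieUpdateSpaceRequest;

public class GenieSpaceRoundTrip {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    String payload = "{...}"; // placeholder: serialized_space JSON from an exported space
    GenieSpace created =
        w.genie()
            .createSpace(
                new GenieCreateSpaceRequest()
                    .setSerializedSpace(payload)
                    .setTitle("Sales Genie space")
                    .setWarehouseId("ab12cd34ef56"));
    // updateSpace goes over PATCH, but serialized_space is documented as a full replacement.
    w.genie()
        .updateSpace(
            new GenieUpdateSpaceRequest()
                .setSpaceId(created.getSpaceId())
                .setSerializedSpace(payload)
                .setTitle("Sales Genie space (v2)"));
  }
}
```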
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java
new file mode 100755
index 000000000..bbd5e11b6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java
@@ -0,0 +1,109 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieCreateSpaceRequest {
+ /** Optional description */
+ @JsonProperty("description")
+ private String description;
+
+ /** Parent folder path where the space will be registered */
+ @JsonProperty("parent_path")
+ private String parentPath;
+
+ /**
+ * The contents of the Genie Space in serialized string form. Use the [Get Genie
+ * Space](:method:genie/getspace) API to retrieve an example response, which includes the
+ * `serialized_space` field. This field provides the structure of the JSON string that represents
+ * the space's layout and components.
+ */
+ @JsonProperty("serialized_space")
+ private String serializedSpace;
+
+ /** Optional title override */
+ @JsonProperty("title")
+ private String title;
+
+ /** Warehouse to associate with the new space */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public GenieCreateSpaceRequest setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public GenieCreateSpaceRequest setParentPath(String parentPath) {
+ this.parentPath = parentPath;
+ return this;
+ }
+
+ public String getParentPath() {
+ return parentPath;
+ }
+
+ public GenieCreateSpaceRequest setSerializedSpace(String serializedSpace) {
+ this.serializedSpace = serializedSpace;
+ return this;
+ }
+
+ public String getSerializedSpace() {
+ return serializedSpace;
+ }
+
+ public GenieCreateSpaceRequest setTitle(String title) {
+ this.title = title;
+ return this;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public GenieCreateSpaceRequest setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieCreateSpaceRequest that = (GenieCreateSpaceRequest) o;
+ return Objects.equals(description, that.description)
+ && Objects.equals(parentPath, that.parentPath)
+ && Objects.equals(serializedSpace, that.serializedSpace)
+ && Objects.equals(title, that.title)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(description, parentPath, serializedSpace, title, warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieCreateSpaceRequest.class)
+ .add("description", description)
+ .add("parentPath", parentPath)
+ .add("serializedSpace", serializedSpace)
+ .add("title", title)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java
index ce017eddc..bf2fbf82b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java
@@ -3,15 +3,33 @@
package com.databricks.sdk.service.dashboards;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
@Generated
public class GenieGetSpaceRequest {
+ /**
+ * Whether to include the serialized space export in the response. Requires at least CAN EDIT
+ * permission on the space.
+ */
+ @JsonIgnore
+ @QueryParam("include_serialized_space")
+ private Boolean includeSerializedSpace;
+
/** The ID associated with the Genie space */
@JsonIgnore private String spaceId;
+ public GenieGetSpaceRequest setIncludeSerializedSpace(Boolean includeSerializedSpace) {
+ this.includeSerializedSpace = includeSerializedSpace;
+ return this;
+ }
+
+ public Boolean getIncludeSerializedSpace() {
+ return includeSerializedSpace;
+ }
+
public GenieGetSpaceRequest setSpaceId(String spaceId) {
this.spaceId = spaceId;
return this;
@@ -26,16 +44,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GenieGetSpaceRequest that = (GenieGetSpaceRequest) o;
- return Objects.equals(spaceId, that.spaceId);
+ return Objects.equals(includeSerializedSpace, that.includeSerializedSpace)
+ && Objects.equals(spaceId, that.spaceId);
}
@Override
public int hashCode() {
- return Objects.hash(spaceId);
+ return Objects.hash(includeSerializedSpace, spaceId);
}
@Override
public String toString() {
- return new ToStringer(GenieGetSpaceRequest.class).add("spaceId", spaceId).toString();
+ return new ToStringer(GenieGetSpaceRequest.class)
+ .add("includeSerializedSpace", includeSerializedSpace)
+ .add("spaceId", spaceId)
+ .toString();
}
}
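
The new query parameter is what makes the export side of the flow possible; a sketch, assuming the generated `getSpace(GenieGetSpaceRequest)` overload (the space ID is illustrative):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GenieGetSpaceRequest;
import com.databricks.sdk.service.dashboards.GenieSpace;

public class ExportGenieSpace {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Requires at least CAN EDIT permission on the space, per the field javadoc.
    GenieSpace space =
        w.genie()
            .getSpace(
                new GenieGetSpaceRequest()
                    .setSpaceId("01ef0123456789abcdef0123456789ab")
                    .setIncludeSerializedSpace(true));
    System.out.println(space.getSerializedSpace());
  }
}
```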
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
index a8b46711e..3daab8509 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
@@ -34,6 +34,21 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request)
}
}
+ @Override
+ public GenieSpace createSpace(GenieCreateSpaceRequest request) {
+ String path = "/api/2.0/genie/spaces";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, GenieSpace.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void deleteConversation(GenieDeleteConversationRequest request) {
String path =
@@ -292,4 +307,19 @@ public void trashSpace(GenieTrashSpaceRequest request) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public GenieSpace updateSpace(GenieUpdateSpaceRequest request) {
+ String path = String.format("/api/2.0/genie/spaces/%s", request.getSpaceId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, GenieSpace.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
index d12aa918c..b11a502df 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
@@ -22,6 +22,9 @@ public interface GenieService {
GenieMessage createMessage(
GenieCreateConversationMessageRequest genieCreateConversationMessageRequest);
+ /** Creates a Genie space from a serialized payload. */
+ GenieSpace createSpace(GenieCreateSpaceRequest genieCreateSpaceRequest);
+
/** Delete a conversation. */
void deleteConversation(GenieDeleteConversationRequest genieDeleteConversationRequest);
@@ -90,4 +93,7 @@ GenieStartConversationResponse startConversation(
/** Move a Genie Space to the trash. */
void trashSpace(GenieTrashSpaceRequest genieTrashSpaceRequest);
+
+ /** Updates a Genie space with a serialized payload. */
+ GenieSpace updateSpace(GenieUpdateSpaceRequest genieUpdateSpaceRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java
index 73caf1963..ba43cf50a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSpace.java
@@ -13,6 +13,15 @@ public class GenieSpace {
@JsonProperty("description")
private String description;
+ /**
+ * The contents of the Genie Space in serialized string form. This field is excluded in List Genie
+ * spaces responses. Use the [Get Genie Space](:method:genie/getspace) API to retrieve an example
+ * response, which includes the `serialized_space` field. This field provides the structure of the
+ * JSON string that represents the space's layout and components.
+ */
+ @JsonProperty("serialized_space")
+ private String serializedSpace;
+
/** Genie space ID */
@JsonProperty("space_id")
private String spaceId;
@@ -34,6 +43,15 @@ public String getDescription() {
return description;
}
+ public GenieSpace setSerializedSpace(String serializedSpace) {
+ this.serializedSpace = serializedSpace;
+ return this;
+ }
+
+ public String getSerializedSpace() {
+ return serializedSpace;
+ }
+
public GenieSpace setSpaceId(String spaceId) {
this.spaceId = spaceId;
return this;
@@ -67,6 +85,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
GenieSpace that = (GenieSpace) o;
return Objects.equals(description, that.description)
+ && Objects.equals(serializedSpace, that.serializedSpace)
&& Objects.equals(spaceId, that.spaceId)
&& Objects.equals(title, that.title)
&& Objects.equals(warehouseId, that.warehouseId);
@@ -74,13 +93,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(description, spaceId, title, warehouseId);
+ return Objects.hash(description, serializedSpace, spaceId, title, warehouseId);
}
@Override
public String toString() {
return new ToStringer(GenieSpace.class)
.add("description", description)
+ .add("serializedSpace", serializedSpace)
.add("spaceId", spaceId)
.add("title", title)
.add("warehouseId", warehouseId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java
new file mode 100755
index 000000000..938ac63ba
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieUpdateSpaceRequest.java
@@ -0,0 +1,109 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieUpdateSpaceRequest {
+ /** Optional description */
+ @JsonProperty("description")
+ private String description;
+
+ /**
+ * The contents of the Genie Space in serialized string form (full replacement). Use the [Get
+ * Genie Space](:method:genie/getspace) API to retrieve an example response, which includes the
+ * `serialized_space` field. This field provides the structure of the JSON string that represents
+ * the space's layout and components.
+ */
+ @JsonProperty("serialized_space")
+ private String serializedSpace;
+
+ /** Genie space ID */
+ @JsonIgnore private String spaceId;
+
+ /** Optional title override */
+ @JsonProperty("title")
+ private String title;
+
+ /** Optional warehouse override */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public GenieUpdateSpaceRequest setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public GenieUpdateSpaceRequest setSerializedSpace(String serializedSpace) {
+ this.serializedSpace = serializedSpace;
+ return this;
+ }
+
+ public String getSerializedSpace() {
+ return serializedSpace;
+ }
+
+ public GenieUpdateSpaceRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ public GenieUpdateSpaceRequest setTitle(String title) {
+ this.title = title;
+ return this;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public GenieUpdateSpaceRequest setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieUpdateSpaceRequest that = (GenieUpdateSpaceRequest) o;
+ return Objects.equals(description, that.description)
+ && Objects.equals(serializedSpace, that.serializedSpace)
+ && Objects.equals(spaceId, that.spaceId)
+ && Objects.equals(title, that.title)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(description, serializedSpace, spaceId, title, warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieUpdateSpaceRequest.class)
+ .add("description", description)
+ .add("serializedSpace", serializedSpace)
+ .add("spaceId", spaceId)
+ .add("title", title)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
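A minimal usage sketch for the new update path, assuming a configured WorkspaceClient and its genie() accessor; the space ID and serialized JSON below are placeholders. Because serialized_space is a full replacement, the payload should start from the value returned by the Get Genie Space API.

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GenieSpace;
import com.databricks.sdk.service.dashboards.GenieUpdateSpaceRequest;

public class GenieUpdateSpaceExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // resolves auth from the environment

    // Placeholder payload; in practice, copy serialized_space from a prior Get Genie Space call.
    String serialized = "{\"version\": 1}";

    GenieSpace updated =
        w.genie()
            .updateSpace(
                new GenieUpdateSpaceRequest()
                    .setSpaceId("0123456789abcdef") // hypothetical space ID
                    .setTitle("Sales Genie (v2)")
                    .setSerializedSpace(serialized)); // full replacement of the space contents

    System.out.println(updated.getSpaceId() + " -> " + updated.getTitle());
  }
}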
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java
index 75e28eb70..699e88ae0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java
@@ -36,7 +36,6 @@ public enum MessageErrorType {
INTERNAL_CATALOG_PATH_OVERLAP_EXCEPTION,
INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION,
INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION,
- INVALID_CHAT_COMPLETION_ARGUMENTS_JSON_EXCEPTION,
INVALID_CHAT_COMPLETION_JSON_EXCEPTION,
INVALID_COMPLETION_REQUEST_EXCEPTION,
INVALID_FUNCTION_CALL_EXCEPTION,
@@ -64,6 +63,7 @@ public enum MessageErrorType {
TOO_MANY_TABLES_EXCEPTION,
UNEXPECTED_REPLY_PROCESS_EXCEPTION,
UNKNOWN_AI_MODEL,
+ UNSUPPORTED_CONVERSATION_TYPE_EXCEPTION,
WAREHOUSE_ACCESS_MISSING_EXCEPTION,
WAREHOUSE_NOT_FOUND_EXCEPTION,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java
index d1401e1d7..f0ca381eb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java
@@ -17,6 +17,10 @@ public class TextAttachment {
@JsonProperty("id")
private String id;
+ /** Purpose/intent of this text attachment */
+ @JsonProperty("purpose")
+ private TextAttachmentPurpose purpose;
+
public TextAttachment setContent(String content) {
this.content = content;
return this;
@@ -35,21 +39,36 @@ public String getId() {
return id;
}
+ public TextAttachment setPurpose(TextAttachmentPurpose purpose) {
+ this.purpose = purpose;
+ return this;
+ }
+
+ public TextAttachmentPurpose getPurpose() {
+ return purpose;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TextAttachment that = (TextAttachment) o;
- return Objects.equals(content, that.content) && Objects.equals(id, that.id);
+ return Objects.equals(content, that.content)
+ && Objects.equals(id, that.id)
+ && Objects.equals(purpose, that.purpose);
}
@Override
public int hashCode() {
- return Objects.hash(content, id);
+ return Objects.hash(content, id, purpose);
}
@Override
public String toString() {
- return new ToStringer(TextAttachment.class).add("content", content).add("id", id).toString();
+ return new ToStringer(TextAttachment.class)
+ .add("content", content)
+ .add("id", id)
+ .add("purpose", purpose)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachmentPurpose.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachmentPurpose.java
new file mode 100755
index 000000000..b372231ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachmentPurpose.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+/** Purpose/intent of a text attachment */
+@Generated
+public enum TextAttachmentPurpose {
+ FOLLOW_UP_QUESTION,
+}
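Downstream consumers can branch on the new purpose value to render follow-up questions differently from ordinary answer text. A small illustrative sketch; the attachment is constructed inline here, whereas in practice it arrives on a Genie message response:

import com.databricks.sdk.service.dashboards.TextAttachment;
import com.databricks.sdk.service.dashboards.TextAttachmentPurpose;

public class TextAttachmentPurposeExample {
  public static void main(String[] args) {
    // In practice this object comes from a Genie message response.
    TextAttachment attachment =
        new TextAttachment()
            .setId("att-1") // hypothetical attachment ID
            .setContent("Would you like the same report broken down by month?")
            .setPurpose(TextAttachmentPurpose.FOLLOW_UP_QUESTION);

    if (attachment.getPurpose() == TextAttachmentPurpose.FOLLOW_UP_QUESTION) {
      // Render as a clickable suggestion instead of plain answer text.
      System.out.println("Suggested follow-up: " + attachment.getContent());
    }
  }
}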
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java
index f3a3befad..c60a9ec9f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java
@@ -13,6 +13,10 @@
*/
@Generated
public class NewPipelineSpec {
+ /** Budget policy to set on the newly created pipeline. */
+ @JsonProperty("budget_policy_id")
+ private String budgetPolicyId;
+
/**
* This field needs to be specified if the destination catalog is a managed postgres catalog.
*
@@ -31,6 +35,15 @@ public class NewPipelineSpec {
@JsonProperty("storage_schema")
private String storageSchema;
+ public NewPipelineSpec setBudgetPolicyId(String budgetPolicyId) {
+ this.budgetPolicyId = budgetPolicyId;
+ return this;
+ }
+
+ public String getBudgetPolicyId() {
+ return budgetPolicyId;
+ }
+
public NewPipelineSpec setStorageCatalog(String storageCatalog) {
this.storageCatalog = storageCatalog;
return this;
@@ -54,18 +67,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
NewPipelineSpec that = (NewPipelineSpec) o;
- return Objects.equals(storageCatalog, that.storageCatalog)
+ return Objects.equals(budgetPolicyId, that.budgetPolicyId)
+ && Objects.equals(storageCatalog, that.storageCatalog)
&& Objects.equals(storageSchema, that.storageSchema);
}
@Override
public int hashCode() {
- return Objects.hash(storageCatalog, storageSchema);
+ return Objects.hash(budgetPolicyId, storageCatalog, storageSchema);
}
@Override
public String toString() {
return new ToStringer(NewPipelineSpec.class)
+ .add("budgetPolicyId", budgetPolicyId)
.add("storageCatalog", storageCatalog)
.add("storageSchema", storageSchema)
.toString();
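A sketch of pinning a newly created pipeline to a budget policy; the policy ID and catalog/schema names below are placeholders:

import com.databricks.sdk.service.database.NewPipelineSpec;

public class NewPipelineSpecExample {
  public static void main(String[] args) {
    NewPipelineSpec spec =
        new NewPipelineSpec()
            .setBudgetPolicyId("budget-policy-1234") // hypothetical budget policy ID
            .setStorageCatalog("main")
            .setStorageSchema("synced_tables");

    System.out.println(spec);
  }
}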
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2API.java
index 919600f29..570dce648 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2API.java
@@ -58,8 +58,9 @@ public AccountGroup get(GetAccountGroupRequest request) {
/**
* Gets all details of the groups associated with the Databricks account. As of 08/22/2025, this
- * endpoint will not return members. Instead, members should be retrieved by iterating through
- * `Get group details`.
+ * endpoint will no longer return members. Instead, members should be retrieved by iterating
+ * through `Get group details`. Existing accounts that rely on this attribute will not be impacted
+ * and will continue receiving member data as before.
*/
public Iterable<AccountGroup> list(ListAccountGroupsRequest request) {
request.setStartIndex(1L);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2Service.java
index b11eb8bce..ddb34d517 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2Service.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsV2Service.java
@@ -30,8 +30,9 @@ public interface AccountGroupsV2Service {
/**
* Gets all details of the groups associated with the Databricks account. As of 08/22/2025, this
- * endpoint will not return members. Instead, members should be retrieved by iterating through
- * `Get group details`.
+ * endpoint will no longer return members. Instead, members should be retrieved by iterating
+ * through `Get group details`. Existing accounts that rely on this attribute will not be impacted
+ * and will continue receiving member data as before.
*/
ListAccountGroupsResponse list(ListAccountGroupsRequest listAccountGroupsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
index c38a239bd..d02f5af0e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
@@ -58,10 +58,10 @@ public class CreateJob {
/**
* A list of task execution environment specifications that can be referenced by serverless tasks
- * of this job. An environment is required to be present for serverless tasks. For serverless
- * notebook tasks, the environment is accessible in the notebook environment panel. For other
- * serverless tasks, the task environment is required to be specified using environment_key in the
- * task settings.
+ * of this job. For serverless notebook tasks, if the environment_key is not specified, the
+ * notebook environment will be used if present. If a jobs environment is specified, it will
+ * override the notebook environment. For other serverless tasks, the task environment is required
+ * to be specified using environment_key in the task settings.
*/
@JsonProperty("environments")
private Collection<JobEnvironment> environments;
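
Under the revised contract, a jobs-level environment referenced by environment_key takes precedence over the notebook environment. A sketch, assuming the JobEnvironment, Task, and NotebookTask types from this package; the names, keys, and path are placeholders, and the environment spec is elided:

import com.databricks.sdk.service.jobs.CreateJob;
import com.databricks.sdk.service.jobs.JobEnvironment;
import com.databricks.sdk.service.jobs.NotebookTask;
import com.databricks.sdk.service.jobs.Task;
import java.util.Arrays;

public class ServerlessEnvironmentExample {
  public static void main(String[] args) {
    CreateJob job =
        new CreateJob()
            .setName("nightly-report")
            .setEnvironments(
                Arrays.asList(new JobEnvironment().setEnvironmentKey("default"))) // spec elided
            .setTasks(
                Arrays.asList(
                    new Task()
                        .setTaskKey("report")
                        .setEnvironmentKey("default") // overrides any notebook environment
                        .setNotebookTask(
                            new NotebookTask().setNotebookPath("/Workspace/reports/nightly"))));

    System.out.println(job.getName());
  }
}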
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
index d0d2ad7ee..77564da22 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
@@ -54,10 +54,10 @@ public class JobSettings {
/**
* A list of task execution environment specifications that can be referenced by serverless tasks
- * of this job. An environment is required to be present for serverless tasks. For serverless
- * notebook tasks, the environment is accessible in the notebook environment panel. For other
- * serverless tasks, the task environment is required to be specified using environment_key in the
- * task settings.
+ * of this job. For serverless notebook tasks, if the environment_key is not specified, the
+ * notebook environment will be used if present. If a jobs environment is specified, it will
+ * override the notebook environment. For other serverless tasks, the task environment is required
+ * to be specified using environment_key in the task settings.
*/
@JsonProperty("environments")
private Collection<JobEnvironment> environments;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
index 30b5a5c6a..6f842fde6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java
@@ -21,7 +21,7 @@
* multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster
* management, monitoring, and error reporting for all of your jobs. You can run your jobs
* immediately or periodically through an easy-to-use scheduling system. You can implement job tasks
- * using notebooks, JARS, Delta Live Tables pipelines, or Python, Scala, Spark submit, and Java
+ * using notebooks, JARS, Spark Declarative Pipelines, or Python, Scala, Spark submit, and Java
* applications.
*
* <p>You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
index 951b35a5c..58e6f635b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsService.java
@@ -11,7 +11,7 @@
* multi-task workflow with complex dependencies. Databricks manages the task orchestration, cluster
* management, monitoring, and error reporting for all of your jobs. You can run your jobs
* immediately or periodically through an easy-to-use scheduling system. You can implement job tasks
- * using notebooks, JARS, Delta Live Tables pipelines, or Python, Scala, Spark submit, and Java
+ * using notebooks, JARS, Spark Declarative Pipelines, or Python, Scala, Spark submit, and Java
* applications.
*
* <p>You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java
new file mode 100755
index 000000000..36452e42e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java
@@ -0,0 +1,125 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ModelTriggerConfiguration {
+ /**
+ * Aliases of the model versions to monitor. Can only be used in conjunction with condition
+ * MODEL_ALIAS_SET.
+ */
+ @JsonProperty("aliases")
+ private Collection<String> aliases;
+
+ /** The condition based on which to trigger a job run. */
+ @JsonProperty("condition")
+ private ModelTriggerConfigurationCondition condition;
+
+ /**
+ * If set, the trigger starts a run only after the specified amount of time has passed since the
+ * last time the trigger fired. The minimum allowed value is 60 seconds.
+ */
+ @JsonProperty("min_time_between_triggers_seconds")
+ private Long minTimeBetweenTriggersSeconds;
+
+ /**
+ * Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level
+ * triggers, "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of
+ * metastore-level triggers.
+ */
+ @JsonProperty("securable_name")
+ private String securableName;
+
+ /**
+ * If set, the trigger starts a run only after no model updates have occurred for the specified
+ * time and can be used to wait for a series of model updates before triggering a run. The minimum
+ * allowed value is 60 seconds.
+ */
+ @JsonProperty("wait_after_last_change_seconds")
+ private Long waitAfterLastChangeSeconds;
+
+ public ModelTriggerConfiguration setAliases(Collection<String> aliases) {
+ this.aliases = aliases;
+ return this;
+ }
+
+ public Collection<String> getAliases() {
+ return aliases;
+ }
+
+ public ModelTriggerConfiguration setCondition(ModelTriggerConfigurationCondition condition) {
+ this.condition = condition;
+ return this;
+ }
+
+ public ModelTriggerConfigurationCondition getCondition() {
+ return condition;
+ }
+
+ public ModelTriggerConfiguration setMinTimeBetweenTriggersSeconds(
+ Long minTimeBetweenTriggersSeconds) {
+ this.minTimeBetweenTriggersSeconds = minTimeBetweenTriggersSeconds;
+ return this;
+ }
+
+ public Long getMinTimeBetweenTriggersSeconds() {
+ return minTimeBetweenTriggersSeconds;
+ }
+
+ public ModelTriggerConfiguration setSecurableName(String securableName) {
+ this.securableName = securableName;
+ return this;
+ }
+
+ public String getSecurableName() {
+ return securableName;
+ }
+
+ public ModelTriggerConfiguration setWaitAfterLastChangeSeconds(Long waitAfterLastChangeSeconds) {
+ this.waitAfterLastChangeSeconds = waitAfterLastChangeSeconds;
+ return this;
+ }
+
+ public Long getWaitAfterLastChangeSeconds() {
+ return waitAfterLastChangeSeconds;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ModelTriggerConfiguration that = (ModelTriggerConfiguration) o;
+ return Objects.equals(aliases, that.aliases)
+ && Objects.equals(condition, that.condition)
+ && Objects.equals(minTimeBetweenTriggersSeconds, that.minTimeBetweenTriggersSeconds)
+ && Objects.equals(securableName, that.securableName)
+ && Objects.equals(waitAfterLastChangeSeconds, that.waitAfterLastChangeSeconds);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ aliases,
+ condition,
+ minTimeBetweenTriggersSeconds,
+ securableName,
+ waitAfterLastChangeSeconds);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ModelTriggerConfiguration.class)
+ .add("aliases", aliases)
+ .add("condition", condition)
+ .add("minTimeBetweenTriggersSeconds", minTimeBetweenTriggersSeconds)
+ .add("securableName", securableName)
+ .add("waitAfterLastChangeSeconds", waitAfterLastChangeSeconds)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java
new file mode 100755
index 000000000..263b649fa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum ModelTriggerConfigurationCondition {
+ MODEL_ALIAS_SET,
+ MODEL_CREATED,
+ MODEL_VERSION_READY,
+}
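Together, these two types let a job fire when a Unity Catalog model changes state, via the model field on TriggerSettings shown in the next file. A sketch of a trigger that fires when the "champion" alias is set on a model; the securable name and alias are placeholders:

import com.databricks.sdk.service.jobs.ModelTriggerConfiguration;
import com.databricks.sdk.service.jobs.ModelTriggerConfigurationCondition;
import com.databricks.sdk.service.jobs.PauseStatus;
import com.databricks.sdk.service.jobs.TriggerSettings;
import java.util.Arrays;

public class ModelTriggerExample {
  public static void main(String[] args) {
    TriggerSettings trigger =
        new TriggerSettings()
            .setPauseStatus(PauseStatus.UNPAUSED)
            .setModel(
                new ModelTriggerConfiguration()
                    .setSecurableName("mycatalog.myschema.mymodel") // model-level trigger
                    .setCondition(ModelTriggerConfigurationCondition.MODEL_ALIAS_SET)
                    .setAliases(Arrays.asList("champion")) // only valid with MODEL_ALIAS_SET
                    .setMinTimeBetweenTriggersSeconds(600L)); // minimum allowed value is 60

    System.out.println(trigger);
  }
}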
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java
index 3753c0c6c..c77cfda4c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java
@@ -13,6 +13,10 @@ public class TriggerSettings {
@JsonProperty("file_arrival")
private FileArrivalTriggerConfiguration fileArrival;
+ /** */
+ @JsonProperty("model")
+ private ModelTriggerConfiguration model;
+
/** Whether this trigger is paused or not. */
@JsonProperty("pause_status")
private PauseStatus pauseStatus;
@@ -34,6 +38,15 @@ public FileArrivalTriggerConfiguration getFileArrival() {
return fileArrival;
}
+ public TriggerSettings setModel(ModelTriggerConfiguration model) {
+ this.model = model;
+ return this;
+ }
+
+ public ModelTriggerConfiguration getModel() {
+ return model;
+ }
+
public TriggerSettings setPauseStatus(PauseStatus pauseStatus) {
this.pauseStatus = pauseStatus;
return this;
@@ -67,6 +80,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
TriggerSettings that = (TriggerSettings) o;
return Objects.equals(fileArrival, that.fileArrival)
+ && Objects.equals(model, that.model)
&& Objects.equals(pauseStatus, that.pauseStatus)
&& Objects.equals(periodic, that.periodic)
&& Objects.equals(tableUpdate, that.tableUpdate);
@@ -74,13 +88,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(fileArrival, pauseStatus, periodic, tableUpdate);
+ return Objects.hash(fileArrival, model, pauseStatus, periodic, tableUpdate);
}
@Override
public String toString() {
return new ToStringer(TriggerSettings.class)
.add("fileArrival", fileArrival)
+ .add("model", model)
.add("pauseStatus", pauseStatus)
.add("periodic", periodic)
.add("tableUpdate", tableUpdate)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java
index 11e2e383e..202ae79fb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java
@@ -57,7 +57,10 @@ public class PersonalizationRequest {
@JsonProperty("recipient_type")
private DeltaSharingRecipientType recipientType;
- /** */
+ /**
+ * Share information is required for data listings but should be empty/ignored for non-data
+ * listings (MCP and App).
+ */
@JsonProperty("share")
private ShareInfo share;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java
index b9c15bbc0..ee50a32fd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java
@@ -6,7 +6,6 @@
import com.databricks.sdk.core.http.Request;
import com.databricks.sdk.support.Generated;
import java.io.IOException;
-import java.util.UUID;
/** Package-local implementation of ProviderPersonalizationRequests */
@Generated
@@ -41,9 +40,6 @@ public UpdatePersonalizationRequestResponse update(UpdatePersonalizationRequestR
try {
Request req = new Request("PUT", path, apiClient.serialize(request));
- if (request.getRequestId() == null || request.getRequestId().isEmpty()) {
- request.setRequestId(UUID.randomUUID().toString());
- }
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/AuthConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/AuthConfig.java
new file mode 100755
index 000000000..eb4fec399
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/AuthConfig.java
@@ -0,0 +1,47 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AuthConfig {
+ /**
+ * Name of the Unity Catalog service credential. This value will be set under the option
+ * databricks.serviceCredential.
+ */
+ @JsonProperty("uc_service_credential_name")
+ private String ucServiceCredentialName;
+
+ public AuthConfig setUcServiceCredentialName(String ucServiceCredentialName) {
+ this.ucServiceCredentialName = ucServiceCredentialName;
+ return this;
+ }
+
+ public String getUcServiceCredentialName() {
+ return ucServiceCredentialName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AuthConfig that = (AuthConfig) o;
+ return Objects.equals(ucServiceCredentialName, that.ucServiceCredentialName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(ucServiceCredentialName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AuthConfig.class)
+ .add("ucServiceCredentialName", ucServiceCredentialName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BatchCreateMaterializedFeaturesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BatchCreateMaterializedFeaturesRequest.java
new file mode 100755
index 000000000..b2dbe55d5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BatchCreateMaterializedFeaturesRequest.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class BatchCreateMaterializedFeaturesRequest {
+ /** The requests to create materialized features. */
+ @JsonProperty("requests")
+ private Collection<CreateMaterializedFeatureRequest> requests;
+
+ public BatchCreateMaterializedFeaturesRequest setRequests(
+ Collection<CreateMaterializedFeatureRequest> requests) {
+ this.requests = requests;
+ return this;
+ }
+
+ public Collection<CreateMaterializedFeatureRequest> getRequests() {
+ return requests;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ BatchCreateMaterializedFeaturesRequest that = (BatchCreateMaterializedFeaturesRequest) o;
+ return Objects.equals(requests, that.requests);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(requests);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(BatchCreateMaterializedFeaturesRequest.class)
+ .add("requests", requests)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BatchCreateMaterializedFeaturesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BatchCreateMaterializedFeaturesResponse.java
new file mode 100755
index 000000000..f9f51ae3c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/BatchCreateMaterializedFeaturesResponse.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class BatchCreateMaterializedFeaturesResponse {
+ /** The created materialized features with assigned IDs. */
+ @JsonProperty("materialized_features")
+ private Collection<MaterializedFeature> materializedFeatures;
+
+ public BatchCreateMaterializedFeaturesResponse setMaterializedFeatures(
+ Collection<MaterializedFeature> materializedFeatures) {
+ this.materializedFeatures = materializedFeatures;
+ return this;
+ }
+
+ public Collection<MaterializedFeature> getMaterializedFeatures() {
+ return materializedFeatures;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ BatchCreateMaterializedFeaturesResponse that = (BatchCreateMaterializedFeaturesResponse) o;
+ return Objects.equals(materializedFeatures, that.materializedFeatures);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(materializedFeatures);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(BatchCreateMaterializedFeaturesResponse.class)
+ .add("materializedFeatures", materializedFeatures)
+ .toString();
+ }
+}
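A sketch of the batch call, assuming the featureEngineering() accessor on WorkspaceClient and that each element mirrors the payload of the singular createMaterializedFeature call; the per-element fields are not part of this diff and are left unset:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.BatchCreateMaterializedFeaturesRequest;
import com.databricks.sdk.service.ml.BatchCreateMaterializedFeaturesResponse;
import com.databricks.sdk.service.ml.CreateMaterializedFeatureRequest;
import java.util.Arrays;

public class BatchCreateMaterializedFeaturesExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Payload fields for each element are not shown in this diff, so they stay unset here.
    CreateMaterializedFeatureRequest first = new CreateMaterializedFeatureRequest();
    CreateMaterializedFeatureRequest second = new CreateMaterializedFeatureRequest();

    BatchCreateMaterializedFeaturesResponse created =
        w.featureEngineering()
            .batchCreateMaterializedFeatures(
                new BatchCreateMaterializedFeaturesRequest()
                    .setRequests(Arrays.asList(first, second)));

    // Each created feature comes back with its assigned ID.
    created.getMaterializedFeatures().forEach(System.out::println);
  }
}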
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ColumnIdentifier.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ColumnIdentifier.java
new file mode 100755
index 000000000..d5b5558dd
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ColumnIdentifier.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ColumnIdentifier {
+ /**
+ * String representation of the column name or variant expression path. For nested fields, the
+ * leaf value is what will be present in materialized tables and expected to match at query time.
+ * For example, the leaf node of value:trip_details.location_details.pickup_zip is pickup_zip.
+ */
+ @JsonProperty("variant_expr_path")
+ private String variantExprPath;
+
+ public ColumnIdentifier setVariantExprPath(String variantExprPath) {
+ this.variantExprPath = variantExprPath;
+ return this;
+ }
+
+ public String getVariantExprPath() {
+ return variantExprPath;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ColumnIdentifier that = (ColumnIdentifier) o;
+ return Objects.equals(variantExprPath, that.variantExprPath);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(variantExprPath);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ColumnIdentifier.class)
+ .add("variantExprPath", variantExprPath)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateKafkaConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateKafkaConfigRequest.java
new file mode 100755
index 000000000..1cc444f91
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateKafkaConfigRequest.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateKafkaConfigRequest {
+ /** */
+ @JsonProperty("kafka_config")
+ private KafkaConfig kafkaConfig;
+
+ public CreateKafkaConfigRequest setKafkaConfig(KafkaConfig kafkaConfig) {
+ this.kafkaConfig = kafkaConfig;
+ return this;
+ }
+
+ public KafkaConfig getKafkaConfig() {
+ return kafkaConfig;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateKafkaConfigRequest that = (CreateKafkaConfigRequest) o;
+ return Objects.equals(kafkaConfig, that.kafkaConfig);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(kafkaConfig);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateKafkaConfigRequest.class)
+ .add("kafkaConfig", kafkaConfig)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DataSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DataSource.java
index 389d2547b..722123889 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DataSource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DataSource.java
@@ -13,6 +13,10 @@ public class DataSource {
@JsonProperty("delta_table_source")
private DeltaTableSource deltaTableSource;
+ /** */
+ @JsonProperty("kafka_source")
+ private KafkaSource kafkaSource;
+
public DataSource setDeltaTableSource(DeltaTableSource deltaTableSource) {
this.deltaTableSource = deltaTableSource;
return this;
@@ -22,21 +26,34 @@ public DeltaTableSource getDeltaTableSource() {
return deltaTableSource;
}
+ public DataSource setKafkaSource(KafkaSource kafkaSource) {
+ this.kafkaSource = kafkaSource;
+ return this;
+ }
+
+ public KafkaSource getKafkaSource() {
+ return kafkaSource;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DataSource that = (DataSource) o;
- return Objects.equals(deltaTableSource, that.deltaTableSource);
+ return Objects.equals(deltaTableSource, that.deltaTableSource)
+ && Objects.equals(kafkaSource, that.kafkaSource);
}
@Override
public int hashCode() {
- return Objects.hash(deltaTableSource);
+ return Objects.hash(deltaTableSource, kafkaSource);
}
@Override
public String toString() {
- return new ToStringer(DataSource.class).add("deltaTableSource", deltaTableSource).toString();
+ return new ToStringer(DataSource.class)
+ .add("deltaTableSource", deltaTableSource)
+ .add("kafkaSource", kafkaSource)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteKafkaConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteKafkaConfigRequest.java
new file mode 100755
index 000000000..f07b63b43
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteKafkaConfigRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteKafkaConfigRequest {
+ /** Name of the Kafka config to delete. */
+ @JsonIgnore private String name;
+
+ public DeleteKafkaConfigRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteKafkaConfigRequest that = (DeleteKafkaConfigRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteKafkaConfigRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineTableRequest.java
new file mode 100755
index 000000000..4a956fcbf
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteOnlineTableRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteOnlineTableRequest {
+ /** The full three-part (catalog, schema, table) name of the online table. */
+ @JsonIgnore private String onlineTableName;
+
+ public DeleteOnlineTableRequest setOnlineTableName(String onlineTableName) {
+ this.onlineTableName = onlineTableName;
+ return this;
+ }
+
+ public String getOnlineTableName() {
+ return onlineTableName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteOnlineTableRequest that = (DeleteOnlineTableRequest) o;
+ return Objects.equals(onlineTableName, that.onlineTableName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(onlineTableName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteOnlineTableRequest.class)
+ .add("onlineTableName", onlineTableName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java
index a95df6e9e..e93ad03a4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java
@@ -30,6 +30,16 @@ public class Feature {
@JsonProperty("inputs")
private Collection<String> inputs;
+ /**
+ * WARNING: This field is primarily intended for internal use by Databricks systems and is
+ * automatically populated when features are created through Databricks notebooks or jobs. Users
+ * should not manually set this field as incorrect values may lead to inaccurate lineage tracking
+ * or unexpected behavior. This field will be set by the feature-engineering client and should
+ * be left unset by SDK and Terraform users.
+ */
+ @JsonProperty("lineage_context")
+ private LineageContext lineageContext;
+
/** The data source of the feature. */
@JsonProperty("source")
private DataSource source;
@@ -83,6 +93,15 @@ public Collection<String> getInputs() {
return inputs;
}
+ public Feature setLineageContext(LineageContext lineageContext) {
+ this.lineageContext = lineageContext;
+ return this;
+ }
+
+ public LineageContext getLineageContext() {
+ return lineageContext;
+ }
+
public Feature setSource(DataSource source) {
this.source = source;
return this;
@@ -111,6 +130,7 @@ public boolean equals(Object o) {
&& Objects.equals(fullName, that.fullName)
&& Objects.equals(function, that.function)
&& Objects.equals(inputs, that.inputs)
+ && Objects.equals(lineageContext, that.lineageContext)
&& Objects.equals(source, that.source)
&& Objects.equals(timeWindow, that.timeWindow);
}
@@ -118,7 +138,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- description, filterCondition, fullName, function, inputs, source, timeWindow);
+ description,
+ filterCondition,
+ fullName,
+ function,
+ inputs,
+ lineageContext,
+ source,
+ timeWindow);
}
@Override
@@ -129,6 +156,7 @@ public String toString() {
.add("fullName", fullName)
.add("function", function)
.add("inputs", inputs)
+ .add("lineageContext", lineageContext)
.add("source", source)
.add("timeWindow", timeWindow)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java
index 87f248c97..14ebda0b2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java
@@ -24,11 +24,22 @@ public FeatureEngineeringAPI(FeatureEngineeringService mock) {
impl = mock;
}
+ /** Batch create materialized features. */
+ public BatchCreateMaterializedFeaturesResponse batchCreateMaterializedFeatures(
+ BatchCreateMaterializedFeaturesRequest request) {
+ return impl.batchCreateMaterializedFeatures(request);
+ }
+
/** Create a Feature. */
public Feature createFeature(CreateFeatureRequest request) {
return impl.createFeature(request);
}
+ /** Create a Kafka config. */
+ public KafkaConfig createKafkaConfig(CreateKafkaConfigRequest request) {
+ return impl.createKafkaConfig(request);
+ }
+
/** Create a materialized feature. */
public MaterializedFeature createMaterializedFeature(CreateMaterializedFeatureRequest request) {
return impl.createMaterializedFeature(request);
@@ -43,6 +54,15 @@ public void deleteFeature(DeleteFeatureRequest request) {
impl.deleteFeature(request);
}
+ public void deleteKafkaConfig(String name) {
+ deleteKafkaConfig(new DeleteKafkaConfigRequest().setName(name));
+ }
+
+ /** Delete a Kafka config. */
+ public void deleteKafkaConfig(DeleteKafkaConfigRequest request) {
+ impl.deleteKafkaConfig(request);
+ }
+
public void deleteMaterializedFeature(String materializedFeatureId) {
deleteMaterializedFeature(
new DeleteMaterializedFeatureRequest().setMaterializedFeatureId(materializedFeatureId));
@@ -62,6 +82,15 @@ public Feature getFeature(GetFeatureRequest request) {
return impl.getFeature(request);
}
+ public KafkaConfig getKafkaConfig(String name) {
+ return getKafkaConfig(new GetKafkaConfigRequest().setName(name));
+ }
+
+ /** Get a Kafka config. */
+ public KafkaConfig getKafkaConfig(GetKafkaConfigRequest request) {
+ return impl.getKafkaConfig(request);
+ }
+
public MaterializedFeature getMaterializedFeature(String materializedFeatureId) {
return getMaterializedFeature(
new GetMaterializedFeatureRequest().setMaterializedFeatureId(materializedFeatureId));
@@ -87,6 +116,21 @@ public Iterable<Feature> listFeatures(ListFeaturesRequest request) {
});
}
+ /** List Kafka configs. */
+ public Iterable<KafkaConfig> listKafkaConfigs(ListKafkaConfigsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listKafkaConfigs,
+ ListKafkaConfigsResponse::getKafkaConfigs,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
/** List materialized features. */
public Iterable<MaterializedFeature> listMaterializedFeatures(
ListMaterializedFeaturesRequest request) {
@@ -108,6 +152,11 @@ public Feature updateFeature(UpdateFeatureRequest request) {
return impl.updateFeature(request);
}
+ /** Update a Kafka config. */
+ public KafkaConfig updateKafkaConfig(UpdateKafkaConfigRequest request) {
+ return impl.updateKafkaConfig(request);
+ }
+
/** Update a materialized feature (pause/resume). */
public MaterializedFeature updateMaterializedFeature(UpdateMaterializedFeatureRequest request) {
return impl.updateMaterializedFeature(request);
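End to end, the new Kafka config surface is a standard CRUD set. A sketch, assuming the featureEngineering() accessor on WorkspaceClient; the config name, broker addresses, and credential name are placeholders:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.AuthConfig;
import com.databricks.sdk.service.ml.CreateKafkaConfigRequest;
import com.databricks.sdk.service.ml.KafkaConfig;
import com.databricks.sdk.service.ml.ListKafkaConfigsRequest;

public class KafkaConfigCrudExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    KafkaConfig created =
        w.featureEngineering()
            .createKafkaConfig(
                new CreateKafkaConfigRequest()
                    .setKafkaConfig(
                        new KafkaConfig()
                            .setName("clickstream") // metastore-unique name, not the topic name
                            .setBootstrapServers("broker-1:9092,broker-2:9092")
                            .setAuthConfig(
                                new AuthConfig()
                                    .setUcServiceCredentialName("kafka-cred")))); // UC credential

    for (KafkaConfig c : w.featureEngineering().listKafkaConfigs(new ListKafkaConfigsRequest())) {
      System.out.println(c.getName());
    }

    w.featureEngineering().deleteKafkaConfig(created.getName()); // convenience overload above
  }
}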
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringImpl.java
index 227950793..4b26f4703 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringImpl.java
@@ -16,6 +16,22 @@ public FeatureEngineeringImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ @Override
+ public BatchCreateMaterializedFeaturesResponse batchCreateMaterializedFeatures(
+ BatchCreateMaterializedFeaturesRequest request) {
+ String path = "/api/2.0/feature-engineering/materialized-features:batchCreate";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, BatchCreateMaterializedFeaturesResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public Feature createFeature(CreateFeatureRequest request) {
String path = "/api/2.0/feature-engineering/features";
@@ -31,6 +47,21 @@ public Feature createFeature(CreateFeatureRequest request) {
}
}
+ @Override
+ public KafkaConfig createKafkaConfig(CreateKafkaConfigRequest request) {
+ String path = "/api/2.0/feature-engineering/features/kafka-configs";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getKafkaConfig()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, KafkaConfig.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public MaterializedFeature createMaterializedFeature(CreateMaterializedFeatureRequest request) {
String path = "/api/2.0/feature-engineering/materialized-features";
@@ -61,6 +92,22 @@ public void deleteFeature(DeleteFeatureRequest request) {
}
}
+ @Override
+ public void deleteKafkaConfig(DeleteKafkaConfigRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/feature-engineering/features/kafka-configs/kafka/%s", request.getName());
+ try {
+ Request req = new Request("DELETE", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void deleteMaterializedFeature(DeleteMaterializedFeatureRequest request) {
String path =
@@ -92,6 +139,21 @@ public Feature getFeature(GetFeatureRequest request) {
}
}
+ @Override
+ public KafkaConfig getKafkaConfig(GetKafkaConfigRequest request) {
+ String path =
+ String.format("/api/2.0/feature-engineering/features/kafka-configs/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, KafkaConfig.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public MaterializedFeature getMaterializedFeature(GetMaterializedFeatureRequest request) {
String path =
@@ -123,6 +185,20 @@ public ListFeaturesResponse listFeatures(ListFeaturesRequest request) {
}
}
+ @Override
+ public ListKafkaConfigsResponse listKafkaConfigs(ListKafkaConfigsRequest request) {
+ String path = "/api/2.0/feature-engineering/features/kafka-configs";
+ try {
+ Request req = new Request("GET", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListKafkaConfigsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public ListMaterializedFeaturesResponse listMaterializedFeatures(
ListMaterializedFeaturesRequest request) {
@@ -153,6 +229,22 @@ public Feature updateFeature(UpdateFeatureRequest request) {
}
}
+ @Override
+ public KafkaConfig updateKafkaConfig(UpdateKafkaConfigRequest request) {
+ String path =
+ String.format("/api/2.0/feature-engineering/features/kafka-configs/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getKafkaConfig()));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, KafkaConfig.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public MaterializedFeature updateMaterializedFeature(UpdateMaterializedFeatureRequest request) {
String path =
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringService.java
index 4d99afef5..5a6d11387 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringService.java
@@ -12,9 +12,16 @@
*/
@Generated
public interface FeatureEngineeringService {
+ /** Batch create materialized features. */
+ BatchCreateMaterializedFeaturesResponse batchCreateMaterializedFeatures(
+ BatchCreateMaterializedFeaturesRequest batchCreateMaterializedFeaturesRequest);
+
/** Create a Feature. */
Feature createFeature(CreateFeatureRequest createFeatureRequest);
+ /** Create a Kafka config. */
+ KafkaConfig createKafkaConfig(CreateKafkaConfigRequest createKafkaConfigRequest);
+
/** Create a materialized feature. */
MaterializedFeature createMaterializedFeature(
CreateMaterializedFeatureRequest createMaterializedFeatureRequest);
@@ -22,12 +29,18 @@ MaterializedFeature createMaterializedFeature(
/** Delete a Feature. */
void deleteFeature(DeleteFeatureRequest deleteFeatureRequest);
+ /** Delete a Kafka config. */
+ void deleteKafkaConfig(DeleteKafkaConfigRequest deleteKafkaConfigRequest);
+
/** Delete a materialized feature. */
void deleteMaterializedFeature(DeleteMaterializedFeatureRequest deleteMaterializedFeatureRequest);
/** Get a Feature. */
Feature getFeature(GetFeatureRequest getFeatureRequest);
+ /** Get a Kafka config. */
+ KafkaConfig getKafkaConfig(GetKafkaConfigRequest getKafkaConfigRequest);
+
/** Get a materialized feature. */
MaterializedFeature getMaterializedFeature(
GetMaterializedFeatureRequest getMaterializedFeatureRequest);
@@ -35,6 +48,9 @@ MaterializedFeature getMaterializedFeature(
/** List Features. */
ListFeaturesResponse listFeatures(ListFeaturesRequest listFeaturesRequest);
+ /** List Kafka configs. */
+ ListKafkaConfigsResponse listKafkaConfigs(ListKafkaConfigsRequest listKafkaConfigsRequest);
+
/** List materialized features. */
ListMaterializedFeaturesResponse listMaterializedFeatures(
ListMaterializedFeaturesRequest listMaterializedFeaturesRequest);
@@ -42,6 +58,9 @@ ListMaterializedFeaturesResponse listMaterializedFeatures(
/** Update a Feature. */
Feature updateFeature(UpdateFeatureRequest updateFeatureRequest);
+ /** Update a Kafka config. */
+ KafkaConfig updateKafkaConfig(UpdateKafkaConfigRequest updateKafkaConfigRequest);
+
/** Update a materialized feature (pause/resume). */
MaterializedFeature updateMaterializedFeature(
UpdateMaterializedFeatureRequest updateMaterializedFeatureRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java
index e822a137b..62e440d3d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI.java
@@ -45,6 +45,15 @@ public void deleteOnlineStore(DeleteOnlineStoreRequest request) {
impl.deleteOnlineStore(request);
}
+ public void deleteOnlineTable(String onlineTableName) {
+ deleteOnlineTable(new DeleteOnlineTableRequest().setOnlineTableName(onlineTableName));
+ }
+
+ /** Delete online table. */
+ public void deleteOnlineTable(DeleteOnlineTableRequest request) {
+ impl.deleteOnlineTable(request);
+ }
+
public OnlineStore getOnlineStore(String name) {
return getOnlineStore(new GetOnlineStoreRequest().setName(name));
}
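The new convenience overload makes dropping an online table a one-liner. A sketch, assuming the featureStore() accessor on WorkspaceClient; the table name is a placeholder:

import com.databricks.sdk.WorkspaceClient;

public class DeleteOnlineTableExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Three-part name: catalog.schema.table.
    w.featureStore().deleteOnlineTable("main.online.user_features_table");
  }
}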
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java
index eaf695aa8..f49868d73 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java
@@ -45,6 +45,21 @@ public void deleteOnlineStore(DeleteOnlineStoreRequest request) {
}
}
+ @Override
+ public void deleteOnlineTable(DeleteOnlineTableRequest request) {
+ String path =
+ String.format("/api/2.0/feature-store/online-tables/%s", request.getOnlineTableName());
+ try {
+ Request req = new Request("DELETE", path);
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public OnlineStore getOnlineStore(GetOnlineStoreRequest request) {
String path = String.format("/api/2.0/feature-store/online-stores/%s", request.getName());
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreService.java
index 65bbe351a..c9de2f866 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreService.java
@@ -23,6 +23,9 @@ public interface FeatureStoreService {
/** Delete an Online Feature Store. */
void deleteOnlineStore(DeleteOnlineStoreRequest deleteOnlineStoreRequest);
+ /** Delete online table. */
+ void deleteOnlineTable(DeleteOnlineTableRequest deleteOnlineTableRequest);
+
/** Get an Online Feature Store. */
OnlineStore getOnlineStore(GetOnlineStoreRequest getOnlineStoreRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetKafkaConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetKafkaConfigRequest.java
new file mode 100755
index 000000000..1cea0d8d7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetKafkaConfigRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetKafkaConfigRequest {
+ /** Name of the Kafka config to get. */
+ @JsonIgnore private String name;
+
+ public GetKafkaConfigRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetKafkaConfigRequest that = (GetKafkaConfigRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetKafkaConfigRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobContext.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobContext.java
new file mode 100755
index 000000000..0b986b126
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/JobContext.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class JobContext {
+ /** The job ID where this API was invoked. */
+ @JsonProperty("job_id")
+ private Long jobId;
+
+ /** The job run ID where this API was invoked. */
+ @JsonProperty("job_run_id")
+ private Long jobRunId;
+
+ public JobContext setJobId(Long jobId) {
+ this.jobId = jobId;
+ return this;
+ }
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public JobContext setJobRunId(Long jobRunId) {
+ this.jobRunId = jobRunId;
+ return this;
+ }
+
+ public Long getJobRunId() {
+ return jobRunId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ JobContext that = (JobContext) o;
+ return Objects.equals(jobId, that.jobId) && Objects.equals(jobRunId, that.jobRunId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jobId, jobRunId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(JobContext.class)
+ .add("jobId", jobId)
+ .add("jobRunId", jobRunId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaConfig.java
new file mode 100755
index 000000000..4fc314969
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaConfig.java
@@ -0,0 +1,149 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Map;
+import java.util.Objects;
+
+@Generated
+public class KafkaConfig {
+ /** Authentication configuration for connection to topics. */
+ @JsonProperty("auth_config")
+ private AuthConfig authConfig;
+
+ /** A comma-separated list of host/port pairs pointing to the Kafka cluster. */
+ @JsonProperty("bootstrap_servers")
+ private String bootstrapServers;
+
+ /**
+ * Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options
+ * (kafka.*)
+ */
+ @JsonProperty("extra_options")
+ private Map<String, String> extraOptions;
+
+ /**
+ * Schema configuration for extracting message keys from topics. At least one of key_schema and
+ * value_schema must be provided.
+ */
+ @JsonProperty("key_schema")
+ private SchemaConfig keySchema;
+
+ /**
+ * Name that uniquely identifies this Kafka config within the metastore. This will be the
+ * identifier used from the Feature object to reference these configs for a feature. Can be
+ * distinct from topic name.
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /** Options to configure which Kafka topics to pull data from. */
+ @JsonProperty("subscription_mode")
+ private SubscriptionMode subscriptionMode;
+
+ /**
+ * Schema configuration for extracting message values from topics. At least one of key_schema and
+ * value_schema must be provided.
+ */
+ @JsonProperty("value_schema")
+ private SchemaConfig valueSchema;
+
+ public KafkaConfig setAuthConfig(AuthConfig authConfig) {
+ this.authConfig = authConfig;
+ return this;
+ }
+
+ public AuthConfig getAuthConfig() {
+ return authConfig;
+ }
+
+ public KafkaConfig setBootstrapServers(String bootstrapServers) {
+ this.bootstrapServers = bootstrapServers;
+ return this;
+ }
+
+ public String getBootstrapServers() {
+ return bootstrapServers;
+ }
+
+ public KafkaConfig setExtraOptions(Map<String, String> extraOptions) {
+ this.extraOptions = extraOptions;
+ return this;
+ }
+
+ public Map<String, String> getExtraOptions() {
+ return extraOptions;
+ }
+
+ public KafkaConfig setKeySchema(SchemaConfig keySchema) {
+ this.keySchema = keySchema;
+ return this;
+ }
+
+ public SchemaConfig getKeySchema() {
+ return keySchema;
+ }
+
+ public KafkaConfig setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public KafkaConfig setSubscriptionMode(SubscriptionMode subscriptionMode) {
+ this.subscriptionMode = subscriptionMode;
+ return this;
+ }
+
+ public SubscriptionMode getSubscriptionMode() {
+ return subscriptionMode;
+ }
+
+ public KafkaConfig setValueSchema(SchemaConfig valueSchema) {
+ this.valueSchema = valueSchema;
+ return this;
+ }
+
+ public SchemaConfig getValueSchema() {
+ return valueSchema;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KafkaConfig that = (KafkaConfig) o;
+ return Objects.equals(authConfig, that.authConfig)
+ && Objects.equals(bootstrapServers, that.bootstrapServers)
+ && Objects.equals(extraOptions, that.extraOptions)
+ && Objects.equals(keySchema, that.keySchema)
+ && Objects.equals(name, that.name)
+ && Objects.equals(subscriptionMode, that.subscriptionMode)
+ && Objects.equals(valueSchema, that.valueSchema);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ authConfig, bootstrapServers, extraOptions, keySchema, name, subscriptionMode, valueSchema);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KafkaConfig.class)
+ .add("authConfig", authConfig)
+ .add("bootstrapServers", bootstrapServers)
+ .add("extraOptions", extraOptions)
+ .add("keySchema", keySchema)
+ .add("name", name)
+ .add("subscriptionMode", subscriptionMode)
+ .add("valueSchema", valueSchema)
+ .toString();
+ }
+}
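
Usage sketch (editor's note, not part of the generated diff): KafkaConfig is a plain builder-style DTO, so a config can be assembled by chaining the setters above. All literal values below (name, brokers, topic, schema) are invented; `kafka.security.protocol` is a standard Kafka consumer option of the kind the extra_options field is documented to accept.

    import com.databricks.sdk.service.ml.KafkaConfig;
    import com.databricks.sdk.service.ml.SchemaConfig;
    import com.databricks.sdk.service.ml.SubscriptionMode;
    import java.util.Map;

    public class KafkaConfigExample {
      public static void main(String[] args) {
        KafkaConfig config =
            new KafkaConfig()
                .setName("clickstream-config") // hypothetical identifier, distinct from the topic name
                .setBootstrapServers("broker-1:9092,broker-2:9092")
                .setSubscriptionMode(new SubscriptionMode().setSubscribe("clickstream-events"))
                .setValueSchema(
                    new SchemaConfig()
                        .setJsonSchema(
                            "{\"type\":\"object\",\"properties\":{\"user_id\":{\"type\":\"string\"}}}"))
                .setExtraOptions(Map.of("kafka.security.protocol", "SASL_SSL"));
        System.out.println(config);
      }
    }
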
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaSource.java
new file mode 100755
index 000000000..57036af65
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/KafkaSource.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class KafkaSource {
+ /** The entity column identifiers of the Kafka source. */
+ @JsonProperty("entity_column_identifiers")
+ private Collection<ColumnIdentifier> entityColumnIdentifiers;
+
+ /**
+ * Name of the Kafka source, used to identify it. This is used to look up the corresponding
+ * KafkaConfig object. Can be distinct from topic name.
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /** The timeseries column identifier of the Kafka source. */
+ @JsonProperty("timeseries_column_identifier")
+ private ColumnIdentifier timeseriesColumnIdentifier;
+
+ public KafkaSource setEntityColumnIdentifiers(
+ Collection<ColumnIdentifier> entityColumnIdentifiers) {
+ this.entityColumnIdentifiers = entityColumnIdentifiers;
+ return this;
+ }
+
+ public Collection<ColumnIdentifier> getEntityColumnIdentifiers() {
+ return entityColumnIdentifiers;
+ }
+
+ public KafkaSource setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public KafkaSource setTimeseriesColumnIdentifier(ColumnIdentifier timeseriesColumnIdentifier) {
+ this.timeseriesColumnIdentifier = timeseriesColumnIdentifier;
+ return this;
+ }
+
+ public ColumnIdentifier getTimeseriesColumnIdentifier() {
+ return timeseriesColumnIdentifier;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ KafkaSource that = (KafkaSource) o;
+ return Objects.equals(entityColumnIdentifiers, that.entityColumnIdentifiers)
+ && Objects.equals(name, that.name)
+ && Objects.equals(timeseriesColumnIdentifier, that.timeseriesColumnIdentifier);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(entityColumnIdentifiers, name, timeseriesColumnIdentifier);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(KafkaSource.class)
+ .add("entityColumnIdentifiers", entityColumnIdentifiers)
+ .add("name", name)
+ .add("timeseriesColumnIdentifier", timeseriesColumnIdentifier)
+ .toString();
+ }
+}
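
Usage sketch (editor's note, not part of the generated diff): a KafkaSource references a KafkaConfig by name, per the javadoc above. ColumnIdentifier's own fields are not part of this diff, so the sketch takes already-built identifiers as inputs.

    import com.databricks.sdk.service.ml.ColumnIdentifier;
    import com.databricks.sdk.service.ml.KafkaSource;
    import java.util.Collection;

    public class KafkaSourceExample {
      // Construction of ColumnIdentifier values is omitted; that type is outside this diff.
      static KafkaSource clickstreamSource(
          Collection<ColumnIdentifier> entityColumns, ColumnIdentifier eventTimeColumn) {
        return new KafkaSource()
            .setName("clickstream-config") // must match the name of the KafkaConfig it refers to
            .setEntityColumnIdentifiers(entityColumns)
            .setTimeseriesColumnIdentifier(eventTimeColumn);
      }
    }
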
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LineageContext.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LineageContext.java
new file mode 100755
index 000000000..6472cf0b3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LineageContext.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Lineage context information for tracking where an API was invoked. This will allow us to track
+ * lineage, which currently uses caller entity information for use across the Lineage Client and
+ * Observability in Lumberjack.
+ */
+@Generated
+public class LineageContext {
+ /** Job context information including job ID and run ID. */
+ @JsonProperty("job_context")
+ private JobContext jobContext;
+
+ /** The notebook ID where this API was invoked. */
+ @JsonProperty("notebook_id")
+ private Long notebookId;
+
+ public LineageContext setJobContext(JobContext jobContext) {
+ this.jobContext = jobContext;
+ return this;
+ }
+
+ public JobContext getJobContext() {
+ return jobContext;
+ }
+
+ public LineageContext setNotebookId(Long notebookId) {
+ this.notebookId = notebookId;
+ return this;
+ }
+
+ public Long getNotebookId() {
+ return notebookId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ LineageContext that = (LineageContext) o;
+ return Objects.equals(jobContext, that.jobContext)
+ && Objects.equals(notebookId, that.notebookId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jobContext, notebookId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(LineageContext.class)
+ .add("jobContext", jobContext)
+ .add("notebookId", notebookId)
+ .toString();
+ }
+}
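
Usage sketch (editor's note, not part of the generated diff): LineageContext nests a JobContext when the call originates from a job run. The IDs below are invented.

    import com.databricks.sdk.service.ml.JobContext;
    import com.databricks.sdk.service.ml.LineageContext;

    public class LineageContextExample {
      public static void main(String[] args) {
        LineageContext context =
            new LineageContext()
                .setNotebookId(1234L) // invented notebook ID
                .setJobContext(new JobContext().setJobId(42L).setJobRunId(4242L));
        System.out.println(context);
      }
    }
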
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListKafkaConfigsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListKafkaConfigsRequest.java
new file mode 100755
index 000000000..9294af304
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListKafkaConfigsRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListKafkaConfigsRequest {
+ /** The maximum number of results to return. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** Pagination token to go to the next page based on a previous query. */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListKafkaConfigsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListKafkaConfigsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListKafkaConfigsRequest that = (ListKafkaConfigsRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListKafkaConfigsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListKafkaConfigsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListKafkaConfigsResponse.java
new file mode 100755
index 000000000..e043b88d9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListKafkaConfigsResponse.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListKafkaConfigsResponse {
+ /** List of Kafka configs. Schemas are not included in the response. */
+ @JsonProperty("kafka_configs")
+ private Collection<KafkaConfig> kafkaConfigs;
+
+ /** Pagination token to request the next page of results for this query. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListKafkaConfigsResponse setKafkaConfigs(Collection<KafkaConfig> kafkaConfigs) {
+ this.kafkaConfigs = kafkaConfigs;
+ return this;
+ }
+
+ public Collection<KafkaConfig> getKafkaConfigs() {
+ return kafkaConfigs;
+ }
+
+ public ListKafkaConfigsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListKafkaConfigsResponse that = (ListKafkaConfigsResponse) o;
+ return Objects.equals(kafkaConfigs, that.kafkaConfigs)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(kafkaConfigs, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListKafkaConfigsResponse.class)
+ .add("kafkaConfigs", kafkaConfigs)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
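
Usage sketch (editor's note, not part of the generated diff): the request/response pair above follows the SDK's usual page_token convention, so all pages can be drained with a loop like the one below. The generated service method that actually executes the request is not visible in this diff, so it is abstracted as a function argument.

    import com.databricks.sdk.service.ml.KafkaConfig;
    import com.databricks.sdk.service.ml.ListKafkaConfigsRequest;
    import com.databricks.sdk.service.ml.ListKafkaConfigsResponse;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Function;

    public class ListKafkaConfigsExample {
      // `callListEndpoint` stands in for whichever generated method serves this request.
      static List<KafkaConfig> listAll(
          Function<ListKafkaConfigsRequest, ListKafkaConfigsResponse> callListEndpoint) {
        List<KafkaConfig> all = new ArrayList<>();
        String token = null;
        do {
          ListKafkaConfigsResponse page =
              callListEndpoint.apply(
                  new ListKafkaConfigsRequest().setPageSize(100L).setPageToken(token));
          if (page.getKafkaConfigs() != null) {
            all.addAll(page.getKafkaConfigs());
          }
          token = page.getNextPageToken();
        } while (token != null && !token.isEmpty());
        return all;
      }
    }
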
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java
index 598f1df11..848476656 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java
@@ -31,7 +31,7 @@ public class MaterializedFeature {
/** */
@JsonProperty("online_store_config")
- private OnlineStore onlineStoreConfig;
+ private OnlineStoreConfig onlineStoreConfig;
/** The schedule state of the materialization pipeline. */
@JsonProperty("pipeline_schedule_state")
@@ -80,12 +80,12 @@ public OfflineStoreConfig getOfflineStoreConfig() {
return offlineStoreConfig;
}
- public MaterializedFeature setOnlineStoreConfig(OnlineStore onlineStoreConfig) {
+ public MaterializedFeature setOnlineStoreConfig(OnlineStoreConfig onlineStoreConfig) {
this.onlineStoreConfig = onlineStoreConfig;
return this;
}
- public OnlineStore getOnlineStoreConfig() {
+ public OnlineStoreConfig getOnlineStoreConfig() {
return onlineStoreConfig;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStoreConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStoreConfig.java
new file mode 100755
index 000000000..c5d47c508
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStoreConfig.java
@@ -0,0 +1,95 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Configuration for online store destination. */
+@Generated
+public class OnlineStoreConfig {
+ /**
+ * The Unity Catalog catalog name. This name is also used as the Lakebase logical database name.
+ */
+ @JsonProperty("catalog_name")
+ private String catalogName;
+
+ /** The name of the target online store. */
+ @JsonProperty("online_store_name")
+ private String onlineStoreName;
+
+ /** The Unity Catalog schema name. */
+ @JsonProperty("schema_name")
+ private String schemaName;
+
+ /**
+ * Prefix for Unity Catalog table name. The materialized feature will be stored in a Lakebase
+ * table with this prefix and a generated postfix.
+ */
+ @JsonProperty("table_name_prefix")
+ private String tableNamePrefix;
+
+ public OnlineStoreConfig setCatalogName(String catalogName) {
+ this.catalogName = catalogName;
+ return this;
+ }
+
+ public String getCatalogName() {
+ return catalogName;
+ }
+
+ public OnlineStoreConfig setOnlineStoreName(String onlineStoreName) {
+ this.onlineStoreName = onlineStoreName;
+ return this;
+ }
+
+ public String getOnlineStoreName() {
+ return onlineStoreName;
+ }
+
+ public OnlineStoreConfig setSchemaName(String schemaName) {
+ this.schemaName = schemaName;
+ return this;
+ }
+
+ public String getSchemaName() {
+ return schemaName;
+ }
+
+ public OnlineStoreConfig setTableNamePrefix(String tableNamePrefix) {
+ this.tableNamePrefix = tableNamePrefix;
+ return this;
+ }
+
+ public String getTableNamePrefix() {
+ return tableNamePrefix;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ OnlineStoreConfig that = (OnlineStoreConfig) o;
+ return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(onlineStoreName, that.onlineStoreName)
+ && Objects.equals(schemaName, that.schemaName)
+ && Objects.equals(tableNamePrefix, that.tableNamePrefix);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(catalogName, onlineStoreName, schemaName, tableNamePrefix);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(OnlineStoreConfig.class)
+ .add("catalogName", catalogName)
+ .add("onlineStoreName", onlineStoreName)
+ .add("schemaName", schemaName)
+ .add("tableNamePrefix", tableNamePrefix)
+ .toString();
+ }
+}
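
Usage sketch (editor's note, not part of the generated diff): OnlineStoreConfig is the destination half of a materialized feature; the MaterializedFeature hunk above swaps its online_store_config field over to this type. All names below are invented.

    import com.databricks.sdk.service.ml.MaterializedFeature;
    import com.databricks.sdk.service.ml.OnlineStoreConfig;

    public class OnlineStoreConfigExample {
      public static void main(String[] args) {
        OnlineStoreConfig online =
            new OnlineStoreConfig()
                .setOnlineStoreName("prod-online-store")
                .setCatalogName("main")
                .setSchemaName("features")
                .setTableNamePrefix("user_features"); // a generated postfix is appended, per the javadoc
        MaterializedFeature feature = new MaterializedFeature().setOnlineStoreConfig(online);
        System.out.println(feature);
      }
    }
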
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SchemaConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SchemaConfig.java
new file mode 100755
index 000000000..6ce1dd5f8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SchemaConfig.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class SchemaConfig {
+ /** Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/) */
+ @JsonProperty("json_schema")
+ private String jsonSchema;
+
+ public SchemaConfig setJsonSchema(String jsonSchema) {
+ this.jsonSchema = jsonSchema;
+ return this;
+ }
+
+ public String getJsonSchema() {
+ return jsonSchema;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SchemaConfig that = (SchemaConfig) o;
+ return Objects.equals(jsonSchema, that.jsonSchema);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jsonSchema);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SchemaConfig.class).add("jsonSchema", jsonSchema).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SubscriptionMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SubscriptionMode.java
new file mode 100755
index 000000000..042e26b74
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SubscriptionMode.java
@@ -0,0 +1,80 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class SubscriptionMode {
+ /**
+ * A JSON string that contains the specific topic-partitions to consume from. For example, for
+ * '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from.
+ */
+ @JsonProperty("assign")
+ private String assign;
+
+ /** A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'. */
+ @JsonProperty("subscribe")
+ private String subscribe;
+
+ /**
+ * A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to
+ * all topics starting with 'topic'.
+ */
+ @JsonProperty("subscribe_pattern")
+ private String subscribePattern;
+
+ public SubscriptionMode setAssign(String assign) {
+ this.assign = assign;
+ return this;
+ }
+
+ public String getAssign() {
+ return assign;
+ }
+
+ public SubscriptionMode setSubscribe(String subscribe) {
+ this.subscribe = subscribe;
+ return this;
+ }
+
+ public String getSubscribe() {
+ return subscribe;
+ }
+
+ public SubscriptionMode setSubscribePattern(String subscribePattern) {
+ this.subscribePattern = subscribePattern;
+ return this;
+ }
+
+ public String getSubscribePattern() {
+ return subscribePattern;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SubscriptionMode that = (SubscriptionMode) o;
+ return Objects.equals(assign, that.assign)
+ && Objects.equals(subscribe, that.subscribe)
+ && Objects.equals(subscribePattern, that.subscribePattern);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(assign, subscribe, subscribePattern);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SubscriptionMode.class)
+ .add("assign", assign)
+ .add("subscribe", subscribe)
+ .add("subscribePattern", subscribePattern)
+ .toString();
+ }
+}
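
Usage sketch (editor's note, not part of the generated diff): the three fields mirror the Kafka source's assign / subscribe / subscribePattern options. Presumably exactly one is set per config; that exclusivity is an assumption based on the field docs, which describe three alternative ways of choosing topics.

    import com.databricks.sdk.service.ml.SubscriptionMode;

    public class SubscriptionModeExample {
      public static void main(String[] args) {
        SubscriptionMode byName = new SubscriptionMode().setSubscribe("topicA,topicB");
        SubscriptionMode byPattern = new SubscriptionMode().setSubscribePattern("topic.*");
        SubscriptionMode byPartition =
            new SubscriptionMode().setAssign("{\"topicA\":[0,1],\"topicB\":[2,4]}");
        System.out.println(byName + "\n" + byPattern + "\n" + byPartition);
      }
    }
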
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateKafkaConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateKafkaConfigRequest.java
new file mode 100755
index 000000000..faa384026
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateKafkaConfigRequest.java
@@ -0,0 +1,81 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
+import java.util.Objects;
+
+@Generated
+public class UpdateKafkaConfigRequest {
+ /** The Kafka config to update. */
+ @JsonProperty("kafka_config")
+ private KafkaConfig kafkaConfig;
+
+ /**
+ * Name that uniquely identifies this Kafka config within the metastore. This will be the
+ * identifier used from the Feature object to reference these configs for a feature. Can be
+ * distinct from topic name.
+ */
+ @JsonIgnore private String name;
+
+ /** The list of fields to update. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private FieldMask updateMask;
+
+ public UpdateKafkaConfigRequest setKafkaConfig(KafkaConfig kafkaConfig) {
+ this.kafkaConfig = kafkaConfig;
+ return this;
+ }
+
+ public KafkaConfig getKafkaConfig() {
+ return kafkaConfig;
+ }
+
+ public UpdateKafkaConfigRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateKafkaConfigRequest setUpdateMask(FieldMask updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public FieldMask getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateKafkaConfigRequest that = (UpdateKafkaConfigRequest) o;
+ return Objects.equals(kafkaConfig, that.kafkaConfig)
+ && Objects.equals(name, that.name)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(kafkaConfig, name, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateKafkaConfigRequest.class)
+ .add("kafkaConfig", kafkaConfig)
+ .add("name", name)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
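
Usage sketch (editor's note, not part of the generated diff): updates are partial, scoped by a protobuf FieldMask whose paths use wire names (and update_mask is a query parameter, per the annotations above). Treating bootstrap_servers as a maskable path is an assumption; replacing only the broker list would then look like this.

    import com.databricks.sdk.service.ml.KafkaConfig;
    import com.databricks.sdk.service.ml.UpdateKafkaConfigRequest;
    import com.google.protobuf.FieldMask;

    public class UpdateKafkaConfigExample {
      public static void main(String[] args) {
        UpdateKafkaConfigRequest request =
            new UpdateKafkaConfigRequest()
                .setName("clickstream-config")
                .setKafkaConfig(new KafkaConfig().setBootstrapServers("broker-3:9092,broker-4:9092"))
                // Only the paths listed in the mask are updated.
                .setUpdateMask(FieldMask.newBuilder().addPaths("bootstrap_servers").build());
        System.out.println(request);
      }
    }
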
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java
new file mode 100755
index 000000000..729e23fae
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ConnectionParameters {
+ /**
+ * Source catalog for initial connection. This is necessary for schema exploration in some
+ * database systems like Oracle, and optional but nice-to-have in some other database systems like
+ * Postgres. For Oracle databases, this maps to a service name.
+ */
+ @JsonProperty("source_catalog")
+ private String sourceCatalog;
+
+ public ConnectionParameters setSourceCatalog(String sourceCatalog) {
+ this.sourceCatalog = sourceCatalog;
+ return this;
+ }
+
+ public String getSourceCatalog() {
+ return sourceCatalog;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ConnectionParameters that = (ConnectionParameters) o;
+ return Objects.equals(sourceCatalog, that.sourceCatalog);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(sourceCatalog);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ConnectionParameters.class)
+ .add("sourceCatalog", sourceCatalog)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java
index 58142fafd..fa0d33a90 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java
@@ -23,14 +23,18 @@ public class IngestionGatewayPipelineDefinition {
@JsonProperty("connection_name")
private String connectionName;
+ /** Optional, Internal. Parameters required to establish an initial connection with the source. */
+ @JsonProperty("connection_parameters")
+ private ConnectionParameters connectionParameters;
+
/** Required, Immutable. The name of the catalog for the gateway pipeline's storage location. */
@JsonProperty("gateway_storage_catalog")
private String gatewayStorageCatalog;
/**
* Optional. The Unity Catalog-compatible name for the gateway storage location. This is the
- * destination to use for the data that is extracted by the gateway. Delta Live Tables system will
- * automatically create the storage location under the catalog and schema.
+ * destination to use for the data that is extracted by the gateway. Spark Declarative Pipelines
+ * system will automatically create the storage location under the catalog and schema.
*/
@JsonProperty("gateway_storage_name")
private String gatewayStorageName;
@@ -57,6 +61,16 @@ public String getConnectionName() {
return connectionName;
}
+ public IngestionGatewayPipelineDefinition setConnectionParameters(
+ ConnectionParameters connectionParameters) {
+ this.connectionParameters = connectionParameters;
+ return this;
+ }
+
+ public ConnectionParameters getConnectionParameters() {
+ return connectionParameters;
+ }
+
public IngestionGatewayPipelineDefinition setGatewayStorageCatalog(String gatewayStorageCatalog) {
this.gatewayStorageCatalog = gatewayStorageCatalog;
return this;
@@ -91,6 +105,7 @@ public boolean equals(Object o) {
IngestionGatewayPipelineDefinition that = (IngestionGatewayPipelineDefinition) o;
return Objects.equals(connectionId, that.connectionId)
&& Objects.equals(connectionName, that.connectionName)
+ && Objects.equals(connectionParameters, that.connectionParameters)
&& Objects.equals(gatewayStorageCatalog, that.gatewayStorageCatalog)
&& Objects.equals(gatewayStorageName, that.gatewayStorageName)
&& Objects.equals(gatewayStorageSchema, that.gatewayStorageSchema);
@@ -101,6 +116,7 @@ public int hashCode() {
return Objects.hash(
connectionId,
connectionName,
+ connectionParameters,
gatewayStorageCatalog,
gatewayStorageName,
gatewayStorageSchema);
@@ -111,6 +127,7 @@ public String toString() {
return new ToStringer(IngestionGatewayPipelineDefinition.class)
.add("connectionId", connectionId)
.add("connectionName", connectionName)
+ .add("connectionParameters", connectionParameters)
.add("gatewayStorageCatalog", gatewayStorageCatalog)
.add("gatewayStorageName", gatewayStorageName)
.add("gatewayStorageSchema", gatewayStorageSchema)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
index 6c311c809..1e96af349 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
@@ -17,6 +17,14 @@ public class IngestionPipelineDefinition {
@JsonProperty("connection_name")
private String connectionName;
+ /**
+ * Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs
+ * directly without the need to specify a UC connection or ingestion gateway. The `source_catalog`
+ * fields in objects of IngestionConfig are interpreted as the UC foreign catalogs to ingest from.
+ */
+ @JsonProperty("ingest_from_uc_foreign_catalog")
+ private Boolean ingestFromUcForeignCatalog;
+
/**
* Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate
* with the source database. This is used with connectors to databases like SQL Server.
@@ -65,6 +73,16 @@ public String getConnectionName() {
return connectionName;
}
+ public IngestionPipelineDefinition setIngestFromUcForeignCatalog(
+ Boolean ingestFromUcForeignCatalog) {
+ this.ingestFromUcForeignCatalog = ingestFromUcForeignCatalog;
+ return this;
+ }
+
+ public Boolean getIngestFromUcForeignCatalog() {
+ return ingestFromUcForeignCatalog;
+ }
+
public IngestionPipelineDefinition setIngestionGatewayId(String ingestionGatewayId) {
this.ingestionGatewayId = ingestionGatewayId;
return this;
@@ -126,6 +144,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
IngestionPipelineDefinition that = (IngestionPipelineDefinition) o;
return Objects.equals(connectionName, that.connectionName)
+ && Objects.equals(ingestFromUcForeignCatalog, that.ingestFromUcForeignCatalog)
&& Objects.equals(ingestionGatewayId, that.ingestionGatewayId)
&& Objects.equals(netsuiteJarPath, that.netsuiteJarPath)
&& Objects.equals(objects, that.objects)
@@ -138,6 +157,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
connectionName,
+ ingestFromUcForeignCatalog,
ingestionGatewayId,
netsuiteJarPath,
objects,
@@ -150,6 +170,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(IngestionPipelineDefinition.class)
.add("connectionName", connectionName)
+ .add("ingestFromUcForeignCatalog", ingestFromUcForeignCatalog)
.add("ingestionGatewayId", ingestionGatewayId)
.add("netsuiteJarPath", netsuiteJarPath)
.add("objects", objects)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java
index ca1ccf7cb..9e68c3c76 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java
@@ -7,29 +7,18 @@
@Generated
public enum IngestionSourceType {
BIGQUERY,
- CONFLUENCE,
DYNAMICS365,
FOREIGN_CATALOG,
GA4_RAW_DATA,
- GOOGLE_ADS,
- GUIDEWIRE,
- HUBSPOT,
MANAGED_POSTGRESQL,
- META_MARKETING,
MYSQL,
NETSUITE,
ORACLE,
POSTGRESQL,
- REDSHIFT,
SALESFORCE,
- SALESFORCE_MARKETING_CLOUD,
SERVICENOW,
SHAREPOINT,
- SQLDW,
SQLSERVER,
TERADATA,
- TIKTOK_ADS,
- WORKDAY_HCM,
WORKDAY_RAAS,
- ZENDESK,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
index 0a270aad3..ba84cc1cc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
@@ -14,19 +14,19 @@
import org.slf4j.LoggerFactory;
/**
- * The Delta Live Tables API allows you to create, edit, delete, start, and view details about
- * pipelines.
+ * The Lakeflow Spark Declarative Pipelines API allows you to create, edit, delete, start, and view
+ * details about pipelines.
*
- * <p>Delta Live Tables is a framework for building reliable, maintainable, and testable data
- * processing pipelines. You define the transformations to perform on your data, and Delta Live
- * Tables manages task orchestration, cluster management, monitoring, data quality, and error
- * handling.
+ * <p>Spark Declarative Pipelines is a framework for building reliable, maintainable, and testable
+ * data processing pipelines. You define the transformations to perform on your data, and Spark
+ * Declarative Pipelines manages task orchestration, cluster management, monitoring, data quality,
+ * and error handling.
*
- * <p>Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta
- * Live Tables manages how your data is transformed based on a target schema you define for each
- * processing step. You can also enforce data quality with Delta Live Tables expectations.
- * Expectations allow you to define expected data quality and specify how to handle records that
- * fail those expectations.
+ * <p>Instead of defining your data pipelines using a series of separate Apache Spark tasks, Spark
+ * Declarative Pipelines manages how your data is transformed based on a target schema you define
+ * for each processing step. You can also enforce data quality with Spark Declarative Pipelines
+ * expectations. Expectations allow you to define expected data quality and specify how to handle
+ * records that fail those expectations.
*/
@Generated
public class PipelinesAPI {
@@ -102,8 +102,9 @@ public void delete(String pipelineId) {
}
/**
- * Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the
- * pipeline and its tables. You cannot undo this action.
+ * Deletes a pipeline. If the pipeline publishes to Unity Catalog, pipeline deletion will cascade
+ * to all pipeline tables. Please reach out to Databricks support for assistance in undoing
+ * this action.
*/
public void delete(DeletePipelineRequest request) {
impl.delete(request);
@@ -167,7 +168,7 @@ public Iterable listPipelineEvents(ListPipelineEventsRequest requ
});
}
- /** Lists pipelines defined in the Delta Live Tables system. */
+ /** Lists pipelines defined in the Spark Declarative Pipelines system. */
public Iterable<PipelineStateInfo> listPipelines(ListPipelinesRequest request) {
return new Paginator<>(
request,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
index d2d0a81c4..3947e67d1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
@@ -4,19 +4,19 @@
import com.databricks.sdk.support.Generated;
/**
- * The Delta Live Tables API allows you to create, edit, delete, start, and view details about
- * pipelines.
+ * The Lakeflow Spark Declarative Pipelines API allows you to create, edit, delete, start, and view
+ * details about pipelines.
*
- * <p>Delta Live Tables is a framework for building reliable, maintainable, and testable data
- * processing pipelines. You define the transformations to perform on your data, and Delta Live
- * Tables manages task orchestration, cluster management, monitoring, data quality, and error
- * handling.
+ * <p>Spark Declarative Pipelines is a framework for building reliable, maintainable, and testable
+ * data processing pipelines. You define the transformations to perform on your data, and Spark
+ * Declarative Pipelines manages task orchestration, cluster management, monitoring, data quality,
+ * and error handling.
*
- * <p>Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta
- * Live Tables manages how your data is transformed based on a target schema you define for each
- * processing step. You can also enforce data quality with Delta Live Tables expectations.
- * Expectations allow you to define expected data quality and specify how to handle records that
- * fail those expectations.
+ * <p>Instead of defining your data pipelines using a series of separate Apache Spark tasks, Spark
+ * Declarative Pipelines manages how your data is transformed based on a target schema you define
+ * for each processing step. You can also enforce data quality with Spark Declarative Pipelines
+ * expectations. Expectations allow you to define expected data quality and specify how to handle
+ * records that fail those expectations.
*
* <p>This is the high-level interface, that contains generated methods.
*
@@ -31,8 +31,9 @@ public interface PipelinesService {
CreatePipelineResponse create(CreatePipeline createPipeline);
/**
- * Deletes a pipeline. Deleting a pipeline is a permanent action that stops and removes the
- * pipeline and its tables. You cannot undo this action.
+ * Deletes a pipeline. If the pipeline publishes to Unity Catalog, pipeline deletion will cascade
+ * to all pipeline tables. Please reach out to Databricks support for assistance in undoing
+ * this action.
*/
void delete(DeletePipelineRequest deletePipelineRequest);
@@ -55,7 +56,7 @@ GetPipelinePermissionLevelsResponse getPermissionLevels(
ListPipelineEventsResponse listPipelineEvents(
ListPipelineEventsRequest listPipelineEventsRequest);
- /** Lists pipelines defined in the Delta Live Tables system. */
+ /** Lists pipelines defined in the Spark Declarative Pipelines system. */
ListPipelinesResponse listPipelines(ListPipelinesRequest listPipelinesRequest);
/** List updates for an active pipeline. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RewindDatasetSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RewindDatasetSpec.java
new file mode 100755
index 000000000..ffc07674d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RewindDatasetSpec.java
@@ -0,0 +1,75 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Configuration for rewinding a specific dataset. */
+@Generated
+public class RewindDatasetSpec {
+ /** Whether to cascade the rewind to dependent datasets. Must be specified. */
+ @JsonProperty("cascade")
+ private Boolean cascade;
+
+ /** The identifier of the dataset (e.g., "main.foo.tbl1"). */
+ @JsonProperty("identifier")
+ private String identifier;
+
+ /** Whether to reset checkpoints for this dataset. */
+ @JsonProperty("reset_checkpoints")
+ private Boolean resetCheckpoints;
+
+ public RewindDatasetSpec setCascade(Boolean cascade) {
+ this.cascade = cascade;
+ return this;
+ }
+
+ public Boolean getCascade() {
+ return cascade;
+ }
+
+ public RewindDatasetSpec setIdentifier(String identifier) {
+ this.identifier = identifier;
+ return this;
+ }
+
+ public String getIdentifier() {
+ return identifier;
+ }
+
+ public RewindDatasetSpec setResetCheckpoints(Boolean resetCheckpoints) {
+ this.resetCheckpoints = resetCheckpoints;
+ return this;
+ }
+
+ public Boolean getResetCheckpoints() {
+ return resetCheckpoints;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RewindDatasetSpec that = (RewindDatasetSpec) o;
+ return Objects.equals(cascade, that.cascade)
+ && Objects.equals(identifier, that.identifier)
+ && Objects.equals(resetCheckpoints, that.resetCheckpoints);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(cascade, identifier, resetCheckpoints);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RewindDatasetSpec.class)
+ .add("cascade", cascade)
+ .add("identifier", identifier)
+ .add("resetCheckpoints", resetCheckpoints)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RewindSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RewindSpec.java
new file mode 100755
index 000000000..d6a16df9b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RewindSpec.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Information about a rewind being requested for this pipeline or some of the datasets in it. */
+@Generated
+public class RewindSpec {
+ /**
+ * List of datasets to rewind with specific configuration for each. When not specified, all
+ * datasets will be rewound with cascade = true and reset_checkpoints = true.
+ */
+ @JsonProperty("datasets")
+ private Collection<RewindDatasetSpec> datasets;
+
+ /** If true, this is a dry run and we should emit the RewindSummary but not perform the rewind. */
+ @JsonProperty("dry_run")
+ private Boolean dryRun;
+
+ /** The base timestamp to rewind to. Must be specified. */
+ @JsonProperty("rewind_timestamp")
+ private String rewindTimestamp;
+
+ public RewindSpec setDatasets(Collection<RewindDatasetSpec> datasets) {
+ this.datasets = datasets;
+ return this;
+ }
+
+ public Collection<RewindDatasetSpec> getDatasets() {
+ return datasets;
+ }
+
+ public RewindSpec setDryRun(Boolean dryRun) {
+ this.dryRun = dryRun;
+ return this;
+ }
+
+ public Boolean getDryRun() {
+ return dryRun;
+ }
+
+ public RewindSpec setRewindTimestamp(String rewindTimestamp) {
+ this.rewindTimestamp = rewindTimestamp;
+ return this;
+ }
+
+ public String getRewindTimestamp() {
+ return rewindTimestamp;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RewindSpec that = (RewindSpec) o;
+ return Objects.equals(datasets, that.datasets)
+ && Objects.equals(dryRun, that.dryRun)
+ && Objects.equals(rewindTimestamp, that.rewindTimestamp);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(datasets, dryRun, rewindTimestamp);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RewindSpec.class)
+ .add("datasets", datasets)
+ .add("dryRun", dryRun)
+ .add("rewindTimestamp", rewindTimestamp)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java
index 14df875b5..ee03854df 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StartUpdate.java
@@ -38,6 +38,13 @@ public class StartUpdate {
@JsonProperty("refresh_selection")
private Collection<String> refreshSelection;
+ /**
+ * The information about the requested rewind operation. If specified, this is a rewind mode
+ * update.
+ */
+ @JsonProperty("rewind_spec")
+ private RewindSpec rewindSpec;
+
/**
* If true, this update only validates the correctness of pipeline source code but does not
* materialize or publish any datasets.
@@ -90,6 +97,15 @@ public Collection<String> getRefreshSelection() {
return refreshSelection;
}
+ public StartUpdate setRewindSpec(RewindSpec rewindSpec) {
+ this.rewindSpec = rewindSpec;
+ return this;
+ }
+
+ public RewindSpec getRewindSpec() {
+ return rewindSpec;
+ }
+
public StartUpdate setValidateOnly(Boolean validateOnly) {
this.validateOnly = validateOnly;
return this;
@@ -109,13 +125,20 @@ public boolean equals(Object o) {
&& Objects.equals(fullRefreshSelection, that.fullRefreshSelection)
&& Objects.equals(pipelineId, that.pipelineId)
&& Objects.equals(refreshSelection, that.refreshSelection)
+ && Objects.equals(rewindSpec, that.rewindSpec)
&& Objects.equals(validateOnly, that.validateOnly);
}
@Override
public int hashCode() {
return Objects.hash(
- cause, fullRefresh, fullRefreshSelection, pipelineId, refreshSelection, validateOnly);
+ cause,
+ fullRefresh,
+ fullRefreshSelection,
+ pipelineId,
+ refreshSelection,
+ rewindSpec,
+ validateOnly);
}
@Override
@@ -126,6 +149,7 @@ public String toString() {
.add("fullRefreshSelection", fullRefreshSelection)
.add("pipelineId", pipelineId)
.add("refreshSelection", refreshSelection)
+ .add("rewindSpec", rewindSpec)
.add("validateOnly", validateOnly)
.toString();
}
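
Usage sketch (editor's note, not part of the generated diff): a rewind-mode update is a StartUpdate whose rewind_spec is populated. The timestamp format and the setPipelineId setter are assumptions (the setter is implied by the fields in the hunk above); the dataset identifier follows the javadoc's own example.

    import com.databricks.sdk.service.pipelines.RewindDatasetSpec;
    import com.databricks.sdk.service.pipelines.RewindSpec;
    import com.databricks.sdk.service.pipelines.StartUpdate;
    import java.util.List;

    public class RewindUpdateExample {
      public static void main(String[] args) {
        RewindSpec rewind =
            new RewindSpec()
                .setRewindTimestamp("2025-01-15T00:00:00Z") // assumed timestamp format
                .setDryRun(true) // emit the RewindSummary without performing the rewind
                .setDatasets(
                    List.of(
                        new RewindDatasetSpec()
                            .setIdentifier("main.foo.tbl1") // identifier format from the javadoc
                            .setCascade(true)
                            .setResetCheckpoints(true)));
        StartUpdate update = new StartUpdate().setPipelineId("pipeline-id").setRewindSpec(rewind);
        System.out.println(update);
      }
    }
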
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
index 7626f06c3..4e8dc6013 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
@@ -49,8 +49,8 @@ public class TableSpecificConfig {
private TableSpecificConfigScdType scdType;
/**
- * The column names specifying the logical order of events in the source data. Delta Live Tables
- * uses this sequencing to handle change events that arrive out of order.
+ * The column names specifying the logical order of events in the source data. Spark Declarative
+ * Pipelines uses this sequencing to handle change events that arrive out of order.
*/
@JsonProperty("sequence_by")
private Collection<String> sequenceBy;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java
index 54c96f842..1d56d65e7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState.java
@@ -7,6 +7,8 @@
@Generated
public enum
CustomerFacingNetworkConnectivityConfigAwsPrivateEndpointRulePrivateLinkConnectionState {
+ CREATE_FAILED,
+ CREATING,
DISCONNECTED,
ESTABLISHED,
EXPIRED,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java
index 2331f89ed..2c44865e7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java
@@ -6,6 +6,8 @@
@Generated
public enum NccAzurePrivateEndpointRuleConnectionState {
+ CREATE_FAILED,
+ CREATING,
DISCONNECTED,
ESTABLISHED,
EXPIRED,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java
index d2c3d1407..0b0bcdebd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java
@@ -6,8 +6,6 @@
@Generated
public enum NccPrivateEndpointRulePrivateLinkConnectionState {
- CREATE_FAILED,
- CREATING,
DISCONNECTED,
ESTABLISHED,
EXPIRED,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java
index 423585a33..452b64b01 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java
@@ -58,7 +58,9 @@ public Iterable<SettingsMetadata> listAccountSettingsMetadata(
/**
* Patch a setting value at account level. See :method:settingsv2/listaccountsettingsmetadata for
- * list of setting available via public APIs at account level.
+ * a list of settings available via public APIs at account level. To determine the correct field to
+ * include in a patch request, refer to the type field of the setting returned in the
+ * :method:settingsv2/listaccountsettingsmetadata response.
*/
public Setting patchPublicAccountSetting(PatchPublicAccountSettingRequest request) {
return impl.patchPublicAccountSetting(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Service.java
index 3b0ed61d7..394132635 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Service.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Service.java
@@ -28,7 +28,9 @@ ListAccountSettingsMetadataResponse listAccountSettingsMetadata(
/**
* Patch a setting value at account level. See :method:settingsv2/listaccountsettingsmetadata for
- * list of setting available via public APIs at account level.
+ * a list of settings available via public APIs at account level. To determine the correct field to
+ * include in a patch request, refer to the type field of the setting returned in the
+ * :method:settingsv2/listaccountsettingsmetadata response.
*/
Setting patchPublicAccountSetting(
PatchPublicAccountSettingRequest patchPublicAccountSettingRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicWorkspaceSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicWorkspaceSettingRequest.java
index 928c553e6..762c2ea6c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicWorkspaceSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicWorkspaceSettingRequest.java
@@ -9,7 +9,7 @@
@Generated
public class GetPublicWorkspaceSettingRequest {
- /** */
+ /** Name of the setting */
@JsonIgnore private String name;
public GetPublicWorkspaceSettingRequest setName(String name) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicWorkspaceSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicWorkspaceSettingRequest.java
index 595a9704b..e3722e5b5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicWorkspaceSettingRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicWorkspaceSettingRequest.java
@@ -10,7 +10,7 @@
@Generated
public class PatchPublicWorkspaceSettingRequest {
- /** */
+ /** Name of the setting */
@JsonIgnore private String name;
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/Setting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/Setting.java
index e5bc56bc6..65473a94a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/Setting.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/Setting.java
@@ -9,55 +9,95 @@
@Generated
public class Setting {
- /** */
+ /**
+ * Setting value for aibi_dashboard_embedding_access_policy setting. This is the setting value set
+ * by consumers; check effective_aibi_dashboard_embedding_access_policy for final setting value.
+ */
@JsonProperty("aibi_dashboard_embedding_access_policy")
private AibiDashboardEmbeddingAccessPolicy aibiDashboardEmbeddingAccessPolicy;
- /** */
+ /**
+ * Setting value for aibi_dashboard_embedding_approved_domains setting. This is the setting value
+ * set by consumers; check effective_aibi_dashboard_embedding_approved_domains for final setting
+ * value.
+ */
@JsonProperty("aibi_dashboard_embedding_approved_domains")
private AibiDashboardEmbeddingApprovedDomains aibiDashboardEmbeddingApprovedDomains;
- /** */
+ /**
+ * Setting value for automatic_cluster_update_workspace setting. This is the setting value set by
+ * consumers; check effective_automatic_cluster_update_workspace for final setting value.
+ */
@JsonProperty("automatic_cluster_update_workspace")
private ClusterAutoRestartMessage automaticClusterUpdateWorkspace;
- /** */
+ /**
+ * Setting value for boolean type setting. This is the setting value set by consumers; check
+ * effective_boolean_val for final setting value.
+ */
@JsonProperty("boolean_val")
private BooleanMessage booleanVal;
- /** */
+ /**
+ * Effective setting value for aibi_dashboard_embedding_access_policy setting. This is the final
+ * effective value of setting. To set a value use aibi_dashboard_embedding_access_policy.
+ */
@JsonProperty("effective_aibi_dashboard_embedding_access_policy")
private AibiDashboardEmbeddingAccessPolicy effectiveAibiDashboardEmbeddingAccessPolicy;
- /** */
+ /**
+ * Effective setting value for aibi_dashboard_embedding_approved_domains setting. This is the
+ * final effective value of setting. To set a value use aibi_dashboard_embedding_approved_domains.
+ */
@JsonProperty("effective_aibi_dashboard_embedding_approved_domains")
private AibiDashboardEmbeddingApprovedDomains effectiveAibiDashboardEmbeddingApprovedDomains;
- /** */
+ /**
+ * Effective setting value for automatic_cluster_update_workspace setting. This is the final
+ * effective value of setting. To set a value use automatic_cluster_update_workspace.
+ */
@JsonProperty("effective_automatic_cluster_update_workspace")
private ClusterAutoRestartMessage effectiveAutomaticClusterUpdateWorkspace;
- /** */
+ /**
+ * Effective setting value for boolean type setting. This is the final effective value of setting.
+ * To set a value use boolean_val.
+ */
@JsonProperty("effective_boolean_val")
private BooleanMessage effectiveBooleanVal;
- /** */
+ /**
+ * Effective setting value for integer type setting. This is the final effective value of setting.
+ * To set a value use integer_val.
+ */
@JsonProperty("effective_integer_val")
private IntegerMessage effectiveIntegerVal;
- /** */
+ /**
+ * Effective setting value for personal_compute setting. This is the final effective value of
+ * setting. To set a value use personal_compute.
+ */
@JsonProperty("effective_personal_compute")
private PersonalComputeMessage effectivePersonalCompute;
- /** */
+ /**
+ * Effective setting value for restrict_workspace_admins setting. This is the final effective
+ * value of setting. To set a value use restrict_workspace_admins.
+ */
@JsonProperty("effective_restrict_workspace_admins")
private RestrictWorkspaceAdminsMessage effectiveRestrictWorkspaceAdmins;
- /** */
+ /**
+ * Effective setting value for string type setting. This is the final effective value of setting.
+ * To set a value use string_val.
+ */
@JsonProperty("effective_string_val")
private StringMessage effectiveStringVal;
- /** */
+ /**
+ * Setting value for an integer-type setting. This is the setting value set by consumers; check
+ * effective_integer_val for the final setting value.
+ */
@JsonProperty("integer_val")
private IntegerMessage integerVal;
@@ -65,15 +105,24 @@ public class Setting {
@JsonProperty("name")
private String name;
- /** */
+ /**
+ * Setting value for the personal_compute setting. This is the setting value set by consumers;
+ * check effective_personal_compute for the final setting value.
+ */
@JsonProperty("personal_compute")
private PersonalComputeMessage personalCompute;
- /** */
+ /**
+ * Setting value for the restrict_workspace_admins setting. This is the setting value set by
+ * consumers; check effective_restrict_workspace_admins for the final setting value.
+ */
@JsonProperty("restrict_workspace_admins")
private RestrictWorkspaceAdminsMessage restrictWorkspaceAdmins;
- /** */
+ /**
+ * Setting value for a string-type setting. This is the setting value set by consumers; check
+ * effective_string_val for the final setting value.
+ */
@JsonProperty("string_val")
private StringMessage stringVal;
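The hunk above rewrites every set/effective Javadoc pair in `Setting`. A minimal sketch of how such a pair is meant to be read, assuming a configured `WorkspaceClient`, a `settingsV2()` accessor, and a `GetPublicWorkspaceSettingRequest` getter type (none of which appear in this diff):

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settingsv2.Setting;

public class SetVsEffectiveExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // auth resolved from the environment
    // Hypothetical getter call; the request type name is an assumption.
    Setting s =
        w.settingsV2()
            .getPublicWorkspaceSetting(
                new GetPublicWorkspaceSettingRequest()
                    .setName("aibi_dashboard_embedding_access_policy"));
    // The consumer-set value may be null if the setting was never patched;
    // the effective_* counterpart always carries the final resolved value.
    System.out.println("set:       " + s.getAibiDashboardEmbeddingAccessPolicy());
    System.out.println("effective: " + s.getEffectiveAibiDashboardEmbeddingAccessPolicy());
  }
}
```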
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/SettingsMetadata.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/SettingsMetadata.java
index 3708c5e3d..dca2478fb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/SettingsMetadata.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/SettingsMetadata.java
@@ -21,7 +21,10 @@ public class SettingsMetadata {
@JsonProperty("name")
private String name;
- /** Type of the setting. To set this setting, the value sent must match this type. */
+ /**
+ * Sample message depicting the type of the setting. To set this setting, the value sent must
+ * match this type.
+ */
@JsonProperty("type")
private String typeValue;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java
index d2281117f..27ecc0d03 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java
@@ -58,7 +58,9 @@ public Iterable<SettingsMetadata> listWorkspaceSettingsMetadata(
/**
* Patch a setting value at workspace level. See :method:settingsv2/listworkspacesettingsmetadata
- * for list of setting available via public APIs at workspace level.
+ * for the list of settings available via public APIs at the workspace level. To determine the
+ * correct field to include in a patch request, refer to the type field of the setting returned
+ * in the :method:settingsv2/listworkspacesettingsmetadata response.
*/
public Setting patchPublicWorkspaceSetting(PatchPublicWorkspaceSettingRequest request) {
return impl.patchPublicWorkspaceSetting(request);
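The patch flow this Javadoc describes can be sketched end to end: list the metadata, read each setting's `type`, then include only the matching value field in the patch. The `settingsV2()` accessor, the `ListWorkspaceSettingsMetadataRequest` type, and the `setSetting(...)` shape of the patch request are assumptions beyond what this diff confirms:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settingsv2.*;

public class PatchSettingSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Step 1: inspect the advertised type of each workspace-level setting.
    for (SettingsMetadata m :
        w.settingsV2().listWorkspaceSettingsMetadata(new ListWorkspaceSettingsMetadataRequest())) {
      System.out.println(m.getName() + " -> " + m.getTypeValue());
    }
    // Step 2: for a setting whose type is a boolean message, send boolean_val only.
    // "my_boolean_setting" and the request shape are illustrative.
    Setting patched =
        w.settingsV2()
            .patchPublicWorkspaceSetting(
                new PatchPublicWorkspaceSettingRequest()
                    .setName("my_boolean_setting")
                    .setSetting(new Setting().setBooleanVal(new BooleanMessage())));
    System.out.println(patched.getEffectiveBooleanVal());
  }
}
```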
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Service.java
index 7f23eab6e..0d37055bb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Service.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Service.java
@@ -29,7 +29,9 @@ ListWorkspaceSettingsMetadataResponse listWorkspaceSettingsMetadata(
/**
* Patch a setting value at workspace level. See :method:settingsv2/listworkspacesettingsmetadata
- * for list of setting available via public APIs at workspace level.
+ * for the list of settings available via public APIs at the workspace level. To determine the
+ * correct field to include in a patch request, refer to the type field of the setting returned
+ * in the :method:settingsv2/listworkspacesettingsmetadata response.
*/
Setting patchPublicWorkspaceSetting(
PatchPublicWorkspaceSettingRequest patchPublicWorkspaceSettingRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java
index 2e8776e18..ca1ac3fe0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java
@@ -9,7 +9,7 @@
@Generated
public class AlertV2OperandColumn {
- /** */
+ /** If not set, the behavior is equivalent to using `First row` in the UI. */
@JsonProperty("aggregation")
private Aggregation aggregation;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java
index d6a04f45a..ddd2de72a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java
@@ -71,7 +71,6 @@ public enum TerminationReasonCode {
DOCKER_IMAGE_PULL_FAILURE,
DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION,
DOCKER_INVALID_OS_EXCEPTION,
- DRIVER_DNS_RESOLUTION_FAILURE,
DRIVER_EVICTION,
DRIVER_LAUNCH_TIMEOUT,
DRIVER_NODE_UNREACHABLE,
@@ -146,8 +145,6 @@ public enum TerminationReasonCode {
NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CONFIGURATION_FAILURE,
NFS_MOUNT_FAILURE,
- NO_ACTIVATED_K8S,
- NO_ACTIVATED_K8S_TESTING_TAG,
NO_MATCHED_K8S,
NO_MATCHED_K8S_TESTING_TAG,
NPIP_TUNNEL_SETUP_FAILURE,
@@ -160,7 +157,6 @@ public enum TerminationReasonCode {
SECRET_CREATION_FAILURE,
SECRET_PERMISSION_DENIED,
SECRET_RESOLUTION_ERROR,
- SECURITY_AGENTS_FAILED_INITIAL_VERIFICATION,
SECURITY_DAEMON_REGISTRATION_EXCEPTION,
SELF_BOOTSTRAP_FAILURE,
SERVERLESS_LONG_RUNNING_TERMINATED,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java
index 92724193a..850e5a07a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesAPI.java
@@ -8,11 +8,14 @@
import org.slf4j.LoggerFactory;
/**
- * The Tag Policy API allows you to manage policies for governed tags in Databricks. Permissions for
- * tag policies can be managed using the [Account Access Control Proxy API].
+ * The Tag Policy API allows you to manage policies for governed tags in Databricks. For Terraform
+ * usage, see the [Tag Policy Terraform documentation]. Permissions for tag policies can be managed
+ * using the [Account Access Control Proxy API].
*
* <p>[Account Access Control Proxy API]:
[Account Access Control Proxy API]:
- * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/tag_policy
*/
@Generated
public class TagPoliciesAPI {
@@ -30,7 +33,16 @@ public TagPoliciesAPI(TagPoliciesService mock) {
impl = mock;
}
- /** Creates a new tag policy, making the associated tag key governed. */
+ /**
+ * Creates a new tag policy, making the associated tag key governed. For Terraform usage, see the
+ * [Tag Policy Terraform documentation]. To manage permissions for tag policies, use the [Account
+ * Access Control Proxy API].
+ *
+ * <p>[Account Access Control Proxy API]:
[Account Access Control Proxy API]:
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/tag_policy
+ */
public TagPolicy createTagPolicy(CreateTagPolicyRequest request) {
return impl.createTagPolicy(request);
}
@@ -39,7 +51,13 @@ public void deleteTagPolicy(String tagKey) {
deleteTagPolicy(new DeleteTagPolicyRequest().setTagKey(tagKey));
}
- /** Deletes a tag policy by its associated governed tag's key, leaving that tag key ungoverned. */
+ /**
+ * Deletes a tag policy by its associated governed tag's key, leaving that tag key ungoverned. For
+ * Terraform usage, see the [Tag Policy Terraform documentation].
+ *
+ * <p>[Tag Policy Terraform documentation]:
[Tag Policy Terraform documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/tag_policy
+ */
public void deleteTagPolicy(DeleteTagPolicyRequest request) {
impl.deleteTagPolicy(request);
}
@@ -48,12 +66,30 @@ public TagPolicy getTagPolicy(String tagKey) {
return getTagPolicy(new GetTagPolicyRequest().setTagKey(tagKey));
}
- /** Gets a single tag policy by its associated governed tag's key. */
+ /**
+ * Gets a single tag policy by its associated governed tag's key. For Terraform usage, see the
+ * [Tag Policy Terraform documentation]. To list granted permissions for tag policies, use the
+ * [Account Access Control Proxy API].
+ *
+ * <p>[Account Access Control Proxy API]:
[Account Access Control Proxy API]:
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/data-sources/tag_policy
+ */
public TagPolicy getTagPolicy(GetTagPolicyRequest request) {
return impl.getTagPolicy(request);
}
- /** Lists the tag policies for all governed tags in the account. */
+ /**
+ * Lists the tag policies for all governed tags in the account. For Terraform usage, see the [Tag
+ * Policy Terraform documentation]. To list granted permissions for tag policies, use the [Account
+ * Access Control Proxy API].
+ *
+ * <p>[Account Access Control Proxy API]:
[Account Access Control Proxy API]:
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/data-sources/tag_policies
+ */
public Iterable<TagPolicy> listTagPolicies(ListTagPoliciesRequest request) {
return new Paginator<>(
request,
@@ -68,7 +104,16 @@ public Iterable listTagPolicies(ListTagPoliciesRequest request) {
});
}
- /** Updates an existing tag policy for a single governed tag. */
+ /**
+ * Updates an existing tag policy for a single governed tag. For Terraform usage, see the [Tag
+ * Policy Terraform documentation]. To manage permissions for tag policies, use the [Account
+ * Access Control Proxy API].
+ *
+ * <p>[Account Access Control Proxy API]:
[Account Access Control Proxy API]:
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/tag_policy
+ */
public TagPolicy updateTagPolicy(UpdateTagPolicyRequest request) {
return impl.updateTagPolicy(request);
}
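Taken together, the methods above form a small CRUD surface. A sketch under stated assumptions: the `tagPolicies()` accessor on `WorkspaceClient` and the `setTagPolicy(...)` shape of `CreateTagPolicyRequest` are guesses; the `String` convenience overloads for get and delete are confirmed by this diff:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.tags.CreateTagPolicyRequest;
import com.databricks.sdk.service.tags.TagPolicy;

public class TagPolicySketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Create a policy, making the tag key "cost-center" governed.
    TagPolicy created =
        w.tagPolicies()
            .createTagPolicy(
                new CreateTagPolicyRequest()
                    .setTagPolicy(new TagPolicy().setTagKey("cost-center")));
    // Look up and delete by tag key via the convenience overloads.
    TagPolicy fetched = w.tagPolicies().getTagPolicy("cost-center");
    System.out.println(fetched);
    w.tagPolicies().deleteTagPolicy("cost-center"); // the tag key becomes ungoverned again
  }
}
```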
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesService.java
index fc092fd39..89937cf8b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/tags/TagPoliciesService.java
@@ -4,11 +4,14 @@
import com.databricks.sdk.support.Generated;
/**
- * The Tag Policy API allows you to manage policies for governed tags in Databricks. Permissions for
- * tag policies can be managed using the [Account Access Control Proxy API].
+ * The Tag Policy API allows you to manage policies for governed tags in Databricks. For Terraform
+ * usage, see the [Tag Policy Terraform documentation]. Permissions for tag policies can be managed
+ * using the [Account Access Control Proxy API].
*
* <p>This is the high-level interface, that contains generated methods.
This is the high-level interface, that contains generated methods.
*
@@ -16,18 +19,60 @@
*/
@Generated
public interface TagPoliciesService {
- /** Creates a new tag policy, making the associated tag key governed. */
+ /**
+ * Creates a new tag policy, making the associated tag key governed. For Terraform usage, see the
+ * [Tag Policy Terraform documentation]. To manage permissions for tag policies, use the [Account
+ * Access Control Proxy API].
+ *
+ * <p>[Account Access Control Proxy API]:
[Account Access Control Proxy API]:
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/tag_policy
+ */
TagPolicy createTagPolicy(CreateTagPolicyRequest createTagPolicyRequest);
- /** Deletes a tag policy by its associated governed tag's key, leaving that tag key ungoverned. */
+ /**
+ * Deletes a tag policy by its associated governed tag's key, leaving that tag key ungoverned. For
+ * Terraform usage, see the [Tag Policy Terraform documentation].
+ *
+ * <p>[Tag Policy Terraform documentation]:
[Tag Policy Terraform documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/tag_policy
+ */
void deleteTagPolicy(DeleteTagPolicyRequest deleteTagPolicyRequest);
- /** Gets a single tag policy by its associated governed tag's key. */
+ /**
+ * Gets a single tag policy by its associated governed tag's key. For Terraform usage, see the
+ * [Tag Policy Terraform documentation]. To list granted permissions for tag policies, use the
+ * [Account Access Control Proxy API].
+ *
+ * <p>[Account Access Control Proxy API]:
[Account Access Control Proxy API]:
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/data-sources/tag_policy
+ */
TagPolicy getTagPolicy(GetTagPolicyRequest getTagPolicyRequest);
- /** Lists the tag policies for all governed tags in the account. */
+ /**
+ * Lists the tag policies for all governed tags in the account. For Terraform usage, see the [Tag
+ * Policy Terraform documentation]. To list granted permissions for tag policies, use the [Account
+ * Access Control Proxy API].
+ *
+ * <p>[Account Access Control Proxy API]:
[Account Access Control Proxy API]:
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/data-sources/tag_policies
+ */
ListTagPoliciesResponse listTagPolicies(ListTagPoliciesRequest listTagPoliciesRequest);
- /** Updates an existing tag policy for a single governed tag. */
+ /**
+ * Updates an existing tag policy for a single governed tag. For Terraform usage, see the [Tag
+ * Policy Terraform documentation]. To manage permissions for tag policies, use the [Account
+ * Access Control Proxy API].
+ *
+ * <p>[Account Access Control Proxy API]:
[Account Access Control Proxy API]:
+ * https://docs.databricks.com/api/workspace/accountaccesscontrolproxy [Tag Policy Terraform
+ * documentation]:
+ * https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/tag_policy
+ */
TagPolicy updateTagPolicy(UpdateTagPolicyRequest updateTagPolicyRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java
index e403d7e5a..52cc75acb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ColumnInfo.java
@@ -13,6 +13,10 @@ public class ColumnInfo {
@JsonProperty("name")
private String name;
+ /** Data type of the column (e.g., "string", "int", "array") */
+ @JsonProperty("type_text")
+ private String typeText;
+
public ColumnInfo setName(String name) {
this.name = name;
return this;
@@ -22,21 +26,30 @@ public String getName() {
return name;
}
+ public ColumnInfo setTypeText(String typeText) {
+ this.typeText = typeText;
+ return this;
+ }
+
+ public String getTypeText() {
+ return typeText;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ColumnInfo that = (ColumnInfo) o;
- return Objects.equals(name, that.name);
+ return Objects.equals(name, that.name) && Objects.equals(typeText, that.typeText);
}
@Override
public int hashCode() {
- return Objects.hash(name);
+ return Objects.hash(name, typeText);
}
@Override
public String toString() {
- return new ToStringer(ColumnInfo.class).add("name", name).toString();
+ return new ToStringer(ColumnInfo.class).add("name", name).add("typeText", typeText).toString();
}
}
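The new field follows the SDK's fluent-setter style; a two-line sketch (the type string is illustrative):

```java
import com.databricks.sdk.service.vectorsearch.ColumnInfo;

public class ColumnInfoSketch {
  public static void main(String[] args) {
    // type_text carries the column's data type as free-form text.
    ColumnInfo col = new ColumnInfo().setName("embedding").setTypeText("array<float>");
    System.out.println(col); // toString() now includes typeText alongside name
  }
}
```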
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java
index 6356c17ed..302e74033 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointStatusState.java
@@ -10,4 +10,6 @@ public enum EndpointStatusState {
OFFLINE,
ONLINE,
PROVISIONING,
+ RED_STATE,
+ YELLOW_STATE,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Metric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Metric.java
new file mode 100755
index 000000000..55b920283
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Metric.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Metric specification */
+@Generated
+public class Metric {
+ /** Metric labels */
+ @JsonProperty("labels")
+ private Collection<MetricLabel> labels;
+
+ /** Metric name */
+ @JsonProperty("name")
+ private String name;
+
+ /** Percentile for the metric */
+ @JsonProperty("percentile")
+ private Double percentile;
+
+ public Metric setLabels(Collection<MetricLabel> labels) {
+ this.labels = labels;
+ return this;
+ }
+
+ public Collection<MetricLabel> getLabels() {
+ return labels;
+ }
+
+ public Metric setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public Metric setPercentile(Double percentile) {
+ this.percentile = percentile;
+ return this;
+ }
+
+ public Double getPercentile() {
+ return percentile;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Metric that = (Metric) o;
+ return Objects.equals(labels, that.labels)
+ && Objects.equals(name, that.name)
+ && Objects.equals(percentile, that.percentile);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(labels, name, percentile);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Metric.class)
+ .add("labels", labels)
+ .add("name", name)
+ .add("percentile", percentile)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricLabel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricLabel.java
new file mode 100755
index 000000000..fdfd49e83
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricLabel.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Label for a metric */
+@Generated
+public class MetricLabel {
+ /** Label name */
+ @JsonProperty("name")
+ private String name;
+
+ /** Label value */
+ @JsonProperty("value")
+ private String value;
+
+ public MetricLabel setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public MetricLabel setValue(String value) {
+ this.value = value;
+ return this;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MetricLabel that = (MetricLabel) o;
+ return Objects.equals(name, that.name) && Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(MetricLabel.class).add("name", name).add("value", value).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricValue.java
new file mode 100755
index 000000000..7eca58c0d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricValue.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Single metric value at a specific timestamp */
+@Generated
+public class MetricValue {
+ /** Timestamp of the metric value (milliseconds since epoch) */
+ @JsonProperty("timestamp")
+ private Long timestamp;
+
+ /** Metric value */
+ @JsonProperty("value")
+ private Double value;
+
+ public MetricValue setTimestamp(Long timestamp) {
+ this.timestamp = timestamp;
+ return this;
+ }
+
+ public Long getTimestamp() {
+ return timestamp;
+ }
+
+ public MetricValue setValue(Double value) {
+ this.value = value;
+ return this;
+ }
+
+ public Double getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MetricValue that = (MetricValue) o;
+ return Objects.equals(timestamp, that.timestamp) && Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(timestamp, value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(MetricValue.class)
+ .add("timestamp", timestamp)
+ .add("value", value)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricValues.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricValues.java
new file mode 100755
index 000000000..2b2e9eaad
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MetricValues.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Collection of metric values for a specific metric */
+@Generated
+public class MetricValues {
+ /** Metric specification */
+ @JsonProperty("metric")
+ private Metric metric;
+
+ /** Time series of metric values */
+ @JsonProperty("values")
+ private Collection<MetricValue> values;
+
+ public MetricValues setMetric(Metric metric) {
+ this.metric = metric;
+ return this;
+ }
+
+ public Metric getMetric() {
+ return metric;
+ }
+
+ public MetricValues setValues(Collection<MetricValue> values) {
+ this.values = values;
+ return this;
+ }
+
+ public Collection<MetricValue> getValues() {
+ return values;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MetricValues that = (MetricValues) o;
+ return Objects.equals(metric, that.metric) && Objects.equals(values, that.values);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(metric, values);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(MetricValues.class)
+ .add("metric", metric)
+ .add("values", values)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java
index bdc74194e..98d3df913 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java
@@ -42,7 +42,7 @@ public class QueryVectorIndexRequest {
@JsonProperty("query_text")
private String queryText;
- /** The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`. */
+ /** The query type to use. Choices are `ANN`, `HYBRID`, and `FULL_TEXT`. Defaults to `ANN`. */
@JsonProperty("query_type")
private String queryType;
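A sketch of a `FULL_TEXT` query using the documented field names; the `vectorSearchIndexes()` accessor, the `queryIndex` method, and the `setColumns` setter are assumptions not shown in this hunk:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.vectorsearch.QueryVectorIndexRequest;
import java.util.Arrays;

public class FullTextQuerySketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // FULL_TEXT matches query_text by keywords instead of vector similarity
    // (HYBRID blends the two; ANN remains the default).
    w.vectorSearchIndexes()
        .queryIndex(
            new QueryVectorIndexRequest()
                .setIndexName("catalog.schema.my_index") // illustrative index name
                .setColumns(Arrays.asList("id", "text"))
                .setQueryText("databricks vector search")
                .setQueryType("FULL_TEXT"));
  }
}
```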
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RetrieveUserVisibleMetricsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RetrieveUserVisibleMetricsRequest.java
new file mode 100755
index 000000000..89cb381be
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RetrieveUserVisibleMetricsRequest.java
@@ -0,0 +1,121 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Request to retrieve user-visible metrics */
+@Generated
+public class RetrieveUserVisibleMetricsRequest {
+ /** End time for metrics query */
+ @JsonProperty("end_time")
+ private String endTime;
+
+ /** Granularity in seconds */
+ @JsonProperty("granularity_in_seconds")
+ private Long granularityInSeconds;
+
+ /** List of metrics to retrieve */
+ @JsonProperty("metrics")
+ private Collection<Metric> metrics;
+
+ /** Vector search endpoint name */
+ @JsonIgnore private String name;
+
+ /** Token for pagination */
+ @JsonProperty("page_token")
+ private String pageToken;
+
+ /** Start time for metrics query */
+ @JsonProperty("start_time")
+ private String startTime;
+
+ public RetrieveUserVisibleMetricsRequest setEndTime(String endTime) {
+ this.endTime = endTime;
+ return this;
+ }
+
+ public String getEndTime() {
+ return endTime;
+ }
+
+ public RetrieveUserVisibleMetricsRequest setGranularityInSeconds(Long granularityInSeconds) {
+ this.granularityInSeconds = granularityInSeconds;
+ return this;
+ }
+
+ public Long getGranularityInSeconds() {
+ return granularityInSeconds;
+ }
+
+ public RetrieveUserVisibleMetricsRequest setMetrics(Collection<Metric> metrics) {
+ this.metrics = metrics;
+ return this;
+ }
+
+ public Collection<Metric> getMetrics() {
+ return metrics;
+ }
+
+ public RetrieveUserVisibleMetricsRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public RetrieveUserVisibleMetricsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ public RetrieveUserVisibleMetricsRequest setStartTime(String startTime) {
+ this.startTime = startTime;
+ return this;
+ }
+
+ public String getStartTime() {
+ return startTime;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RetrieveUserVisibleMetricsRequest that = (RetrieveUserVisibleMetricsRequest) o;
+ return Objects.equals(endTime, that.endTime)
+ && Objects.equals(granularityInSeconds, that.granularityInSeconds)
+ && Objects.equals(metrics, that.metrics)
+ && Objects.equals(name, that.name)
+ && Objects.equals(pageToken, that.pageToken)
+ && Objects.equals(startTime, that.startTime);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(endTime, granularityInSeconds, metrics, name, pageToken, startTime);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RetrieveUserVisibleMetricsRequest.class)
+ .add("endTime", endTime)
+ .add("granularityInSeconds", granularityInSeconds)
+ .add("metrics", metrics)
+ .add("name", name)
+ .add("pageToken", pageToken)
+ .add("startTime", startTime)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RetrieveUserVisibleMetricsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RetrieveUserVisibleMetricsResponse.java
new file mode 100755
index 000000000..4d3a69f6d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RetrieveUserVisibleMetricsResponse.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/** Response containing user-visible metrics */
+@Generated
+public class RetrieveUserVisibleMetricsResponse {
+ /** Collection of metric values */
+ @JsonProperty("metric_values")
+ private Collection<MetricValues> metricValues;
+
+ /**
+ * A token that can be used to get the next page of results. If not present, there are no more
+ * results to show.
+ */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public RetrieveUserVisibleMetricsResponse setMetricValues(Collection<MetricValues> metricValues) {
+ this.metricValues = metricValues;
+ return this;
+ }
+
+ public Collection<MetricValues> getMetricValues() {
+ return metricValues;
+ }
+
+ public RetrieveUserVisibleMetricsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RetrieveUserVisibleMetricsResponse that = (RetrieveUserVisibleMetricsResponse) o;
+ return Objects.equals(metricValues, that.metricValues)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(metricValues, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RetrieveUserVisibleMetricsResponse.class)
+ .add("metricValues", metricValues)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java
index a9ce746e5..936d1fd7f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java
@@ -121,6 +121,12 @@ public Iterable<EndpointInfo> listEndpoints(ListEndpointsRequest request) {
});
}
+ /** Retrieve user-visible metrics for an endpoint */
+ public RetrieveUserVisibleMetricsResponse retrieveUserVisibleMetrics(
+ RetrieveUserVisibleMetricsRequest request) {
+ return impl.retrieveUserVisibleMetrics(request);
+ }
+
/** Update the budget policy of an endpoint */
public PatchEndpointBudgetPolicyResponse updateEndpointBudgetPolicy(
PatchEndpointBudgetPolicyRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java
index 1554bf96f..b3ea315c2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsImpl.java
@@ -73,6 +73,22 @@ public ListEndpointResponse listEndpoints(ListEndpointsRequest request) {
}
}
+ @Override
+ public RetrieveUserVisibleMetricsResponse retrieveUserVisibleMetrics(
+ RetrieveUserVisibleMetricsRequest request) {
+ String path = String.format("/api/2.0/vector-search/endpoints/%s/metrics", request.getName());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, RetrieveUserVisibleMetricsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public PatchEndpointBudgetPolicyResponse updateEndpointBudgetPolicy(
PatchEndpointBudgetPolicyRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsService.java
index 150d294a3..919394cf1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsService.java
@@ -24,6 +24,10 @@ public interface VectorSearchEndpointsService {
/** List all vector search endpoints in the workspace. */
ListEndpointResponse listEndpoints(ListEndpointsRequest listEndpointsRequest);
+ /** Retrieve user-visible metrics for an endpoint */
+ RetrieveUserVisibleMetricsResponse retrieveUserVisibleMetrics(
+ RetrieveUserVisibleMetricsRequest retrieveUserVisibleMetricsRequest);
+
/** Update the budget policy of an endpoint */
PatchEndpointBudgetPolicyResponse updateEndpointBudgetPolicy(
PatchEndpointBudgetPolicyRequest patchEndpointBudgetPolicyRequest);
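End to end, the new metrics call wires the request/response types above through `retrieveUserVisibleMetrics`. The `vectorSearchEndpoints()` accessor, the metric name, and the ISO-8601 time format are assumptions; the setters themselves are confirmed by this diff:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.vectorsearch.*;
import java.util.Arrays;

public class EndpointMetricsSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    RetrieveUserVisibleMetricsResponse resp =
        w.vectorSearchEndpoints()
            .retrieveUserVisibleMetrics(
                new RetrieveUserVisibleMetricsRequest()
                    .setName("my-endpoint") // endpoint name becomes the POST path segment
                    .setStartTime("2024-01-01T00:00:00Z")
                    .setEndTime("2024-01-02T00:00:00Z")
                    .setGranularityInSeconds(300L)
                    .setMetrics(Arrays.asList(new Metric().setName("qps").setPercentile(0.5))));
    for (MetricValues mv : resp.getMetricValues()) {
      for (MetricValue v : mv.getValues()) {
        System.out.println(
            mv.getMetric().getName() + " @ " + v.getTimestamp() + " = " + v.getValue());
      }
    }
  }
}
```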
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequest.java
index 6199a0737..dddda847b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionLevelsRequest.java
@@ -12,7 +12,11 @@ public class GetWorkspaceObjectPermissionLevelsRequest {
/** The workspace object for which to get or manage permissions. */
@JsonIgnore private String workspaceObjectId;
- /** The workspace object type for which to get or manage permissions. */
+ /**
+ * The workspace object type for which to get or manage permissions. Could be one of the
+ * following: alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files,
+ * genie, notebooks, queries
+ */
@JsonIgnore private String workspaceObjectType;
public GetWorkspaceObjectPermissionLevelsRequest setWorkspaceObjectId(String workspaceObjectId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequest.java
index b852f879f..2e0478247 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetWorkspaceObjectPermissionsRequest.java
@@ -12,7 +12,11 @@ public class GetWorkspaceObjectPermissionsRequest {
/** The workspace object for which to get or manage permissions. */
@JsonIgnore private String workspaceObjectId;
- /** The workspace object type for which to get or manage permissions. */
+ /**
+ * The workspace object type for which to get or manage permissions. Could be one of the
+ * following: alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files,
+ * genie, notebooks, queries
+ */
@JsonIgnore private String workspaceObjectType;
public GetWorkspaceObjectPermissionsRequest setWorkspaceObjectId(String workspaceObjectId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java
index 1ad8a644d..0aa5ab95f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java
@@ -18,7 +18,11 @@ public class WorkspaceObjectPermissionsRequest {
/** The workspace object for which to get or manage permissions. */
@JsonIgnore private String workspaceObjectId;
- /** The workspace object type for which to get or manage permissions. */
+ /**
+ * The workspace object type for which to get or manage permissions. Could be one of the
+ * following: alerts, alertsv2, dashboards, dbsql-dashboards, directories, experiments, files,
+ * genie, notebooks, queries
+ */
@JsonIgnore private String workspaceObjectType;
public WorkspaceObjectPermissionsRequest setAccessControlList(
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/HttpPathTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/HttpPathTest.java
index 9da7d1adf..5132dcc6c 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/HttpPathTest.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/HttpPathTest.java
@@ -15,6 +15,7 @@
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.protobuf.FieldMask;
import java.io.IOException;
import java.net.URL;
import java.util.*;
@@ -173,7 +174,10 @@ static List testCases() throws JsonProcessingException, JsonMappingExc
.setQueryParamString("query_string_val")
.setQueryParamInt(999L)
.setQueryParamBool(true)
- .setFieldMask("field.mask.value"));
+ .setFieldMask(
+ FieldMask.newBuilder()
+ .addAllPaths(Arrays.asList("field.mask.value".split(", ")))
+ .build()));
} catch (Exception e) {
throw new RuntimeException(e);
}
@@ -353,7 +357,10 @@ static List testCases() throws JsonProcessingException, JsonMappingExc
.setQueryParamString("query_string_val")
.setQueryParamInt(999L)
.setQueryParamBool(true)
- .setFieldMask("field.mask.value"));
+ .setFieldMask(
+ FieldMask.newBuilder()
+ .addAllPaths(Arrays.asList("field.mask.value".split(", ")))
+ .build()));
} catch (Exception e) {
throw new RuntimeException(e);
}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/MarshallTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/MarshallTest.java
index e885ce52a..8ace443c7 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/MarshallTest.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/MarshallTest.java
@@ -10,8 +10,11 @@
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.protobuf.FieldMask;
+import com.google.protobuf.util.Durations;
+import com.google.protobuf.util.Timestamps;
+import java.text.ParseException;
import java.util.*;
-import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
@@ -21,7 +24,7 @@
@ExtendWith(MockitoExtension.class)
public class MarshallTest {
@Mock private ApiClient mockApiClient;
- private ObjectMapper objectMapper;
+ private static final ObjectMapper objectMapper = SerDeUtils.createMapper();
static class MarshallTestCase {
String name;
@@ -35,13 +38,8 @@ static class MarshallTestCase {
}
}
- @BeforeEach
- void setUp() {
- objectMapper = SerDeUtils.createMapper();
- }
-
static List<MarshallTestCase> marshallTestCases()
- throws JsonProcessingException, JsonMappingException {
+ throws JsonProcessingException, JsonMappingException, ParseException {
List<MarshallTestCase> cases = new ArrayList<>();
cases.add(
new MarshallTestCase(
@@ -98,17 +96,21 @@ static List marshallTestCases()
cases.add(
new MarshallTestCase(
"OptionalDuration",
- new OptionalFields().setDuration("3600s"),
+ new OptionalFields().setDuration(Durations.parse("3600s")),
"{" + " \"duration\": \"3600s\"" + "}"));
cases.add(
new MarshallTestCase(
"OptionalFieldMask",
- new OptionalFields().setFieldMask("optional_string,optional_int32"),
+ new OptionalFields()
+ .setFieldMask(
+ FieldMask.newBuilder()
+ .addAllPaths(Arrays.asList("optional_string,optional_int32".split(", ")))
+ .build()),
"{" + " \"field_mask\": \"optional_string,optional_int32\"" + "}"));
cases.add(
new MarshallTestCase(
"OptionalTimestamp",
- new OptionalFields().setTimestamp("2023-01-01T00:00:00Z"),
+ new OptionalFields().setTimestamp(Timestamps.parse("2023-01-01T00:00:00Z")),
"{" + " \"timestamp\": \"2023-01-01T00:00:00Z\"" + "}"));
cases.add(
new MarshallTestCase(
@@ -128,56 +130,60 @@ static List marshallTestCases()
"RequiredFieldsExplicitDefaults",
new RequiredFields()
.setRequiredBool(false)
- .setRequiredDuration("0s")
- .setRequiredFieldMask("")
+ .setRequiredDuration(Durations.parse("0s"))
+ .setRequiredFieldMask(
+ FieldMask.newBuilder().addAllPaths(Arrays.asList("".split(", "))).build())
.setRequiredInt32(0L)
.setRequiredInt64(0L)
.setRequiredListValue(new ArrayList<>())
.setRequiredMessage(new NestedMessage())
.setRequiredString("")
.setRequiredStruct(new HashMap<>())
- .setRequiredTimestamp("1970-01-01T00:00:00Z")
- .setRequiredValue("{}")
+ .setRequiredTimestamp(Timestamps.parse("1970-01-01T00:00:00Z"))
+ .setRequiredValue(objectMapper.readTree("{}"))
.setTestRequiredEnum(TestEnum.TEST_ENUM_ONE),
"{"
- + " \"required_string\": \"\","
- + " \"required_int32\": 0,"
- + " \"required_int64\": 0,"
- + " \"required_bool\": false,"
- + " \"required_value\": \"{}\","
- + " \"required_list_value\": [],"
- + " \"required_struct\": {},"
- + " \"required_message\": {},"
- + " \"test_required_enum\": \"TEST_ENUM_ONE\","
- + " \"required_duration\": \"0s\","
- + " \"required_field_mask\": \"\","
- + " \"required_timestamp\": \"1970-01-01T00:00:00Z\""
- + " }"));
+ + " \"required_string\": \"\","
+ + " \"required_int32\": 0,"
+ + " \"required_int64\": 0,"
+ + " \"required_bool\": false,"
+ + " \"required_value\": {},"
+ + " \"required_list_value\": [],"
+ + " \"required_struct\": {},"
+ + " \"required_message\": {},"
+ + " \"test_required_enum\": \"TEST_ENUM_ONE\","
+ + " \"required_duration\": \"0s\","
+ + " \"required_field_mask\": \"\","
+ + " \"required_timestamp\": \"1970-01-01T00:00:00Z\""
+ + "}"));
cases.add(
new MarshallTestCase(
"RequiredFieldsNonDefaults",
new RequiredFields()
.setRequiredBool(true)
- .setRequiredDuration("7200s")
- .setRequiredFieldMask("required_string,required_int32")
+ .setRequiredDuration(Durations.parse("7200s"))
+ .setRequiredFieldMask(
+ FieldMask.newBuilder()
+ .addAllPaths(Arrays.asList("required_string,required_int32".split(", ")))
+ .build())
.setRequiredInt32(42L)
.setRequiredInt64(1234567890123456789L)
.setRequiredListValue(new ArrayList<>())
.setRequiredMessage(new NestedMessage())
.setRequiredString("non_default_string")
.setRequiredStruct(new HashMap<>())
- .setRequiredTimestamp("2023-12-31T23:59:59Z")
- .setRequiredValue("{}")
+ .setRequiredTimestamp(Timestamps.parse("2023-12-31T23:59:59Z"))
+ .setRequiredValue(objectMapper.readTree("{\"key\": \"value\"}"))
.setTestRequiredEnum(TestEnum.TEST_ENUM_TWO),
"{"
+ " \"required_string\": \"non_default_string\","
+ " \"required_int32\": 42,"
+ " \"required_int64\": 1234567890123456789,"
+ " \"required_bool\": true,"
- + " \"required_message\": {},"
- + " \"required_value\": \"{}\","
+ + " \"required_value\": {\"key\": \"value\"},"
+ " \"required_list_value\": [],"
+ " \"required_struct\": {},"
+ + " \"required_message\": {},"
+ " \"test_required_enum\": \"TEST_ENUM_TWO\","
+ " \"required_duration\": \"7200s\","
+ " \"required_field_mask\": \"required_string,required_int32\","
@@ -241,19 +247,32 @@ static List marshallTestCases()
cases.add(
new MarshallTestCase(
"RepeatedDuration",
- new RepeatedFields().setRepeatedDuration(Arrays.asList("60s", "120s", "180s")),
+ new RepeatedFields()
+ .setRepeatedDuration(
+ Arrays.asList(
+ Durations.parse("60s"), Durations.parse("120s"), Durations.parse("180s"))),
"{" + " \"repeated_duration\": [\"60s\", \"120s\", \"180s\"]" + "}"));
cases.add(
new MarshallTestCase(
"RepeatedFieldMask",
- new RepeatedFields().setRepeatedFieldMask(Arrays.asList("field1", "field2,field3")),
+ new RepeatedFields()
+ .setRepeatedFieldMask(
+ Arrays.asList(
+ FieldMask.newBuilder()
+ .addAllPaths(Arrays.asList("field1".split(", ")))
+ .build(),
+ FieldMask.newBuilder()
+ .addAllPaths(Arrays.asList("field2,field3".split(", ")))
+ .build())),
"{" + " \"repeated_field_mask\": [\"field1\", \"field2,field3\"]" + "}"));
cases.add(
new MarshallTestCase(
"RepeatedTimestamp",
new RepeatedFields()
.setRepeatedTimestamp(
- Arrays.asList("2023-01-01T00:00:00Z", "2023-01-02T00:00:00Z")),
+ Arrays.asList(
+ Timestamps.parse("2023-01-01T00:00:00Z"),
+ Timestamps.parse("2023-01-02T00:00:00Z"))),
"{"
+ " \"repeated_timestamp\": [\"2023-01-01T00:00:00Z\", \"2023-01-02T00:00:00Z\"]"
+ "}"));
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java
index ac2e90ec1..cca3bc159 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java
@@ -6,6 +6,7 @@
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.google.protobuf.FieldMask;
import java.util.Collection;
import java.util.Objects;
@@ -20,7 +21,7 @@ public class GetResourceRequest {
*/
@JsonIgnore
@QueryParam("field_mask")
- private String fieldMask;
+ private FieldMask fieldMask;
/** */
@JsonIgnore
@@ -61,12 +62,12 @@ public class GetResourceRequest {
@QueryParam("repeated_query_param")
private Collection repeatedQueryParam;
- public GetResourceRequest setFieldMask(String fieldMask) {
+ public GetResourceRequest setFieldMask(FieldMask fieldMask) {
this.fieldMask = fieldMask;
return this;
}
- public String getFieldMask() {
+ public FieldMask getFieldMask() {
return fieldMask;
}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java
index 96ccd80c3..201670a77 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java
@@ -7,6 +7,7 @@
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.FieldMask;
import java.util.Collection;
import java.util.Objects;
@@ -21,7 +22,7 @@ public class UpdateResourceRequest {
*/
@JsonIgnore
@QueryParam("field_mask")
- private String fieldMask;
+ private FieldMask fieldMask;
/** */
@JsonIgnore private Boolean nestedPathParamBool;
@@ -66,12 +67,12 @@ public class UpdateResourceRequest {
@JsonProperty("resource")
private Resource resource;
- public UpdateResourceRequest setFieldMask(String fieldMask) {
+ public UpdateResourceRequest setFieldMask(FieldMask fieldMask) {
this.fieldMask = fieldMask;
return this;
}
- public String getFieldMask() {
+ public FieldMask getFieldMask() {
return fieldMask;
}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java
index d045ceb25..764a29858 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java
@@ -6,6 +6,8 @@
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.protobuf.Duration;
+import com.google.protobuf.Timestamp;
import java.util.Objects;
@Generated
@@ -13,7 +15,7 @@ public class NestedMessage {
/** */
@JsonProperty("optional_duration")
@QueryParam("optional_duration")
- private String optionalDuration;
+ private Duration optionalDuration;
/** */
@JsonProperty("optional_string")
@@ -23,14 +25,14 @@ public class NestedMessage {
/** */
@JsonProperty("optional_timestamp")
@QueryParam("optional_timestamp")
- private String optionalTimestamp;
+ private Timestamp optionalTimestamp;
- public NestedMessage setOptionalDuration(String optionalDuration) {
+ public NestedMessage setOptionalDuration(Duration optionalDuration) {
this.optionalDuration = optionalDuration;
return this;
}
- public String getOptionalDuration() {
+ public Duration getOptionalDuration() {
return optionalDuration;
}
@@ -43,12 +45,12 @@ public String getOptionalString() {
return optionalString;
}
- public NestedMessage setOptionalTimestamp(String optionalTimestamp) {
+ public NestedMessage setOptionalTimestamp(Timestamp optionalTimestamp) {
this.optionalTimestamp = optionalTimestamp;
return this;
}
- public String getOptionalTimestamp() {
+ public Timestamp getOptionalTimestamp() {
return optionalTimestamp;
}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java
index 19caee741..f86cb107e 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java
@@ -6,6 +6,10 @@
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.protobuf.Duration;
+import com.google.protobuf.FieldMask;
+import com.google.protobuf.Timestamp;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
@@ -15,7 +19,7 @@ public class OptionalFields {
/** */
@JsonProperty("duration")
@QueryParam("duration")
- private String duration;
+ private Duration duration;
/**
* The field mask must be a single string, with multiple fields separated by commas (no spaces).
@@ -26,7 +30,7 @@ public class OptionalFields {
*/
@JsonProperty("field_mask")
@QueryParam("field_mask")
- private String fieldMask;
+ private FieldMask fieldMask;
/** Legacy Well Known types */
@JsonProperty("legacy_duration")
@@ -52,7 +56,7 @@ public class OptionalFields {
/** */
@JsonProperty("list_value")
@QueryParam("list_value")
- private Collection<Object /* MISSING TYPE */> listValue;
+ private Collection<JsonNode> listValue;
/** Lint disable reason: This is a dummy field used to test SDK Generation logic. */
@JsonProperty("map")
@@ -87,7 +91,7 @@ public class OptionalFields {
/** */
@JsonProperty("struct")
@QueryParam("struct")
- private Map<String, Object /* MISSING TYPE */> structValue;
+ private Map<String, JsonNode> structValue;
/** */
@JsonProperty("test_enum")
@@ -97,28 +101,28 @@ public class OptionalFields {
/** */
@JsonProperty("timestamp")
@QueryParam("timestamp")
- private String timestamp;
+ private Timestamp timestamp;
/** */
@JsonProperty("value")
@QueryParam("value")
- private Object /* MISSING TYPE */ value;
+ private JsonNode value;
- public OptionalFields setDuration(String duration) {
+ public OptionalFields setDuration(Duration duration) {
this.duration = duration;
return this;
}
- public String getDuration() {
+ public Duration getDuration() {
return duration;
}
- public OptionalFields setFieldMask(String fieldMask) {
+ public OptionalFields setFieldMask(FieldMask fieldMask) {
this.fieldMask = fieldMask;
return this;
}
- public String getFieldMask() {
+ public FieldMask getFieldMask() {
return fieldMask;
}
@@ -149,12 +153,12 @@ public String getLegacyTimestamp() {
return legacyTimestamp;
}
- public OptionalFields setListValue(Collection<Object /* MISSING TYPE */> listValue) {
+ public OptionalFields setListValue(Collection<JsonNode> listValue) {
this.listValue = listValue;
return this;
}
- public Collection<Object /* MISSING TYPE */> getListValue() {
+ public Collection<JsonNode> getListValue() {
return listValue;
}
@@ -212,12 +216,12 @@ public String getOptionalString() {
return optionalString;
}
- public OptionalFields setStruct(Map<String, Object /* MISSING TYPE */> structValue) {
+ public OptionalFields setStruct(Map<String, JsonNode> structValue) {
this.structValue = structValue;
return this;
}
- public Map<String, Object /* MISSING TYPE */> getStruct() {
+ public Map<String, JsonNode> getStruct() {
return structValue;
}
@@ -230,21 +234,21 @@ public TestEnum getTestEnum() {
return testEnum;
}
- public OptionalFields setTimestamp(String timestamp) {
+ public OptionalFields setTimestamp(Timestamp timestamp) {
this.timestamp = timestamp;
return this;
}
- public String getTimestamp() {
+ public Timestamp getTimestamp() {
return timestamp;
}
- public OptionalFields setValue(Object /* MISSING TYPE */ value) {
+ public OptionalFields setValue(JsonNode value) {
this.value = value;
return this;
}
- public Object /* MISSING TYPE */ getValue() {
+ public JsonNode getValue() {
return value;
}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java
index 4595b4a73..bfed3a4ac 100755
--- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java
@@ -6,6 +6,10 @@
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.protobuf.Duration;
+import com.google.protobuf.FieldMask;
+import com.google.protobuf.Timestamp;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
@@ -20,12 +24,12 @@ public class RepeatedFields {
/** */
@JsonProperty("repeated_duration")
@QueryParam("repeated_duration")
- private Collection<String> repeatedDuration;
+ private Collection<Duration> repeatedDuration;
/** */
@JsonProperty("repeated_field_mask")
@QueryParam("repeated_field_mask")
- private Collection<String> repeatedFieldMask;
+ private Collection<FieldMask> repeatedFieldMask;
/** */
@JsonProperty("repeated_int32")
@@ -40,7 +44,7 @@ public class RepeatedFields {
/** */
@JsonProperty("repeated_list_value")
@QueryParam("repeated_list_value")
- private Collection<Collection<Object /* MISSING TYPE */>> repeatedListValue;
+ private Collection<Collection<JsonNode>> repeatedListValue;
/** */
@JsonProperty("repeated_message")
@@ -55,17 +59,17 @@ public class RepeatedFields {
/** */
@JsonProperty("repeated_struct")
@QueryParam("repeated_struct")
- private Collection<Map<String, Object /* MISSING TYPE */>> repeatedStruct;
+ private Collection