diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 8cd956362..e7f752fb5 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-06a18b97d7996d6cd8dd88bfdb0f2c2792739e46
\ No newline at end of file
+ce962ccd0a078a5a9d89494fe38d237ce377d5f3
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 20de597f1..afaec1365 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -1897,7 +1897,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntit
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntitySpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadSize.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelSpec.java linguist-generated=true
@@ -1963,11 +1962,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Config.jav
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessList.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateIpAccessListResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfiguration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNotificationDestinationRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequestGroupId.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerAPI.java linguist-generated=true
@@ -2032,6 +2032,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetw
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyRestrictionMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestination.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressResourceType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnableExportNotebook.java linguist-generated=true
@@ -2109,7 +2110,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/MicrosoftT
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleGroupId.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java linguist-generated=true
@@ -2178,8 +2178,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnab
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNotificationDestinationRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePersonalComputeSettingRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateRestrictWorkspaceAdminsSettingRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceConfAPI.java linguist-generated=true
@@ -2270,10 +2272,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Volume.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/VolumeInternalAttributes.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AccessControl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Aggregation.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertCondition.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionOperand.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertConditionThreshold.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertEvaluationState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandColumn.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperandValue.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOperator.java linguist-generated=true
@@ -2281,12 +2285,22 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOptions.ja
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertOptionsEmptyResultState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQuery.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Evaluation.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Operand.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValue.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Subscription.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsLegacyService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Service.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/BaseChunkInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecutionResponse.java linguist-generated=true
@@ -2296,9 +2310,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.jav
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientConfig.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfoTypeName.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ComparisonOperator.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlert.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequestQuery.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryVisualizationsLegacyRequest.java linguist-generated=true
@@ -2308,6 +2324,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouse
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequestWarehouseType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWidget.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardEditContent.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/DashboardOptions.java linguist-generated=true
@@ -2360,6 +2377,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySo
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Format.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2Request.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertsLegacyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetDbsqlPermissionRequest.java linguist-generated=true
@@ -2384,6 +2402,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/LifecycleState.
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Request.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2ResponseAlert.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListDashboardsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListOrder.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListQueriesLegacyRequest.java linguist-generated=true
@@ -2446,6 +2467,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchema.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RunAsMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RunAsRole.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SchedulePauseStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceError.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceErrorCode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequest.java linguist-generated=true
@@ -2477,9 +2499,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TimeRange.java
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipObjectId.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TransferOwnershipRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2Request.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateQueryRequestQuery.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateResponse.java linguist-generated=true
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index f51d9f9b5..aafb04159 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -15,3 +15,22 @@
### Internal Changes
### API Changes
+* Added `workspaceClient.alertsV2()` service.
+* Added `updateNccAzurePrivateEndpointRulePublic()` method for `accountClient.networkConnectivity()` service.
+* Added `createdAt`, `createdBy` and `metastoreId` fields for `com.databricks.sdk.service.catalog.SetArtifactAllowlist`.
+* [Breaking] Added `networkConnectivityConfig` field for `com.databricks.sdk.service.settings.CreateNetworkConnectivityConfigRequest`.
+* [Breaking] Added `privateEndpointRule` field for `com.databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest`.
+* Added `domainNames` field for `com.databricks.sdk.service.settings.NccAzurePrivateEndpointRule`.
+* Added `autoResolveDisplayName` field for `com.databricks.sdk.service.sql.CreateAlertRequest`.
+* Added `autoResolveDisplayName` field for `com.databricks.sdk.service.sql.CreateQueryRequest`.
+* Added `CREATE_CLEAN_ROOM`, `EXECUTE_CLEAN_ROOM_TASK` and `MODIFY_CLEAN_ROOM` enum values for `com.databricks.sdk.service.catalog.Privilege`.
+* Added `DNS_RESOLUTION_ERROR` and `GCP_DENIED_BY_ORG_POLICY` enum values for `com.databricks.sdk.service.compute.TerminationReasonCode`.
+* Added `EXPIRED` enum value for `com.databricks.sdk.service.settings.NccAzurePrivateEndpointRuleConnectionState`.
+* [Breaking] Changed `createNetworkConnectivityConfiguration()` and `createPrivateEndpointRule()` methods for `accountClient.networkConnectivity()` service with new required argument order.
+* [Breaking] Changed `workloadSize` field for `com.databricks.sdk.service.serving.ServedModelInput` to type `String` class.
+* [Breaking] Changed `groupId` field for `com.databricks.sdk.service.settings.NccAzurePrivateEndpointRule` to type `String` class.
+* [Breaking] Changed `targetServices` field for `com.databricks.sdk.service.settings.NccAzureServiceEndpointRule` to type `com.databricks.sdk.service.settings.EgressResourceTypeList` class.
+* [Breaking] Removed `name` and `region` fields for `com.databricks.sdk.service.settings.CreateNetworkConnectivityConfigRequest`.
+* [Breaking] Removed `groupId` and `resourceId` fields for `com.databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest`.
+* [Breaking] Removed `LARGE`, `MEDIUM` and `SMALL` enum values for `com.databricks.sdk.service.serving.ServedModelInputWorkloadSize`.
+* [Breaking] Removed `BLOB`, `DFS`, `MYSQL_SERVER` and `SQL_SERVER` enum values for `com.databricks.sdk.service.settings.NccAzurePrivateEndpointRuleGroupId`.
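A minimal sketch of the new nested request shape behind the breaking `networkConnectivity()` changes listed above. The configuration name and region are placeholders, and the sketch assumes the request-object overload `createNetworkConnectivityConfiguration(CreateNetworkConnectivityConfigRequest)`:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.CreateNetworkConnectivityConfigRequest;
import com.databricks.sdk.service.settings.CreateNetworkConnectivityConfiguration;

class NccCreateSketch {
  // Name and region now travel inside a nested CreateNetworkConnectivityConfiguration body
  // instead of being top-level fields on the request.
  static void createNcc(AccountClient account) {
    account
        .networkConnectivity()
        .createNetworkConnectivityConfiguration(
            new CreateNetworkConnectivityConfigRequest()
                .setNetworkConnectivityConfig(
                    new CreateNetworkConnectivityConfiguration()
                        .setName("my-ncc") // placeholder name
                        .setRegion("eastus2"))); // placeholder region
  }
}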
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
index 1ff268bf4..85597b682 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
@@ -371,7 +371,14 @@ public AccountMetastoresAPI metastores() {
/**
* These APIs provide configurations for the network connectivity of your workspaces for
- * serverless compute resources.
+ * serverless compute resources. This API provides stable subnets for your workspace so that you
+ * can configure your firewalls on your Azure Storage accounts to allow access from Databricks.
+ * You can also use the API to provision private endpoints for Databricks to privately connect
+ * serverless compute resources to your Azure resources using Azure Private Link. See [configure
+ * serverless secure connectivity].
+ *
+ * <p>[configure serverless secure connectivity]:
+ * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security
*/
public NetworkConnectivityAPI networkConnectivity() {
return networkConnectivityAPI;
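A hedged sketch of provisioning a private endpoint rule through this service using the nested CreatePrivateEndpointRule body added in this change; the NCC ID and Azure resource ID are placeholders, and the request-object overload `createPrivateEndpointRule(CreatePrivateEndpointRuleRequest)` is assumed:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.CreatePrivateEndpointRule;
import com.databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest;

class PrivateEndpointRuleSketch {
  static void createRule(AccountClient account) {
    account
        .networkConnectivity()
        .createPrivateEndpointRule(
            new CreatePrivateEndpointRuleRequest()
                .setNetworkConnectivityConfigId("ncc-placeholder-id") // placeholder NCC ID
                .setPrivateEndpointRule(
                    new CreatePrivateEndpointRule()
                        .setResourceId("/subscriptions/.../storageAccounts/placeholder") // placeholder Azure resource ID
                        .setGroupId("blob"))); // sub-resource type: blob | dfs | sqlServer | mysqlServer
  }
}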
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index 84bc0cd44..8e3a89c79 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -166,6 +166,8 @@
import com.databricks.sdk.service.sql.AlertsLegacyAPI;
import com.databricks.sdk.service.sql.AlertsLegacyService;
import com.databricks.sdk.service.sql.AlertsService;
+import com.databricks.sdk.service.sql.AlertsV2API;
+import com.databricks.sdk.service.sql.AlertsV2Service;
import com.databricks.sdk.service.sql.DashboardWidgetsAPI;
import com.databricks.sdk.service.sql.DashboardWidgetsService;
import com.databricks.sdk.service.sql.DashboardsAPI;
@@ -213,6 +215,7 @@ public class WorkspaceClient {
private AccountAccessControlProxyAPI accountAccessControlProxyAPI;
private AlertsAPI alertsAPI;
private AlertsLegacyAPI alertsLegacyAPI;
+ private AlertsV2API alertsV2API;
private AppsAPI appsAPI;
private ArtifactAllowlistsAPI artifactAllowlistsAPI;
private CatalogsAPI catalogsAPI;
@@ -320,6 +323,7 @@ public WorkspaceClient(DatabricksConfig config) {
accountAccessControlProxyAPI = new AccountAccessControlProxyAPI(apiClient);
alertsAPI = new AlertsAPI(apiClient);
alertsLegacyAPI = new AlertsLegacyAPI(apiClient);
+ alertsV2API = new AlertsV2API(apiClient);
appsAPI = new AppsAPI(apiClient);
artifactAllowlistsAPI = new ArtifactAllowlistsAPI(apiClient);
catalogsAPI = new CatalogsAPI(apiClient);
@@ -467,6 +471,11 @@ public AlertsLegacyAPI alertsLegacy() {
return alertsLegacyAPI;
}
+ /** TODO: Add description */
+ public AlertsV2API alertsV2() {
+ return alertsV2API;
+ }
+
/**
* Apps run directly on a customer’s Databricks instance, integrate with their data, use and
* extend Databricks services, and enable users to interact through single sign-on.
@@ -1783,6 +1792,17 @@ public WorkspaceClient withAlertsLegacyAPI(AlertsLegacyAPI alertsLegacy) {
return this;
}
+ /** Replace the default AlertsV2Service with a custom implementation. */
+ public WorkspaceClient withAlertsV2Impl(AlertsV2Service alertsV2) {
+ return this.withAlertsV2API(new AlertsV2API(alertsV2));
+ }
+
+ /** Replace the default AlertsV2API with a custom implementation. */
+ public WorkspaceClient withAlertsV2API(AlertsV2API alertsV2) {
+ this.alertsV2API = alertsV2;
+ return this;
+ }
+
/** Replace the default AppsService with a custom implementation. */
public WorkspaceClient withAppsImpl(AppsService apps) {
return this.withAppsAPI(new AppsAPI(apps));
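A hedged sketch of the new AlertsV2 wiring added above. The Mockito mock only illustrates supplying a custom AlertsV2Service and is not part of this change:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.core.DatabricksConfig;
import com.databricks.sdk.service.sql.AlertsV2API;
import com.databricks.sdk.service.sql.AlertsV2Service;
import org.mockito.Mockito;

class AlertsV2WiringSketch {
  static AlertsV2API wire(DatabricksConfig config) {
    // Substitute a test double for the default AlertsV2 implementation, then read it back.
    AlertsV2Service fake = Mockito.mock(AlertsV2Service.class); // assumes Mockito on the test classpath
    WorkspaceClient workspace = new WorkspaceClient(config).withAlertsV2Impl(fake);
    return workspace.alertsV2();
  }
}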
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
index 4b611f216..e6e710494 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
@@ -179,6 +179,10 @@ public App waitGetAppStopped(String name, Duration timeout, Consumer callba
throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage));
}
+ public Wait create(App app) {
+ return create(new CreateAppRequest().setApp(app));
+ }
+
/**
* Create an app.
*
@@ -203,8 +207,9 @@ public App delete(DeleteAppRequest request) {
return impl.delete(request);
}
- public Wait deploy(String appName) {
- return deploy(new CreateAppDeploymentRequest().setAppName(appName));
+ public Wait deploy(String appName, AppDeployment appDeployment) {
+ return deploy(
+ new CreateAppDeploymentRequest().setAppName(appName).setAppDeployment(appDeployment));
}
/**
@@ -361,8 +366,8 @@ public Wait stop(StopAppRequest request) {
(timeout, callback) -> waitGetAppStopped(response.getName(), timeout, callback), response);
}
- public App update(String name) {
- return update(new UpdateAppRequest().setName(name));
+ public App update(String name, App app) {
+ return update(new UpdateAppRequest().setName(name).setApp(app));
}
/**
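The deploy/update convenience overloads above now require the resource payload. A hedged sketch of the new deploy call; the app name and source path are placeholders, setSourceCodePath is assumed from the existing AppDeployment model, and get() is assumed to be the blocking helper on the returned Wait handle:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.apps.AppDeployment;

class AppDeploySketch {
  static void deploy(WorkspaceClient workspace) throws Exception {
    AppDeployment deployment =
        new AppDeployment().setSourceCodePath("/Workspace/Users/someone/my-app"); // placeholder path
    // The deployment object is now a required second argument; get() waits for a terminal state.
    workspace.apps().deploy("my-app", deployment).get();
  }
}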
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
index 6934a3443..39b840571 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicyAPI.java
@@ -79,8 +79,8 @@ public Iterable list(ListBudgetPoliciesRequest request) {
});
}
- public BudgetPolicy update(String policyId) {
- return update(new UpdateBudgetPolicyRequest().setPolicyId(policyId));
+ public BudgetPolicy update(String policyId, BudgetPolicy policy) {
+ return update(new UpdateBudgetPolicyRequest().setPolicyId(policyId).setPolicy(policy));
}
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
index 0a3f68864..30ca3e788 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java
@@ -75,6 +75,10 @@ public OnlineTable waitGetOnlineTableActive(
throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage));
}
+ public Wait create(OnlineTable table) {
+ return create(new CreateOnlineTableRequest().setTable(table));
+ }
+
/**
* Create an Online Table.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java
index 204671a6b..9d4ea5c05 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java
@@ -12,6 +12,7 @@ public enum Privilege {
BROWSE,
CREATE,
CREATE_CATALOG,
+ CREATE_CLEAN_ROOM,
CREATE_CONNECTION,
CREATE_EXTERNAL_LOCATION,
CREATE_EXTERNAL_TABLE,
@@ -32,9 +33,11 @@ public enum Privilege {
CREATE_VIEW,
CREATE_VOLUME,
EXECUTE,
+ EXECUTE_CLEAN_ROOM_TASK,
MANAGE,
MANAGE_ALLOWLIST,
MODIFY,
+ MODIFY_CLEAN_ROOM,
READ_FILES,
READ_PRIVATE_FILES,
READ_VOLUME,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java
index 475811b98..61fc05806 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java
@@ -18,6 +18,18 @@ public class SetArtifactAllowlist {
/** The artifact type of the allowlist. */
@JsonIgnore private ArtifactType artifactType;
+ /** Time at which this artifact allowlist was set, in epoch milliseconds. */
+ @JsonProperty("created_at")
+ private Long createdAt;
+
+ /** Username of the user who set the artifact allowlist. */
+ @JsonProperty("created_by")
+ private String createdBy;
+
+ /** Unique identifier of parent metastore. */
+ @JsonProperty("metastore_id")
+ private String metastoreId;
+
public SetArtifactAllowlist setArtifactMatchers(Collection<ArtifactMatcher> artifactMatchers) {
this.artifactMatchers = artifactMatchers;
return this;
@@ -36,18 +48,48 @@ public ArtifactType getArtifactType() {
return artifactType;
}
+ public SetArtifactAllowlist setCreatedAt(Long createdAt) {
+ this.createdAt = createdAt;
+ return this;
+ }
+
+ public Long getCreatedAt() {
+ return createdAt;
+ }
+
+ public SetArtifactAllowlist setCreatedBy(String createdBy) {
+ this.createdBy = createdBy;
+ return this;
+ }
+
+ public String getCreatedBy() {
+ return createdBy;
+ }
+
+ public SetArtifactAllowlist setMetastoreId(String metastoreId) {
+ this.metastoreId = metastoreId;
+ return this;
+ }
+
+ public String getMetastoreId() {
+ return metastoreId;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SetArtifactAllowlist that = (SetArtifactAllowlist) o;
return Objects.equals(artifactMatchers, that.artifactMatchers)
- && Objects.equals(artifactType, that.artifactType);
+ && Objects.equals(artifactType, that.artifactType)
+ && Objects.equals(createdAt, that.createdAt)
+ && Objects.equals(createdBy, that.createdBy)
+ && Objects.equals(metastoreId, that.metastoreId);
}
@Override
public int hashCode() {
- return Objects.hash(artifactMatchers, artifactType);
+ return Objects.hash(artifactMatchers, artifactType, createdAt, createdBy, metastoreId);
}
@Override
@@ -55,6 +97,9 @@ public String toString() {
return new ToStringer(SetArtifactAllowlist.class)
.add("artifactMatchers", artifactMatchers)
.add("artifactType", artifactType)
+ .add("createdAt", createdAt)
+ .add("createdBy", createdBy)
+ .add("metastoreId", metastoreId)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
index 9516e83ba..5bd9d49f6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomAssetsAPI.java
@@ -27,8 +27,9 @@ public CleanRoomAssetsAPI(CleanRoomAssetsService mock) {
impl = mock;
}
- public CleanRoomAsset create(String cleanRoomName) {
- return create(new CreateCleanRoomAssetRequest().setCleanRoomName(cleanRoomName));
+ public CleanRoomAsset create(String cleanRoomName, CleanRoomAsset asset) {
+ return create(
+ new CreateCleanRoomAssetRequest().setCleanRoomName(cleanRoomName).setAsset(asset));
}
/**
@@ -100,12 +101,13 @@ public Iterable list(ListCleanRoomAssetsRequest request) {
}
public CleanRoomAsset update(
- String cleanRoomName, CleanRoomAssetAssetType assetType, String name) {
+ String cleanRoomName, CleanRoomAssetAssetType assetType, String name, CleanRoomAsset asset) {
return update(
new UpdateCleanRoomAssetRequest()
.setCleanRoomName(cleanRoomName)
.setAssetType(assetType)
- .setName(name));
+ .setName(name)
+ .setAsset(asset));
}
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
index 729eefc55..be34cc9db 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/cleanrooms/CleanRoomsAPI.java
@@ -28,6 +28,10 @@ public CleanRoomsAPI(CleanRoomsService mock) {
impl = mock;
}
+ public CleanRoom create(CleanRoom cleanRoom) {
+ return create(new CreateCleanRoomRequest().setCleanRoom(cleanRoom));
+ }
+
/**
* Create a clean room.
*
@@ -44,9 +48,12 @@ public CleanRoom create(CreateCleanRoomRequest request) {
return impl.create(request);
}
- public CreateCleanRoomOutputCatalogResponse createOutputCatalog(String cleanRoomName) {
+ public CreateCleanRoomOutputCatalogResponse createOutputCatalog(
+ String cleanRoomName, CleanRoomOutputCatalog outputCatalog) {
return createOutputCatalog(
- new CreateCleanRoomOutputCatalogRequest().setCleanRoomName(cleanRoomName));
+ new CreateCleanRoomOutputCatalogRequest()
+ .setCleanRoomName(cleanRoomName)
+ .setOutputCatalog(outputCatalog));
}
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
index a02e39cb4..25779f5c7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java
@@ -9,8 +9,11 @@
import java.util.Objects;
/**
- * The environment entity used to preserve serverless environment side panel and jobs' environment
- * for non-notebook task. In this minimal environment spec, only pip dependencies are supported.
+ * The environment entity used to preserve serverless environment side panel, jobs' environment for
+ * non-notebook task, and DLT's environment for classic and serverless pipelines. (Note: DLT uses a
+ * copied version of the Environment proto below, at
+ * //spark/pipelines/api/protos/copied/libraries-environments-copy.proto) In this minimal
+ * environment spec, only pip dependencies are supported.
*/
@Generated
public class Environment {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
index 173d3492b..6e8973914 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
@@ -64,6 +64,7 @@ public enum TerminationReasonCode {
DATA_ACCESS_CONFIG_CHANGED,
DBFS_COMPONENT_UNHEALTHY,
DISASTER_RECOVERY_REPLICATION,
+ DNS_RESOLUTION_ERROR,
DOCKER_CONTAINER_CREATION_EXCEPTION,
DOCKER_IMAGE_PULL_FAILURE,
DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION,
@@ -82,6 +83,7 @@ public enum TerminationReasonCode {
EXECUTION_COMPONENT_UNHEALTHY,
EXECUTOR_POD_UNSCHEDULED,
GCP_API_RATE_QUOTA_EXCEEDED,
+ GCP_DENIED_BY_ORG_POLICY,
GCP_FORBIDDEN,
GCP_IAM_TIMEOUT,
GCP_INACCESSIBLE_KMS_KEY_FAILURE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
index 6f4978b04..8ceb4c401 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java
@@ -27,6 +27,10 @@ public LakeviewAPI(LakeviewService mock) {
impl = mock;
}
+ public Dashboard create(Dashboard dashboard) {
+ return create(new CreateDashboardRequest().setDashboard(dashboard));
+ }
+
/**
* Create dashboard.
*
@@ -36,8 +40,9 @@ public Dashboard create(CreateDashboardRequest request) {
return impl.create(request);
}
- public Schedule createSchedule(String dashboardId) {
- return createSchedule(new CreateScheduleRequest().setDashboardId(dashboardId));
+ public Schedule createSchedule(String dashboardId, Schedule schedule) {
+ return createSchedule(
+ new CreateScheduleRequest().setDashboardId(dashboardId).setSchedule(schedule));
}
/** Create dashboard schedule. */
@@ -45,9 +50,13 @@ public Schedule createSchedule(CreateScheduleRequest request) {
return impl.createSchedule(request);
}
- public Subscription createSubscription(String dashboardId, String scheduleId) {
+ public Subscription createSubscription(
+ String dashboardId, String scheduleId, Subscription subscription) {
return createSubscription(
- new CreateSubscriptionRequest().setDashboardId(dashboardId).setScheduleId(scheduleId));
+ new CreateSubscriptionRequest()
+ .setDashboardId(dashboardId)
+ .setScheduleId(scheduleId)
+ .setSubscription(subscription));
}
/** Create schedule subscription. */
@@ -234,8 +243,8 @@ public void unpublish(UnpublishDashboardRequest request) {
impl.unpublish(request);
}
- public Dashboard update(String dashboardId) {
- return update(new UpdateDashboardRequest().setDashboardId(dashboardId));
+ public Dashboard update(String dashboardId, Dashboard dashboard) {
+ return update(new UpdateDashboardRequest().setDashboardId(dashboardId).setDashboard(dashboard));
}
/**
@@ -247,9 +256,12 @@ public Dashboard update(UpdateDashboardRequest request) {
return impl.update(request);
}
- public Schedule updateSchedule(String dashboardId, String scheduleId) {
+ public Schedule updateSchedule(String dashboardId, String scheduleId, Schedule schedule) {
return updateSchedule(
- new UpdateScheduleRequest().setDashboardId(dashboardId).setScheduleId(scheduleId));
+ new UpdateScheduleRequest()
+ .setDashboardId(dashboardId)
+ .setScheduleId(scheduleId)
+ .setSchedule(schedule));
}
/** Update dashboard schedule. */
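The same pattern applies to Lakeview: create, update, and the schedule/subscription helpers now carry the resource object. A hedged sketch; the display name is a placeholder, and setDisplayName/getDashboardId are assumed from the existing Dashboard model:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.Dashboard;
import com.databricks.sdk.service.dashboards.Schedule;

class LakeviewSketch {
  static void createWithSchedule(WorkspaceClient workspace) {
    Dashboard dashboard =
        workspace.lakeview().create(new Dashboard().setDisplayName("Example dashboard")); // placeholder name
    // Schedule details (cron expression, pause status, ...) are omitted; the empty Schedule only shows the new shape.
    workspace.lakeview().createSchedule(dashboard.getDashboardId(), new Schedule());
  }
}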
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
index a01c273e7..527597bec 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
@@ -128,8 +128,8 @@ public class CreateJob {
private Collection<JobParameterDefinition> parameters;
/**
- * The performance mode on a serverless job. The performance target determines the level of
- * compute performance or cost-efficiency for the run.
+ * The performance mode on a serverless job. This field determines the level of compute
+ * performance or cost-efficiency for the run.
*
* * `STANDARD`: Enables cost-efficient execution of serverless workloads. *
* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java
index 764152939..188e17126 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java
@@ -14,8 +14,11 @@ public class JobEnvironment {
private String environmentKey;
/**
- * The environment entity used to preserve serverless environment side panel and jobs' environment
- * for non-notebook task. In this minimal environment spec, only pip dependencies are supported.
+ * The environment entity used to preserve serverless environment side panel, jobs' environment
+ * for non-notebook task, and DLT's environment for classic and serverless pipelines. (Note: DLT
+ * uses a copied version of the Environment proto below, at
+ * //spark/pipelines/api/protos/copied/libraries-environments-copy.proto) In this minimal
+ * environment spec, only pip dependencies are supported.
*/
@JsonProperty("spec")
private com.databricks.sdk.service.compute.Environment spec;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
index 668e15467..a79bee35f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
@@ -124,8 +124,8 @@ public class JobSettings {
private Collection<JobParameterDefinition> parameters;
/**
- * The performance mode on a serverless job. The performance target determines the level of
- * compute performance or cost-efficiency for the run.
+ * The performance mode on a serverless job. This field determines the level of compute
+ * performance or cost-efficiency for the run.
*
* * `STANDARD`: Enables cost-efficient execution of serverless workloads. *
* `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java
index 1c39c300c..d45430d6d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/AccountFederationPolicyAPI.java
@@ -66,6 +66,10 @@ public AccountFederationPolicyAPI(AccountFederationPolicyService mock) {
impl = mock;
}
+ public FederationPolicy create(FederationPolicy policy) {
+ return create(new CreateAccountFederationPolicyRequest().setPolicy(policy));
+ }
+
/** Create account federation policy. */
public FederationPolicy create(CreateAccountFederationPolicyRequest request) {
return impl.create(request);
@@ -104,8 +108,9 @@ public Iterable list(ListAccountFederationPoliciesRequest requ
});
}
- public FederationPolicy update(String policyId) {
- return update(new UpdateAccountFederationPolicyRequest().setPolicyId(policyId));
+ public FederationPolicy update(String policyId, FederationPolicy policy) {
+ return update(
+ new UpdateAccountFederationPolicyRequest().setPolicyId(policyId).setPolicy(policy));
}
/** Update account federation policy. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java
index 24b8d3051..0db7cfd6f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalFederationPolicyAPI.java
@@ -69,10 +69,11 @@ public ServicePrincipalFederationPolicyAPI(ServicePrincipalFederationPolicyServi
impl = mock;
}
- public FederationPolicy create(long servicePrincipalId) {
+ public FederationPolicy create(long servicePrincipalId, FederationPolicy policy) {
return create(
new CreateServicePrincipalFederationPolicyRequest()
- .setServicePrincipalId(servicePrincipalId));
+ .setServicePrincipalId(servicePrincipalId)
+ .setPolicy(policy));
}
/** Create service principal federation policy. */
@@ -125,11 +126,13 @@ public Iterable list(ListServicePrincipalFederationPoliciesReq
});
}
- public FederationPolicy update(long servicePrincipalId, String policyId) {
+ public FederationPolicy update(
+ long servicePrincipalId, String policyId, FederationPolicy policy) {
return update(
new UpdateServicePrincipalFederationPolicyRequest()
.setServicePrincipalId(servicePrincipalId)
- .setPolicyId(policyId));
+ .setPolicyId(policyId)
+ .setPolicy(policy));
}
/** Update service principal federation policy. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java
index c0d50fa56..e323cbca0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityInput.java
@@ -74,8 +74,9 @@ public class ServedEntityInput {
* concurrency that the compute autoscales between. A single unit of provisioned concurrency can
* process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
* concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
- * concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
- * each workload size is 0.
+ * concurrency). Additional custom workload sizes can also be used when available in the
+ * workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each
+ * workload size is 0.
*/
@JsonProperty("workload_size")
private String workloadSize;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java
index 270d02af2..9c656687b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedEntityOutput.java
@@ -93,8 +93,9 @@ public class ServedEntityOutput {
* concurrency that the compute autoscales between. A single unit of provisioned concurrency can
* process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
* concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
- * concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
- * each workload size is 0.
+ * concurrency). Additional custom workload sizes can also be used when available in the
+ * workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each
+ * workload size is 0.
*/
@JsonProperty("workload_size")
private String workloadSize;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java
index 4cdd5876e..f03eec3a4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInput.java
@@ -57,11 +57,12 @@ public class ServedModelInput {
* concurrency that the compute autoscales between. A single unit of provisioned concurrency can
* process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
* concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
- * concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
- * each workload size is 0.
+ * concurrency). Additional custom workload sizes can also be used when available in the
+ * workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each
+ * workload size is 0.
*/
@JsonProperty("workload_size")
- private ServedModelInputWorkloadSize workloadSize;
+ private String workloadSize;
/**
* The workload type of the served entity. The workload type selects which type of compute to use
@@ -147,12 +148,12 @@ public Boolean getScaleToZeroEnabled() {
return scaleToZeroEnabled;
}
- public ServedModelInput setWorkloadSize(ServedModelInputWorkloadSize workloadSize) {
+ public ServedModelInput setWorkloadSize(String workloadSize) {
this.workloadSize = workloadSize;
return this;
}
- public ServedModelInputWorkloadSize getWorkloadSize() {
+ public String getWorkloadSize() {
return workloadSize;
}
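Since workloadSize is now a plain String (the ServedModelInputWorkloadSize enum is removed below), callers pass the size name directly. A hedged sketch; the model coordinates are placeholders and the other setters are assumed from the existing ServedModelInput model:

import com.databricks.sdk.service.serving.ServedModelInput;

class ServedModelSizeSketch {
  static ServedModelInput small() {
    return new ServedModelInput()
        .setModelName("main.default.my_model") // placeholder UC model name
        .setModelVersion("1") // placeholder version
        .setScaleToZeroEnabled(true)
        .setWorkloadSize("Small"); // was ServedModelInputWorkloadSize.SMALL before this change
  }
}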
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadSize.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadSize.java
deleted file mode 100755
index db3122951..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadSize.java
+++ /dev/null
@@ -1,18 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.serving;
-
-import com.databricks.sdk.support.Generated;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-@Generated
-public enum ServedModelInputWorkloadSize {
- @JsonProperty("Large")
- LARGE,
-
- @JsonProperty("Medium")
- MEDIUM,
-
- @JsonProperty("Small")
- SMALL,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java
index 36b67562d..a170e70bc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelOutput.java
@@ -61,8 +61,9 @@ public class ServedModelOutput {
* concurrency that the compute autoscales between. A single unit of provisioned concurrency can
* process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
* concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
- * concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
- * each workload size is 0.
+ * concurrency). Additional custom workload sizes can also be used when available in the
+ * workspace. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each
+ * workload size is 0.
*/
@JsonProperty("workload_size")
private String workloadSize;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java
index dcec78377..3188d16c2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java
@@ -7,39 +7,21 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Create a network connectivity configuration */
@Generated
public class CreateNetworkConnectivityConfigRequest {
- /**
- * The name of the network connectivity configuration. The name can contain alphanumeric
- * characters, hyphens, and underscores. The length must be between 3 and 30 characters. The name
- * must match the regular expression `^[0-9a-zA-Z-_]{3,30}$`.
- */
- @JsonProperty("name")
- private String name;
+ /** Properties of the new network connectivity configuration. */
+ @JsonProperty("network_connectivity_config")
+ private CreateNetworkConnectivityConfiguration networkConnectivityConfig;
- /**
- * The region for the network connectivity configuration. Only workspaces in the same region can
- * be attached to the network connectivity configuration.
- */
- @JsonProperty("region")
- private String region;
-
- public CreateNetworkConnectivityConfigRequest setName(String name) {
- this.name = name;
- return this;
- }
-
- public String getName() {
- return name;
- }
-
- public CreateNetworkConnectivityConfigRequest setRegion(String region) {
- this.region = region;
+ public CreateNetworkConnectivityConfigRequest setNetworkConnectivityConfig(
+ CreateNetworkConnectivityConfiguration networkConnectivityConfig) {
+ this.networkConnectivityConfig = networkConnectivityConfig;
return this;
}
- public String getRegion() {
- return region;
+ public CreateNetworkConnectivityConfiguration getNetworkConnectivityConfig() {
+ return networkConnectivityConfig;
}
@Override
@@ -47,19 +29,18 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateNetworkConnectivityConfigRequest that = (CreateNetworkConnectivityConfigRequest) o;
- return Objects.equals(name, that.name) && Objects.equals(region, that.region);
+ return Objects.equals(networkConnectivityConfig, that.networkConnectivityConfig);
}
@Override
public int hashCode() {
- return Objects.hash(name, region);
+ return Objects.hash(networkConnectivityConfig);
}
@Override
public String toString() {
return new ToStringer(CreateNetworkConnectivityConfigRequest.class)
- .add("name", name)
- .add("region", region)
+ .add("networkConnectivityConfig", networkConnectivityConfig)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfiguration.java
new file mode 100755
index 000000000..d8d868468
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfiguration.java
@@ -0,0 +1,66 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Properties of the new network connectivity configuration. */
+@Generated
+public class CreateNetworkConnectivityConfiguration {
+ /**
+ * The name of the network connectivity configuration. The name can contain alphanumeric
+ * characters, hyphens, and underscores. The length must be between 3 and 30 characters. The name
+ * must match the regular expression ^[0-9a-zA-Z-_]{3,30}$
+ */
+ @JsonProperty("name")
+ private String name;
+
+ /**
+ * The region for the network connectivity configuration. Only workspaces in the same region can
+ * be attached to the network connectivity configuration.
+ */
+ @JsonProperty("region")
+ private String region;
+
+ public CreateNetworkConnectivityConfiguration setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public CreateNetworkConnectivityConfiguration setRegion(String region) {
+ this.region = region;
+ return this;
+ }
+
+ public String getRegion() {
+ return region;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateNetworkConnectivityConfiguration that = (CreateNetworkConnectivityConfiguration) o;
+ return Objects.equals(name, that.name) && Objects.equals(region, that.region);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, region);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateNetworkConnectivityConfiguration.class)
+ .add("name", name)
+ .add("region", region)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java
new file mode 100755
index 000000000..ea50df387
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java
@@ -0,0 +1,90 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure
+ * portal after initialization.
+ */
+@Generated
+public class CreatePrivateEndpointRule {
+ /**
+ * Only used by private endpoints to customer-managed resources.
+ *
+ * Domain names of target private link service. When updating this field, the full list of
+ * target domain_names must be specified.
+ */
+ @JsonProperty("domain_names")
+ private Collection<String> domainNames;
+
+ /**
+ * Only used by private endpoints to Azure first-party services. Enum: blob | dfs | sqlServer |
+ * mysqlServer
+ *
+ * The sub-resource type (group ID) of the target resource. Note that to connect to workspace
+ * root storage (root DBFS), you need two endpoints, one for blob and one for dfs.
+ */
+ @JsonProperty("group_id")
+ private String groupId;
+
+ /** The Azure resource ID of the target resource. */
+ @JsonProperty("resource_id")
+ private String resourceId;
+
+ public CreatePrivateEndpointRule setDomainNames(Collection<String> domainNames) {
+ this.domainNames = domainNames;
+ return this;
+ }
+
+ public Collection<String> getDomainNames() {
+ return domainNames;
+ }
+
+ public CreatePrivateEndpointRule setGroupId(String groupId) {
+ this.groupId = groupId;
+ return this;
+ }
+
+ public String getGroupId() {
+ return groupId;
+ }
+
+ public CreatePrivateEndpointRule setResourceId(String resourceId) {
+ this.resourceId = resourceId;
+ return this;
+ }
+
+ public String getResourceId() {
+ return resourceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreatePrivateEndpointRule that = (CreatePrivateEndpointRule) o;
+ return Objects.equals(domainNames, that.domainNames)
+ && Objects.equals(groupId, that.groupId)
+ && Objects.equals(resourceId, that.resourceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(domainNames, groupId, resourceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreatePrivateEndpointRule.class)
+ .add("domainNames", domainNames)
+ .add("groupId", groupId)
+ .add("resourceId", resourceId)
+ .toString();
+ }
+}
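A minimal usage sketch of the new CreatePrivateEndpointRule, covering the two shapes its field docs describe: an Azure first-party service endpoint addressed by group_id plus resource_id, and a customer-managed resource addressed by domain_names. The resource IDs and domain names below are placeholder assumptions, not values from this diff.

import com.databricks.sdk.service.settings.CreatePrivateEndpointRule;
import java.util.Arrays;

class CreatePrivateEndpointRuleSketch {
  // Rule to an Azure first-party service: group_id is one of blob | dfs | sqlServer | mysqlServer.
  static CreatePrivateEndpointRule firstPartyRule() {
    return new CreatePrivateEndpointRule()
        .setGroupId("dfs")
        .setResourceId("/subscriptions/.../providers/Microsoft.Storage/storageAccounts/myaccount");
  }

  // Rule to a customer-managed resource: supply the full list of target domain names.
  static CreatePrivateEndpointRule customerManagedRule() {
    return new CreatePrivateEndpointRule()
        .setResourceId("/subscriptions/.../providers/Microsoft.Network/privateLinkServices/my-pls")
        .setDomainNames(Arrays.asList("db.internal.example.com"));
  }
}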
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java
index b5a9f5af6..1afe88442 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequest.java
@@ -8,31 +8,18 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Create a private endpoint rule */
@Generated
public class CreatePrivateEndpointRuleRequest {
- /**
- * The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
- * storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
- */
- @JsonProperty("group_id")
- private CreatePrivateEndpointRuleRequestGroupId groupId;
-
- /** Your Network Connectvity Configuration ID. */
+ /** Your Network Connectivity Configuration ID. */
@JsonIgnore private String networkConnectivityConfigId;
- /** The Azure resource ID of the target resource. */
- @JsonProperty("resource_id")
- private String resourceId;
-
- public CreatePrivateEndpointRuleRequest setGroupId(
- CreatePrivateEndpointRuleRequestGroupId groupId) {
- this.groupId = groupId;
- return this;
- }
-
- public CreatePrivateEndpointRuleRequestGroupId getGroupId() {
- return groupId;
- }
+ /**
+ * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure
+ * portal after initialization.
+ */
+ @JsonProperty("private_endpoint_rule")
+ private CreatePrivateEndpointRule privateEndpointRule;
public CreatePrivateEndpointRuleRequest setNetworkConnectivityConfigId(
String networkConnectivityConfigId) {
@@ -44,13 +31,14 @@ public String getNetworkConnectivityConfigId() {
return networkConnectivityConfigId;
}
- public CreatePrivateEndpointRuleRequest setResourceId(String resourceId) {
- this.resourceId = resourceId;
+ public CreatePrivateEndpointRuleRequest setPrivateEndpointRule(
+ CreatePrivateEndpointRule privateEndpointRule) {
+ this.privateEndpointRule = privateEndpointRule;
return this;
}
- public String getResourceId() {
- return resourceId;
+ public CreatePrivateEndpointRule getPrivateEndpointRule() {
+ return privateEndpointRule;
}
@Override
@@ -58,22 +46,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreatePrivateEndpointRuleRequest that = (CreatePrivateEndpointRuleRequest) o;
- return Objects.equals(groupId, that.groupId)
- && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId)
- && Objects.equals(resourceId, that.resourceId);
+ return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId)
+ && Objects.equals(privateEndpointRule, that.privateEndpointRule);
}
@Override
public int hashCode() {
- return Objects.hash(groupId, networkConnectivityConfigId, resourceId);
+ return Objects.hash(networkConnectivityConfigId, privateEndpointRule);
}
@Override
public String toString() {
return new ToStringer(CreatePrivateEndpointRuleRequest.class)
- .add("groupId", groupId)
.add("networkConnectivityConfigId", networkConnectivityConfigId)
- .add("resourceId", resourceId)
+ .add("privateEndpointRule", privateEndpointRule)
.toString();
}
}
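With this change the group_id/resource_id pair travels inside the nested private_endpoint_rule object instead of as top-level request fields. A brief sketch of the reshaped request, with placeholder IDs:

import com.databricks.sdk.service.settings.CreatePrivateEndpointRule;
import com.databricks.sdk.service.settings.CreatePrivateEndpointRuleRequest;

class CreatePrivateEndpointRuleRequestSketch {
  static CreatePrivateEndpointRuleRequest buildRequest() {
    return new CreatePrivateEndpointRuleRequest()
        .setNetworkConnectivityConfigId("ncc-id-placeholder")
        .setPrivateEndpointRule(
            new CreatePrivateEndpointRule()
                .setGroupId("blob")
                .setResourceId("/subscriptions/.../storageAccounts/myaccount"));
  }
}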
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequestGroupId.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequestGroupId.java
deleted file mode 100755
index e50a9a616..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRuleRequestGroupId.java
+++ /dev/null
@@ -1,25 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
- * storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
- */
-@Generated
-public enum CreatePrivateEndpointRuleRequestGroupId {
- @JsonProperty("blob")
- BLOB,
-
- @JsonProperty("dfs")
- DFS,
-
- @JsonProperty("mysqlServer")
- MYSQL_SERVER,
-
- @JsonProperty("sqlServer")
- SQL_SERVER,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java
index dd095262a..b4a5f6e09 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java
@@ -10,7 +10,7 @@
/** Delete a network connectivity configuration */
@Generated
public class DeleteNetworkConnectivityConfigurationRequest {
- /** Your Network Connectvity Configuration ID. */
+ /** Your Network Connectivity Configuration ID. */
@JsonIgnore private String networkConnectivityConfigId;
public DeleteNetworkConnectivityConfigurationRequest setNetworkConnectivityConfigId(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressResourceType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressResourceType.java
new file mode 100755
index 000000000..5bdf0c98f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EgressResourceType.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * The target resources that are supported by Network Connectivity Config. Note: some egress types
+ * can support general types that are not defined in EgressResourceType. E.g.: Azure private
+ * endpoint supports private link enabled Azure services.
+ */
+@Generated
+public enum EgressResourceType {
+ AZURE_BLOB_STORAGE,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java
index 5f01c080c..af3c9a1c5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetNetworkConnectivityConfigurationRequest.java
@@ -10,7 +10,7 @@
/** Get a network connectivity configuration */
@Generated
public class GetNetworkConnectivityConfigurationRequest {
- /** Your Network Connectvity Configuration ID. */
+ /** Your Network Connectivity Configuration ID. */
@JsonIgnore private String networkConnectivityConfigId;
public GetNetworkConnectivityConfigurationRequest setNetworkConnectivityConfigId(
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequest.java
index 478a31821..e34e82d41 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivateEndpointRuleRequest.java
@@ -7,7 +7,7 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
-/** Get a private endpoint rule */
+/** Gets a private endpoint rule */
@Generated
public class GetPrivateEndpointRuleRequest {
/** Your Network Connectvity Configuration ID. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java
index 23094c9fa..03ccf6398 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java
@@ -8,6 +8,7 @@
import java.util.Collection;
import java.util.Objects;
+/** The private endpoint rule list was successfully retrieved. */
@Generated
public class ListNccAzurePrivateEndpointRulesResponse {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java
index ac6100830..1dc5b5042 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java
@@ -8,6 +8,7 @@
import java.util.Collection;
import java.util.Objects;
+/** The network connectivity configuration list was successfully retrieved. */
@Generated
public class ListNetworkConnectivityConfigurationsResponse {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java
index 89807c140..6228e6f9b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java
@@ -5,21 +5,25 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
import java.util.Objects;
+/**
+ * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure
+ * portal after initialization.
+ */
@Generated
public class NccAzurePrivateEndpointRule {
/**
* The current status of this private endpoint. The private endpoint rules are effective only if
- * the connection state is `ESTABLISHED`. Remember that you must approve new endpoints on your
- * resources in the Azure portal before they take effect.
- *
- * The possible values are: - INIT: (deprecated) The endpoint has been created and pending
- * approval. - PENDING: The endpoint has been created and pending approval. - ESTABLISHED: The
- * endpoint has been approved and is ready to use in your serverless compute resources. -
- * REJECTED: Connection was rejected by the private link resource owner. - DISCONNECTED:
- * Connection was removed by the private link resource owner, the private endpoint becomes
- * informative and should be deleted for clean-up.
+ * the connection state is ESTABLISHED. Remember that you must approve new endpoints on your
+ * resources in the Azure portal before they take effect. The possible values are: - INIT:
+ * (deprecated) The endpoint has been created and pending approval. - PENDING: The endpoint has
+ * been created and pending approval. - ESTABLISHED: The endpoint has been approved and is ready
+ * to use in your serverless compute resources. - REJECTED: Connection was rejected by the private
+ * link resource owner. - DISCONNECTED: Connection was removed by the private link resource owner,
+ * the private endpoint becomes informative and should be deleted for clean-up. - EXPIRED: If the
+ * endpoint was created but not approved in 14 days, it will be EXPIRED.
*/
@JsonProperty("connection_state")
private NccAzurePrivateEndpointRuleConnectionState connectionState;
@@ -36,16 +40,28 @@ public class NccAzurePrivateEndpointRule {
@JsonProperty("deactivated_at")
private Long deactivatedAt;
+ /**
+ * Only used by private endpoints to customer-managed resources.
+ *
+ *
+ * Domain names of target private link service. When updating this field, the full list of
+ * target domain_names must be specified.
+ */
+ @JsonProperty("domain_names")
+ private Collection<String> domainNames;
+
/** The name of the Azure private endpoint resource. */
@JsonProperty("endpoint_name")
private String endpointName;
/**
- * The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
- * storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
+ * Only used by private endpoints to Azure first-party services. Enum: blob | dfs | sqlServer |
+ * mysqlServer
+ *
+ * The sub-resource type (group ID) of the target resource. Note that to connect to workspace
+ * root storage (root DBFS), you need two endpoints, one for blob and one for dfs.
*/
@JsonProperty("group_id")
- private NccAzurePrivateEndpointRuleGroupId groupId;
+ private String groupId;
/**
* The ID of a network connectivity configuration, which is the parent resource of this private
@@ -103,6 +119,15 @@ public Long getDeactivatedAt() {
return deactivatedAt;
}
+ public NccAzurePrivateEndpointRule setDomainNames(Collection<String> domainNames) {
+ this.domainNames = domainNames;
+ return this;
+ }
+
+ public Collection<String> getDomainNames() {
+ return domainNames;
+ }
+
public NccAzurePrivateEndpointRule setEndpointName(String endpointName) {
this.endpointName = endpointName;
return this;
@@ -112,12 +137,12 @@ public String getEndpointName() {
return endpointName;
}
- public NccAzurePrivateEndpointRule setGroupId(NccAzurePrivateEndpointRuleGroupId groupId) {
+ public NccAzurePrivateEndpointRule setGroupId(String groupId) {
this.groupId = groupId;
return this;
}
- public NccAzurePrivateEndpointRuleGroupId getGroupId() {
+ public String getGroupId() {
return groupId;
}
@@ -167,6 +192,7 @@ public boolean equals(Object o) {
&& Objects.equals(creationTime, that.creationTime)
&& Objects.equals(deactivated, that.deactivated)
&& Objects.equals(deactivatedAt, that.deactivatedAt)
+ && Objects.equals(domainNames, that.domainNames)
&& Objects.equals(endpointName, that.endpointName)
&& Objects.equals(groupId, that.groupId)
&& Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId)
@@ -182,6 +208,7 @@ public int hashCode() {
creationTime,
deactivated,
deactivatedAt,
+ domainNames,
endpointName,
groupId,
networkConnectivityConfigId,
@@ -197,6 +224,7 @@ public String toString() {
.add("creationTime", creationTime)
.add("deactivated", deactivated)
.add("deactivatedAt", deactivatedAt)
+ .add("domainNames", domainNames)
.add("endpointName", endpointName)
.add("groupId", groupId)
.add("networkConnectivityConfigId", networkConnectivityConfigId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java
index 189293ede..2331f89ed 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java
@@ -4,22 +4,11 @@
import com.databricks.sdk.support.Generated;
-/**
- * The current status of this private endpoint. The private endpoint rules are effective only if the
- * connection state is `ESTABLISHED`. Remember that you must approve new endpoints on your resources
- * in the Azure portal before they take effect.
- *
- * The possible values are: - INIT: (deprecated) The endpoint has been created and pending
- * approval. - PENDING: The endpoint has been created and pending approval. - ESTABLISHED: The
- * endpoint has been approved and is ready to use in your serverless compute resources. - REJECTED:
- * Connection was rejected by the private link resource owner. - DISCONNECTED: Connection was
- * removed by the private link resource owner, the private endpoint becomes informative and should
- * be deleted for clean-up.
- */
@Generated
public enum NccAzurePrivateEndpointRuleConnectionState {
DISCONNECTED,
ESTABLISHED,
+ EXPIRED,
INIT,
PENDING,
REJECTED,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleGroupId.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleGroupId.java
deleted file mode 100755
index 4ab7a01a0..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleGroupId.java
+++ /dev/null
@@ -1,25 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.settings;
-
-import com.databricks.sdk.support.Generated;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-/**
- * The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
- * storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
- */
-@Generated
-public enum NccAzurePrivateEndpointRuleGroupId {
- @JsonProperty("blob")
- BLOB,
-
- @JsonProperty("dfs")
- DFS,
-
- @JsonProperty("mysqlServer")
- MYSQL_SERVER,
-
- @JsonProperty("sqlServer")
- SQL_SERVER,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRule.java
index a83d687f7..b0a6607e6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRule.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzureServiceEndpointRule.java
@@ -21,13 +21,13 @@ public class NccAzureServiceEndpointRule {
@JsonProperty("subnets")
private Collection<String> subnets;
- /** The Azure region in which this service endpoint rule applies. */
+ /** The Azure region in which this service endpoint rule applies.. */
@JsonProperty("target_region")
private String targetRegion;
/** The Azure services to which this service endpoint rule applies to. */
@JsonProperty("target_services")
- private Collection<String> targetServices;
+ private Collection<EgressResourceType> targetServices;
public NccAzureServiceEndpointRule setSubnets(Collection<String> subnets) {
this.subnets = subnets;
@@ -47,12 +47,13 @@ public String getTargetRegion() {
return targetRegion;
}
- public NccAzureServiceEndpointRule setTargetServices(Collection<String> targetServices) {
+ public NccAzureServiceEndpointRule setTargetServices(
+ Collection<EgressResourceType> targetServices) {
this.targetServices = targetServices;
return this;
}
- public Collection<String> getTargetServices() {
+ public Collection<EgressResourceType> getTargetServices() {
return targetServices;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java
index a7f6f9ed1..1d5d18154 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressConfig.java
@@ -7,10 +7,6 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/**
- * The network connectivity rules that apply to network traffic from your serverless compute
- * resources.
- */
@Generated
public class NccEgressConfig {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java
index 8b697413b..e46162f5d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java
@@ -7,11 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/**
- * The network connectivity rules that are applied by default without resource specific
- * configurations. You can find the stable network information of your serverless compute resources
- * here.
- */
+/** Default rules don't have specific targets. */
@Generated
public class NccEgressDefaultRules {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java
index af074e73d..4cb399bdf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressTargetRules.java
@@ -8,10 +8,7 @@
import java.util.Collection;
import java.util.Objects;
-/**
- * The network connectivity rules that configured for each destinations. These rules override
- * default rules.
- */
+/** Target rule controls the egress rules that are dedicated to specific resources. */
@Generated
public class NccEgressTargetRules {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java
index 5c8009be5..72ae3444f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java
@@ -9,7 +9,14 @@
/**
* These APIs provide configurations for the network connectivity of your workspaces for serverless
- * compute resources.
+ * compute resources. This API provides stable subnets for your workspace so that you can configure
+ * your firewalls on your Azure Storage accounts to allow access from Databricks. You can also use
+ * the API to provision private endpoints for Databricks to privately connect serverless compute
+ * resources to your Azure resources using Azure Private Link. See [configure serverless secure
+ * connectivity].
+ *
+ * [configure serverless secure connectivity]:
+ * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security
*/
@Generated
public class NetworkConnectivityAPI {
@@ -28,26 +35,39 @@ public NetworkConnectivityAPI(NetworkConnectivityService mock) {
}
public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration(
- String name, String region) {
+ CreateNetworkConnectivityConfiguration networkConnectivityConfig) {
return createNetworkConnectivityConfiguration(
- new CreateNetworkConnectivityConfigRequest().setName(name).setRegion(region));
+ new CreateNetworkConnectivityConfigRequest()
+ .setNetworkConnectivityConfig(networkConnectivityConfig));
}
- /** Create a network connectivity configuration. */
+ /**
+ * Create a network connectivity configuration.
+ *
+ * Creates a network connectivity configuration (NCC), which provides stable Azure service
+ * subnets when accessing your Azure Storage accounts. You can also use a network connectivity
+ * configuration to create Databricks managed private endpoints so that Databricks serverless
+ * compute resources privately access your resources.
+ *
+ * **IMPORTANT**: After you create the network connectivity configuration, you must assign one
+ * or more workspaces to the new network connectivity configuration. You can share one network
+ * connectivity configuration with multiple workspaces from the same Azure region within the same
+ * Databricks account. See [configure serverless secure connectivity].
+ *
+ * [configure serverless secure connectivity]:
+ * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security
+ */
public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration(
CreateNetworkConnectivityConfigRequest request) {
return impl.createNetworkConnectivityConfiguration(request);
}
public NccAzurePrivateEndpointRule createPrivateEndpointRule(
- String networkConnectivityConfigId,
- String resourceId,
- CreatePrivateEndpointRuleRequestGroupId groupId) {
+ String networkConnectivityConfigId, CreatePrivateEndpointRule privateEndpointRule) {
return createPrivateEndpointRule(
new CreatePrivateEndpointRuleRequest()
.setNetworkConnectivityConfigId(networkConnectivityConfigId)
- .setResourceId(resourceId)
- .setGroupId(groupId));
+ .setPrivateEndpointRule(privateEndpointRule));
}
/**
@@ -133,7 +153,7 @@ public NccAzurePrivateEndpointRule getPrivateEndpointRule(
}
/**
- * Get a private endpoint rule.
+ * Gets a private endpoint rule.
*
* Gets the private endpoint rule.
*/
@@ -188,6 +208,30 @@ public Iterable<NccAzurePrivateEndpointRule> listPrivateEndpointRules(
});
}
+ public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic(
+ String networkConnectivityConfigId,
+ String privateEndpointRuleId,
+ UpdatePrivateEndpointRule privateEndpointRule,
+ String updateMask) {
+ return updateNccAzurePrivateEndpointRulePublic(
+ new UpdateNccAzurePrivateEndpointRulePublicRequest()
+ .setNetworkConnectivityConfigId(networkConnectivityConfigId)
+ .setPrivateEndpointRuleId(privateEndpointRuleId)
+ .setPrivateEndpointRule(privateEndpointRule)
+ .setUpdateMask(updateMask));
+ }
+
+ /**
+ * Update a private endpoint rule.
+ *
+ * Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed
+ * resources is allowed to be updated.
+ */
+ public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic(
+ UpdateNccAzurePrivateEndpointRulePublicRequest request) {
+ return impl.updateNccAzurePrivateEndpointRulePublic(request);
+ }
+
public NetworkConnectivityService impl() {
return impl;
}
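An end-to-end sketch of the reshaped convenience methods above. The AccountClient accessor name (networkConnectivity()) and all IDs, names, and resource paths are assumptions for illustration only.

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.CreateNetworkConnectivityConfiguration;
import com.databricks.sdk.service.settings.CreatePrivateEndpointRule;
import com.databricks.sdk.service.settings.NetworkConnectivityConfiguration;
import com.databricks.sdk.service.settings.UpdatePrivateEndpointRule;
import java.util.Arrays;

class NetworkConnectivityUsageSketch {
  public static void main(String[] args) {
    AccountClient account = new AccountClient(); // assumes account-level auth is configured

    // Create an NCC: name and region now travel in the nested create object.
    NetworkConnectivityConfiguration ncc =
        account
            .networkConnectivity()
            .createNetworkConnectivityConfiguration(
                new CreateNetworkConnectivityConfiguration()
                    .setName("my-ncc")
                    .setRegion("westeurope"));

    // Create a private endpoint rule: the rule properties also travel in a nested object.
    account
        .networkConnectivity()
        .createPrivateEndpointRule(
            "ncc-id-placeholder",
            new CreatePrivateEndpointRule()
                .setGroupId("blob")
                .setResourceId("/subscriptions/.../storageAccounts/myaccount"));

    // Patch an existing rule to a customer-managed resource; only domain_names can be updated.
    account
        .networkConnectivity()
        .updateNccAzurePrivateEndpointRulePublic(
            "ncc-id-placeholder",
            "rule-id-placeholder",
            new UpdatePrivateEndpointRule().setDomainNames(Arrays.asList("db.internal.example.com")),
            "domain_names");
  }
}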
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java
index 398b70d30..6c03595d4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java
@@ -7,6 +7,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/** Properties of the new network connectivity configuration. */
@Generated
public class NetworkConnectivityConfiguration {
/** The Databricks account ID that hosts the credential. */
@@ -27,7 +28,7 @@ public class NetworkConnectivityConfiguration {
/**
* The name of the network connectivity configuration. The name can contain alphanumeric
* characters, hyphens, and underscores. The length must be between 3 and 30 characters. The name
- * must match the regular expression `^[0-9a-zA-Z-_]{3,30}$`.
+ * must match the regular expression ^[0-9a-zA-Z-_]{3,30}$
*/
@JsonProperty("name")
private String name;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java
index 966a6bc80..16b4dd419 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java
@@ -23,7 +23,8 @@ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration(
String.format(
"/api/2.0/accounts/%s/network-connectivity-configs", apiClient.configuredAccountID());
try {
- Request req = new Request("POST", path, apiClient.serialize(request));
+ Request req =
+ new Request("POST", path, apiClient.serialize(request.getNetworkConnectivityConfig()));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
@@ -41,7 +42,8 @@ public NccAzurePrivateEndpointRule createPrivateEndpointRule(
"/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules",
apiClient.configuredAccountID(), request.getNetworkConnectivityConfigId());
try {
- Request req = new Request("POST", path, apiClient.serialize(request));
+ Request req =
+ new Request("POST", path, apiClient.serialize(request.getPrivateEndpointRule()));
ApiClient.setQuery(req, request);
req.withHeader("Accept", "application/json");
req.withHeader("Content-Type", "application/json");
@@ -154,4 +156,25 @@ public ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules(
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic(
+ UpdateNccAzurePrivateEndpointRulePublicRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/accounts/%s/network-connectivity-configs/%s/private-endpoint-rules/%s",
+ apiClient.configuredAccountID(),
+ request.getNetworkConnectivityConfigId(),
+ request.getPrivateEndpointRuleId());
+ try {
+ Request req =
+ new Request("PATCH", path, apiClient.serialize(request.getPrivateEndpointRule()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, NccAzurePrivateEndpointRule.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java
index 80ba453f5..55abae74d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java
@@ -5,7 +5,14 @@
/**
* These APIs provide configurations for the network connectivity of your workspaces for serverless
- * compute resources.
+ * compute resources. This API provides stable subnets for your workspace so that you can configure
+ * your firewalls on your Azure Storage accounts to allow access from Databricks. You can also use
+ * the API to provision private endpoints for Databricks to privately connect serverless compute
+ * resources to your Azure resources using Azure Private Link. See [configure serverless secure
+ * connectivity].
+ *
+ * [configure serverless secure connectivity]:
+ * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security
*
* This is the high-level interface, that contains generated methods.
*
@@ -13,7 +20,22 @@
*/
@Generated
public interface NetworkConnectivityService {
- /** Create a network connectivity configuration. */
+ /**
+ * Create a network connectivity configuration.
+ *
+ * Creates a network connectivity configuration (NCC), which provides stable Azure service
+ * subnets when accessing your Azure Storage accounts. You can also use a network connectivity
+ * configuration to create Databricks managed private endpoints so that Databricks serverless
+ * compute resources privately access your resources.
+ *
+ * **IMPORTANT**: After you create the network connectivity configuration, you must assign one
+ * or more workspaces to the new network connectivity configuration. You can share one network
+ * connectivity configuration with multiple workspaces from the same Azure region within the same
+ * Databricks account. See [configure serverless secure connectivity].
+ *
+ * [configure serverless secure connectivity]:
+ * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security
+ */
NetworkConnectivityConfiguration createNetworkConnectivityConfiguration(
CreateNetworkConnectivityConfigRequest createNetworkConnectivityConfigRequest);
@@ -63,7 +85,7 @@ NetworkConnectivityConfiguration getNetworkConnectivityConfiguration(
GetNetworkConnectivityConfigurationRequest getNetworkConnectivityConfigurationRequest);
/**
- * Get a private endpoint rule.
+ * Gets a private endpoint rule.
*
* Gets the private endpoint rule.
*/
@@ -85,4 +107,14 @@ ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConfigurati
*/
ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules(
ListPrivateEndpointRulesRequest listPrivateEndpointRulesRequest);
+
+ /**
+ * Update a private endpoint rule.
+ *
+ * Updates a private endpoint rule. Currently only a private endpoint rule to customer-managed
+ * resources is allowed to be updated.
+ */
+ NccAzurePrivateEndpointRule updateNccAzurePrivateEndpointRulePublic(
+ UpdateNccAzurePrivateEndpointRulePublicRequest
+ updateNccAzurePrivateEndpointRulePublicRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java
new file mode 100755
index 000000000..666de476e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateNccAzurePrivateEndpointRulePublicRequest.java
@@ -0,0 +1,105 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Update a private endpoint rule */
+@Generated
+public class UpdateNccAzurePrivateEndpointRulePublicRequest {
+ /** Your Network Connectivity Configuration ID. */
+ @JsonIgnore private String networkConnectivityConfigId;
+
+ /**
+ * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure
+ * portal after initialization.
+ */
+ @JsonProperty("private_endpoint_rule")
+ private UpdatePrivateEndpointRule privateEndpointRule;
+
+ /** Your private endpoint rule ID. */
+ @JsonIgnore private String privateEndpointRuleId;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateNccAzurePrivateEndpointRulePublicRequest setNetworkConnectivityConfigId(
+ String networkConnectivityConfigId) {
+ this.networkConnectivityConfigId = networkConnectivityConfigId;
+ return this;
+ }
+
+ public String getNetworkConnectivityConfigId() {
+ return networkConnectivityConfigId;
+ }
+
+ public UpdateNccAzurePrivateEndpointRulePublicRequest setPrivateEndpointRule(
+ UpdatePrivateEndpointRule privateEndpointRule) {
+ this.privateEndpointRule = privateEndpointRule;
+ return this;
+ }
+
+ public UpdatePrivateEndpointRule getPrivateEndpointRule() {
+ return privateEndpointRule;
+ }
+
+ public UpdateNccAzurePrivateEndpointRulePublicRequest setPrivateEndpointRuleId(
+ String privateEndpointRuleId) {
+ this.privateEndpointRuleId = privateEndpointRuleId;
+ return this;
+ }
+
+ public String getPrivateEndpointRuleId() {
+ return privateEndpointRuleId;
+ }
+
+ public UpdateNccAzurePrivateEndpointRulePublicRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateNccAzurePrivateEndpointRulePublicRequest that =
+ (UpdateNccAzurePrivateEndpointRulePublicRequest) o;
+ return Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId)
+ && Objects.equals(privateEndpointRule, that.privateEndpointRule)
+ && Objects.equals(privateEndpointRuleId, that.privateEndpointRuleId)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ networkConnectivityConfigId, privateEndpointRule, privateEndpointRuleId, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateNccAzurePrivateEndpointRulePublicRequest.class)
+ .add("networkConnectivityConfigId", networkConnectivityConfigId)
+ .add("privateEndpointRule", privateEndpointRule)
+ .add("privateEndpointRuleId", privateEndpointRuleId)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
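The same update expressed through the request object directly; update_mask lists the resource field names to patch, comma-separated without spaces. The IDs and domain names are placeholders.

import com.databricks.sdk.service.settings.UpdateNccAzurePrivateEndpointRulePublicRequest;
import com.databricks.sdk.service.settings.UpdatePrivateEndpointRule;
import java.util.Arrays;

class UpdatePrivateEndpointRuleRequestSketch {
  static UpdateNccAzurePrivateEndpointRulePublicRequest buildRequest() {
    return new UpdateNccAzurePrivateEndpointRulePublicRequest()
        .setNetworkConnectivityConfigId("ncc-id-placeholder")
        .setPrivateEndpointRuleId("rule-id-placeholder")
        .setPrivateEndpointRule(
            // When updating domain_names, the full list of target domain names must be supplied.
            new UpdatePrivateEndpointRule()
                .setDomainNames(
                    Arrays.asList("db.internal.example.com", "api.internal.example.com")))
        .setUpdateMask("domain_names"); // field mask relative to the resource object
  }
}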
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java
new file mode 100755
index 000000000..f7df95078
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java
@@ -0,0 +1,54 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * Properties of the new private endpoint rule. Note that you must approve the endpoint in Azure
+ * portal after initialization.
+ */
+@Generated
+public class UpdatePrivateEndpointRule {
+ /**
+ * Only used by private endpoints to customer-managed resources.
+ *
+ * Domain names of target private link service. When updating this field, the full list of
+ * target domain_names must be specified.
+ */
+ @JsonProperty("domain_names")
+ private Collection<String> domainNames;
+
+ public UpdatePrivateEndpointRule setDomainNames(Collection<String> domainNames) {
+ this.domainNames = domainNames;
+ return this;
+ }
+
+ public Collection<String> getDomainNames() {
+ return domainNames;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdatePrivateEndpointRule that = (UpdatePrivateEndpointRule) o;
+ return Objects.equals(domainNames, that.domainNames);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(domainNames);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdatePrivateEndpointRule.class)
+ .add("domainNames", domainNames)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Aggregation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Aggregation.java
new file mode 100755
index 000000000..74b1d02ca
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Aggregation.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum Aggregation {
+ AVG,
+ COUNT,
+ COUNT_DISTINCT,
+ MAX,
+ MEDIAN,
+ MIN,
+ STDDEV,
+ SUM,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertEvaluationState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertEvaluationState.java
new file mode 100755
index 000000000..47e7f0491
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertEvaluationState.java
@@ -0,0 +1,18 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * UNSPECIFIED - default unspecify value for proto enum, do not use it in the code UNKNOWN - alert
+ * not yet evaluated TRIGGERED - alert is triggered OK - alert is not triggered ERROR - alert
+ * evaluation failed
+ */
+@Generated
+public enum AlertEvaluationState {
+ ERROR,
+ OK,
+ TRIGGERED,
+ UNKNOWN,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java
new file mode 100755
index 000000000..cbe402a72
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2.java
@@ -0,0 +1,256 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AlertV2 {
+ /** The timestamp indicating when the alert was created. */
+ @JsonProperty("create_time")
+ private String createTime;
+
+ /** Custom description for the alert. support mustache template. */
+ @JsonProperty("custom_description")
+ private String customDescription;
+
+ /** Custom summary for the alert. support mustache template. */
+ @JsonProperty("custom_summary")
+ private String customSummary;
+
+ /** The display name of the alert. */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** */
+ @JsonProperty("evaluation")
+ private AlertV2Evaluation evaluation;
+
+ /** UUID identifying the alert. */
+ @JsonProperty("id")
+ private String id;
+
+ /** Indicates whether the query is trashed. */
+ @JsonProperty("lifecycle_state")
+ private LifecycleState lifecycleState;
+
+ /** The owner's username. This field is set to "Unavailable" if the user has been deleted. */
+ @JsonProperty("owner_user_name")
+ private String ownerUserName;
+
+ /**
+ * The workspace path of the folder containing the alert. Can only be set on create, and cannot be
+ * updated.
+ */
+ @JsonProperty("parent_path")
+ private String parentPath;
+
+ /** Text of the query to be run. */
+ @JsonProperty("query_text")
+ private String queryText;
+
+ /** The run as username. This field is set to "Unavailable" if the user has been deleted. */
+ @JsonProperty("run_as_user_name")
+ private String runAsUserName;
+
+ /** */
+ @JsonProperty("schedule")
+ private CronSchedule schedule;
+
+ /** The timestamp indicating when the alert was updated. */
+ @JsonProperty("update_time")
+ private String updateTime;
+
+ /** ID of the SQL warehouse attached to the alert. */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public AlertV2 setCreateTime(String createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public String getCreateTime() {
+ return createTime;
+ }
+
+ public AlertV2 setCustomDescription(String customDescription) {
+ this.customDescription = customDescription;
+ return this;
+ }
+
+ public String getCustomDescription() {
+ return customDescription;
+ }
+
+ public AlertV2 setCustomSummary(String customSummary) {
+ this.customSummary = customSummary;
+ return this;
+ }
+
+ public String getCustomSummary() {
+ return customSummary;
+ }
+
+ public AlertV2 setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public AlertV2 setEvaluation(AlertV2Evaluation evaluation) {
+ this.evaluation = evaluation;
+ return this;
+ }
+
+ public AlertV2Evaluation getEvaluation() {
+ return evaluation;
+ }
+
+ public AlertV2 setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public AlertV2 setLifecycleState(LifecycleState lifecycleState) {
+ this.lifecycleState = lifecycleState;
+ return this;
+ }
+
+ public LifecycleState getLifecycleState() {
+ return lifecycleState;
+ }
+
+ public AlertV2 setOwnerUserName(String ownerUserName) {
+ this.ownerUserName = ownerUserName;
+ return this;
+ }
+
+ public String getOwnerUserName() {
+ return ownerUserName;
+ }
+
+ public AlertV2 setParentPath(String parentPath) {
+ this.parentPath = parentPath;
+ return this;
+ }
+
+ public String getParentPath() {
+ return parentPath;
+ }
+
+ public AlertV2 setQueryText(String queryText) {
+ this.queryText = queryText;
+ return this;
+ }
+
+ public String getQueryText() {
+ return queryText;
+ }
+
+ public AlertV2 setRunAsUserName(String runAsUserName) {
+ this.runAsUserName = runAsUserName;
+ return this;
+ }
+
+ public String getRunAsUserName() {
+ return runAsUserName;
+ }
+
+ public AlertV2 setSchedule(CronSchedule schedule) {
+ this.schedule = schedule;
+ return this;
+ }
+
+ public CronSchedule getSchedule() {
+ return schedule;
+ }
+
+ public AlertV2 setUpdateTime(String updateTime) {
+ this.updateTime = updateTime;
+ return this;
+ }
+
+ public String getUpdateTime() {
+ return updateTime;
+ }
+
+ public AlertV2 setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AlertV2 that = (AlertV2) o;
+ return Objects.equals(createTime, that.createTime)
+ && Objects.equals(customDescription, that.customDescription)
+ && Objects.equals(customSummary, that.customSummary)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(evaluation, that.evaluation)
+ && Objects.equals(id, that.id)
+ && Objects.equals(lifecycleState, that.lifecycleState)
+ && Objects.equals(ownerUserName, that.ownerUserName)
+ && Objects.equals(parentPath, that.parentPath)
+ && Objects.equals(queryText, that.queryText)
+ && Objects.equals(runAsUserName, that.runAsUserName)
+ && Objects.equals(schedule, that.schedule)
+ && Objects.equals(updateTime, that.updateTime)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ createTime,
+ customDescription,
+ customSummary,
+ displayName,
+ evaluation,
+ id,
+ lifecycleState,
+ ownerUserName,
+ parentPath,
+ queryText,
+ runAsUserName,
+ schedule,
+ updateTime,
+ warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AlertV2.class)
+ .add("createTime", createTime)
+ .add("customDescription", customDescription)
+ .add("customSummary", customSummary)
+ .add("displayName", displayName)
+ .add("evaluation", evaluation)
+ .add("id", id)
+ .add("lifecycleState", lifecycleState)
+ .add("ownerUserName", ownerUserName)
+ .add("parentPath", parentPath)
+ .add("queryText", queryText)
+ .add("runAsUserName", runAsUserName)
+ .add("schedule", schedule)
+ .add("updateTime", updateTime)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Evaluation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Evaluation.java
new file mode 100755
index 000000000..ad55cc8ea
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Evaluation.java
@@ -0,0 +1,141 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AlertV2Evaluation {
+ /** Operator used for comparison in alert evaluation. */
+ @JsonProperty("comparison_operator")
+ private ComparisonOperator comparisonOperator;
+
+ /** Alert state if result is empty. */
+ @JsonProperty("empty_result_state")
+ private AlertEvaluationState emptyResultState;
+
+ /** Timestamp of the last evaluation. */
+ @JsonProperty("last_evaluated_at")
+ private String lastEvaluatedAt;
+
+ /** User or Notification Destination to notify when alert is triggered. */
+ @JsonProperty("notification")
+ private AlertV2Notification notification;
+
+ /** Source column from result to use to evaluate alert */
+ @JsonProperty("source")
+ private AlertV2OperandColumn source;
+
+ /** Latest state of alert evaluation. */
+ @JsonProperty("state")
+ private AlertEvaluationState state;
+
+ /** Threshold to user for alert evaluation, can be a column or a value. */
+ @JsonProperty("threshold")
+ private AlertV2Operand threshold;
+
+ public AlertV2Evaluation setComparisonOperator(ComparisonOperator comparisonOperator) {
+ this.comparisonOperator = comparisonOperator;
+ return this;
+ }
+
+ public ComparisonOperator getComparisonOperator() {
+ return comparisonOperator;
+ }
+
+ public AlertV2Evaluation setEmptyResultState(AlertEvaluationState emptyResultState) {
+ this.emptyResultState = emptyResultState;
+ return this;
+ }
+
+ public AlertEvaluationState getEmptyResultState() {
+ return emptyResultState;
+ }
+
+ public AlertV2Evaluation setLastEvaluatedAt(String lastEvaluatedAt) {
+ this.lastEvaluatedAt = lastEvaluatedAt;
+ return this;
+ }
+
+ public String getLastEvaluatedAt() {
+ return lastEvaluatedAt;
+ }
+
+ public AlertV2Evaluation setNotification(AlertV2Notification notification) {
+ this.notification = notification;
+ return this;
+ }
+
+ public AlertV2Notification getNotification() {
+ return notification;
+ }
+
+ public AlertV2Evaluation setSource(AlertV2OperandColumn source) {
+ this.source = source;
+ return this;
+ }
+
+ public AlertV2OperandColumn getSource() {
+ return source;
+ }
+
+ public AlertV2Evaluation setState(AlertEvaluationState state) {
+ this.state = state;
+ return this;
+ }
+
+ public AlertEvaluationState getState() {
+ return state;
+ }
+
+ public AlertV2Evaluation setThreshold(AlertV2Operand threshold) {
+ this.threshold = threshold;
+ return this;
+ }
+
+ public AlertV2Operand getThreshold() {
+ return threshold;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AlertV2Evaluation that = (AlertV2Evaluation) o;
+ return Objects.equals(comparisonOperator, that.comparisonOperator)
+ && Objects.equals(emptyResultState, that.emptyResultState)
+ && Objects.equals(lastEvaluatedAt, that.lastEvaluatedAt)
+ && Objects.equals(notification, that.notification)
+ && Objects.equals(source, that.source)
+ && Objects.equals(state, that.state)
+ && Objects.equals(threshold, that.threshold);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ comparisonOperator,
+ emptyResultState,
+ lastEvaluatedAt,
+ notification,
+ source,
+ state,
+ threshold);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AlertV2Evaluation.class)
+ .add("comparisonOperator", comparisonOperator)
+ .add("emptyResultState", emptyResultState)
+ .add("lastEvaluatedAt", lastEvaluatedAt)
+ .add("notification", notification)
+ .add("source", source)
+ .add("state", state)
+ .add("threshold", threshold)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java
new file mode 100755
index 000000000..6fa20c245
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Notification.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class AlertV2Notification {
+ /** Whether to notify alert subscribers when alert returns back to normal. */
+ @JsonProperty("notify_on_ok")
+ private Boolean notifyOnOk;
+
+ /**
+ * Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it
+ * can be triggered again. If 0 or not specified, the alert will not be triggered again.
+ */
+ @JsonProperty("retrigger_seconds")
+ private Long retriggerSeconds;
+
+ /** */
+ @JsonProperty("subscriptions")
+ private Collection<AlertV2Subscription> subscriptions;
+
+ public AlertV2Notification setNotifyOnOk(Boolean notifyOnOk) {
+ this.notifyOnOk = notifyOnOk;
+ return this;
+ }
+
+ public Boolean getNotifyOnOk() {
+ return notifyOnOk;
+ }
+
+ public AlertV2Notification setRetriggerSeconds(Long retriggerSeconds) {
+ this.retriggerSeconds = retriggerSeconds;
+ return this;
+ }
+
+ public Long getRetriggerSeconds() {
+ return retriggerSeconds;
+ }
+
+ public AlertV2Notification setSubscriptions(Collection<AlertV2Subscription> subscriptions) {
+ this.subscriptions = subscriptions;
+ return this;
+ }
+
+ public Collection<AlertV2Subscription> getSubscriptions() {
+ return subscriptions;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AlertV2Notification that = (AlertV2Notification) o;
+ return Objects.equals(notifyOnOk, that.notifyOnOk)
+ && Objects.equals(retriggerSeconds, that.retriggerSeconds)
+ && Objects.equals(subscriptions, that.subscriptions);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(notifyOnOk, retriggerSeconds, subscriptions);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AlertV2Notification.class)
+ .add("notifyOnOk", notifyOnOk)
+ .add("retriggerSeconds", retriggerSeconds)
+ .add("subscriptions", subscriptions)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Operand.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Operand.java
new file mode 100755
index 000000000..947ee092f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Operand.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AlertV2Operand {
+ /** */
+ @JsonProperty("column")
+ private AlertV2OperandColumn column;
+
+ /** */
+ @JsonProperty("value")
+ private AlertV2OperandValue value;
+
+ public AlertV2Operand setColumn(AlertV2OperandColumn column) {
+ this.column = column;
+ return this;
+ }
+
+ public AlertV2OperandColumn getColumn() {
+ return column;
+ }
+
+ public AlertV2Operand setValue(AlertV2OperandValue value) {
+ this.value = value;
+ return this;
+ }
+
+ public AlertV2OperandValue getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AlertV2Operand that = (AlertV2Operand) o;
+ return Objects.equals(column, that.column) && Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(column, value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AlertV2Operand.class)
+ .add("column", column)
+ .add("value", value)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java
new file mode 100755
index 000000000..2e8776e18
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandColumn.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AlertV2OperandColumn {
+ /** */
+ @JsonProperty("aggregation")
+ private Aggregation aggregation;
+
+ /** */
+ @JsonProperty("display")
+ private String display;
+
+ /** */
+ @JsonProperty("name")
+ private String name;
+
+ public AlertV2OperandColumn setAggregation(Aggregation aggregation) {
+ this.aggregation = aggregation;
+ return this;
+ }
+
+ public Aggregation getAggregation() {
+ return aggregation;
+ }
+
+ public AlertV2OperandColumn setDisplay(String display) {
+ this.display = display;
+ return this;
+ }
+
+ public String getDisplay() {
+ return display;
+ }
+
+ public AlertV2OperandColumn setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AlertV2OperandColumn that = (AlertV2OperandColumn) o;
+ return Objects.equals(aggregation, that.aggregation)
+ && Objects.equals(display, that.display)
+ && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(aggregation, display, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AlertV2OperandColumn.class)
+ .add("aggregation", aggregation)
+ .add("display", display)
+ .add("name", name)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValue.java
new file mode 100755
index 000000000..c1e883802
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2OperandValue.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AlertV2OperandValue {
+ /** */
+ @JsonProperty("bool_value")
+ private Boolean boolValue;
+
+ /** */
+ @JsonProperty("double_value")
+ private Double doubleValue;
+
+ /** */
+ @JsonProperty("string_value")
+ private String stringValue;
+
+ public AlertV2OperandValue setBoolValue(Boolean boolValue) {
+ this.boolValue = boolValue;
+ return this;
+ }
+
+ public Boolean getBoolValue() {
+ return boolValue;
+ }
+
+ public AlertV2OperandValue setDoubleValue(Double doubleValue) {
+ this.doubleValue = doubleValue;
+ return this;
+ }
+
+ public Double getDoubleValue() {
+ return doubleValue;
+ }
+
+ public AlertV2OperandValue setStringValue(String stringValue) {
+ this.stringValue = stringValue;
+ return this;
+ }
+
+ public String getStringValue() {
+ return stringValue;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AlertV2OperandValue that = (AlertV2OperandValue) o;
+ return Objects.equals(boolValue, that.boolValue)
+ && Objects.equals(doubleValue, that.doubleValue)
+ && Objects.equals(stringValue, that.stringValue);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(boolValue, doubleValue, stringValue);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AlertV2OperandValue.class)
+ .add("boolValue", boolValue)
+ .add("doubleValue", doubleValue)
+ .add("stringValue", stringValue)
+ .toString();
+ }
+}
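
AlertV2Operand can hold either a column reference (AlertV2OperandColumn) or a literal (AlertV2OperandValue). A minimal sketch of both shapes, outside the generated sources and with illustrative names and threshold:

import com.databricks.sdk.service.sql.AlertV2Operand;
import com.databricks.sdk.service.sql.AlertV2OperandColumn;
import com.databricks.sdk.service.sql.AlertV2OperandValue;

public class AlertOperandSketch {
  public static void main(String[] args) {
    // Column side of a comparison: which result column the alert watches.
    AlertV2Operand watchedColumn =
        new AlertV2Operand()
            .setColumn(new AlertV2OperandColumn().setName("error_count").setDisplay("Error count"));

    // Literal side of a comparison: the threshold the column is compared against.
    AlertV2Operand threshold =
        new AlertV2Operand().setValue(new AlertV2OperandValue().setDoubleValue(100.0));

    System.out.println(watchedColumn + " vs " + threshold);
  }
}

How operands and a ComparisonOperator are combined into a condition is governed by AlertV2Evaluation, which is added elsewhere in this change.
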
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Subscription.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Subscription.java
new file mode 100755
index 000000000..cb0d96a42
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertV2Subscription.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class AlertV2Subscription {
+ /** */
+ @JsonProperty("destination_id")
+ private String destinationId;
+
+ /** */
+ @JsonProperty("user_email")
+ private String userEmail;
+
+ public AlertV2Subscription setDestinationId(String destinationId) {
+ this.destinationId = destinationId;
+ return this;
+ }
+
+ public String getDestinationId() {
+ return destinationId;
+ }
+
+ public AlertV2Subscription setUserEmail(String userEmail) {
+ this.userEmail = userEmail;
+ return this;
+ }
+
+ public String getUserEmail() {
+ return userEmail;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AlertV2Subscription that = (AlertV2Subscription) o;
+ return Objects.equals(destinationId, that.destinationId)
+ && Objects.equals(userEmail, that.userEmail);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(destinationId, userEmail);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AlertV2Subscription.class)
+ .add("destinationId", destinationId)
+ .add("userEmail", userEmail)
+ .toString();
+ }
+}
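
AlertV2Subscription works together with AlertV2Notification (whose equals/hashCode/toString appear earlier in this diff) to describe who is notified and how often an alert may re-trigger. A rough sketch; the setNotifyOnOk, setRetriggerSeconds and setSubscriptions setters are assumed from the notification's fields and are not shown in this diff:

import com.databricks.sdk.service.sql.AlertV2Notification;
import com.databricks.sdk.service.sql.AlertV2Subscription;
import java.util.Arrays;

public class AlertNotificationSketch {
  public static void main(String[] args) {
    // Setter names below are assumed from AlertV2Notification's fields
    // (notifyOnOk, retriggerSeconds, subscriptions); they follow the same
    // fluent pattern as the other generated models in this change.
    AlertV2Notification notification =
        new AlertV2Notification()
            .setNotifyOnOk(true)
            .setRetriggerSeconds(3600L)
            .setSubscriptions(
                Arrays.asList(
                    new AlertV2Subscription().setUserEmail("oncall@example.com"),
                    new AlertV2Subscription().setDestinationId("destination-uuid")));
    System.out.println(notification);
  }
}
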
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java
new file mode 100755
index 000000000..d509781c9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java
@@ -0,0 +1,99 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** TODO: Add description */
+@Generated
+public class AlertsV2API {
+ private static final Logger LOG = LoggerFactory.getLogger(AlertsV2API.class);
+
+ private final AlertsV2Service impl;
+
+ /** Regular-use constructor */
+ public AlertsV2API(ApiClient apiClient) {
+ impl = new AlertsV2Impl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public AlertsV2API(AlertsV2Service mock) {
+ impl = mock;
+ }
+
+ /**
+ * Create an alert.
+ *
+ * Create Alert
+ */
+ public AlertV2 createAlert(CreateAlertV2Request request) {
+ return impl.createAlert(request);
+ }
+
+ public AlertV2 getAlert(String id) {
+ return getAlert(new GetAlertV2Request().setId(id));
+ }
+
+ /**
+ * Get an alert.
+ *
+   * Gets an alert.
+ */
+ public AlertV2 getAlert(GetAlertV2Request request) {
+ return impl.getAlert(request);
+ }
+
+ /**
+ * List alerts.
+ *
+   * Gets a list of alerts accessible to the user, ordered by creation time.
+ */
+  public Iterable<ListAlertsV2ResponseAlert> listAlerts(ListAlertsV2Request request) {
+ return new Paginator<>(
+ request,
+ impl::listAlerts,
+ ListAlertsV2Response::getResults,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ public void trashAlert(String id) {
+ trashAlert(new TrashAlertV2Request().setId(id));
+ }
+
+ /**
+ * Delete an alert.
+ *
+ * Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can
+ * no longer trigger. You can restore a trashed alert through the UI. A trashed alert is
+ * permanently deleted after 30 days.
+ */
+ public void trashAlert(TrashAlertV2Request request) {
+ impl.trashAlert(request);
+ }
+
+ public AlertV2 updateAlert(String id, String updateMask) {
+ return updateAlert(new UpdateAlertV2Request().setId(id).setUpdateMask(updateMask));
+ }
+
+ /**
+ * Update an alert.
+ *
+   * Update alert
+ */
+ public AlertV2 updateAlert(UpdateAlertV2Request request) {
+ return impl.updateAlert(request);
+ }
+
+ public AlertsV2Service impl() {
+ return impl;
+ }
+}
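
For orientation, a usage sketch of the new wrapper (not part of the diff). It assumes an already-configured ApiClient and that listAlerts yields the ListAlertsV2ResponseAlert items carried in the list response:

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.service.sql.AlertV2;
import com.databricks.sdk.service.sql.AlertsV2API;
import com.databricks.sdk.service.sql.ListAlertsV2Request;
import com.databricks.sdk.service.sql.ListAlertsV2ResponseAlert;

public class AlertsV2UsageSketch {
  // Assumes a pre-configured ApiClient; in practice the wrapper is usually reached
  // through the workspace-level client rather than constructed by hand.
  static void demo(ApiClient apiClient, String alertId) {
    AlertsV2API alerts = new AlertsV2API(apiClient);

    // Fetch a single alert by UUID via the convenience overload.
    AlertV2 alert = alerts.getAlert(alertId);
    System.out.println(alert);

    // Page through all alerts; the Paginator follows next_page_token automatically.
    for (ListAlertsV2ResponseAlert a :
        alerts.listAlerts(new ListAlertsV2Request().setPageSize(100L))) {
      System.out.println(a.getDisplayName() + " -> " + a.getLifecycleState());
    }

    // Move the alert to the trash; it is permanently deleted after 30 days.
    alerts.trashAlert(alertId);
  }
}
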
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java
new file mode 100755
index 000000000..42c268b80
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Impl.java
@@ -0,0 +1,85 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of AlertsV2 */
+@Generated
+class AlertsV2Impl implements AlertsV2Service {
+ private final ApiClient apiClient;
+
+ public AlertsV2Impl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public AlertV2 createAlert(CreateAlertV2Request request) {
+ String path = "/api/2.0/alerts";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, AlertV2.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public AlertV2 getAlert(GetAlertV2Request request) {
+ String path = String.format("/api/2.0/alerts/%s", request.getId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, AlertV2.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public ListAlertsV2Response listAlerts(ListAlertsV2Request request) {
+ String path = "/api/2.0/alerts";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListAlertsV2Response.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void trashAlert(TrashAlertV2Request request) {
+ String path = String.format("/api/2.0/alerts/%s", request.getId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Empty.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public AlertV2 updateAlert(UpdateAlertV2Request request) {
+ String path = String.format("/api/2.0/alerts/%s", request.getId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, AlertV2.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Service.java
new file mode 100755
index 000000000..f8740fa39
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2Service.java
@@ -0,0 +1,51 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * TODO: Add description
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface AlertsV2Service {
+ /**
+ * Create an alert.
+ *
+   * Create Alert
+ */
+ AlertV2 createAlert(CreateAlertV2Request createAlertV2Request);
+
+ /**
+ * Get an alert.
+ *
+   * Gets an alert.
+ */
+ AlertV2 getAlert(GetAlertV2Request getAlertV2Request);
+
+ /**
+ * List alerts.
+ *
+   * Gets a list of alerts accessible to the user, ordered by creation time.
+ */
+ ListAlertsV2Response listAlerts(ListAlertsV2Request listAlertsV2Request);
+
+ /**
+ * Delete an alert.
+ *
+   * Moves an alert to the trash. Trashed alerts immediately disappear from list views, and can
+ * no longer trigger. You can restore a trashed alert through the UI. A trashed alert is
+ * permanently deleted after 30 days.
+ */
+ void trashAlert(TrashAlertV2Request trashAlertV2Request);
+
+ /**
+ * Update an alert.
+ *
+   * Update alert
+ */
+ AlertV2 updateAlert(UpdateAlertV2Request updateAlertV2Request);
+}
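
Because AlertsV2API accepts this interface in its mock constructor, tests can swap the HTTP-backed implementation for a hand-rolled fake. A sketch, assuming the generated models (here AlertV2 and ListAlertsV2Response) keep their implicit no-arg constructors:

import com.databricks.sdk.service.sql.AlertV2;
import com.databricks.sdk.service.sql.AlertsV2API;
import com.databricks.sdk.service.sql.AlertsV2Service;
import com.databricks.sdk.service.sql.CreateAlertV2Request;
import com.databricks.sdk.service.sql.GetAlertV2Request;
import com.databricks.sdk.service.sql.ListAlertsV2Request;
import com.databricks.sdk.service.sql.ListAlertsV2Response;
import com.databricks.sdk.service.sql.TrashAlertV2Request;
import com.databricks.sdk.service.sql.UpdateAlertV2Request;

public class AlertsV2FakeSketch {
  public static void main(String[] args) {
    // A hand-rolled fake; a mocking framework would normally stand in here.
    AlertsV2Service fake =
        new AlertsV2Service() {
          @Override
          public AlertV2 createAlert(CreateAlertV2Request request) {
            return request.getAlert();
          }

          @Override
          public AlertV2 getAlert(GetAlertV2Request request) {
            return new AlertV2();
          }

          @Override
          public ListAlertsV2Response listAlerts(ListAlertsV2Request request) {
            return new ListAlertsV2Response();
          }

          @Override
          public void trashAlert(TrashAlertV2Request request) {}

          @Override
          public AlertV2 updateAlert(UpdateAlertV2Request request) {
            return new AlertV2();
          }
        };

    // The mock constructor wires the wrapper to the fake instead of AlertsV2Impl.
    AlertsV2API alerts = new AlertsV2API(fake);
    System.out.println(alerts.getAlert("some-alert-id"));
  }
}
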
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ComparisonOperator.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ComparisonOperator.java
new file mode 100755
index 000000000..3b144a4ca
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ComparisonOperator.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum ComparisonOperator {
+ EQUAL,
+ GREATER_THAN,
+ GREATER_THAN_OR_EQUAL,
+ IS_NOT_NULL,
+ IS_NULL,
+ LESS_THAN,
+ LESS_THAN_OR_EQUAL,
+ NOT_EQUAL,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java
index 59bd25171..3bed24fef 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequest.java
@@ -13,6 +13,13 @@ public class CreateAlertRequest {
@JsonProperty("alert")
private CreateAlertRequestAlert alert;
+ /**
+ * If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the
+ * alert's display name conflicts with an existing alert's display name.
+ */
+ @JsonProperty("auto_resolve_display_name")
+ private Boolean autoResolveDisplayName;
+
public CreateAlertRequest setAlert(CreateAlertRequestAlert alert) {
this.alert = alert;
return this;
@@ -22,21 +29,34 @@ public CreateAlertRequestAlert getAlert() {
return alert;
}
+ public CreateAlertRequest setAutoResolveDisplayName(Boolean autoResolveDisplayName) {
+ this.autoResolveDisplayName = autoResolveDisplayName;
+ return this;
+ }
+
+ public Boolean getAutoResolveDisplayName() {
+ return autoResolveDisplayName;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateAlertRequest that = (CreateAlertRequest) o;
- return Objects.equals(alert, that.alert);
+ return Objects.equals(alert, that.alert)
+ && Objects.equals(autoResolveDisplayName, that.autoResolveDisplayName);
}
@Override
public int hashCode() {
- return Objects.hash(alert);
+ return Objects.hash(alert, autoResolveDisplayName);
}
@Override
public String toString() {
- return new ToStringer(CreateAlertRequest.class).add("alert", alert).toString();
+ return new ToStringer(CreateAlertRequest.class)
+ .add("alert", alert)
+ .add("autoResolveDisplayName", autoResolveDisplayName)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java
new file mode 100755
index 000000000..e9dce84d6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertV2Request.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateAlertV2Request {
+ /** */
+ @JsonProperty("alert")
+ private AlertV2 alert;
+
+ public CreateAlertV2Request setAlert(AlertV2 alert) {
+ this.alert = alert;
+ return this;
+ }
+
+ public AlertV2 getAlert() {
+ return alert;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateAlertV2Request that = (CreateAlertV2Request) o;
+ return Objects.equals(alert, that.alert);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(alert);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateAlertV2Request.class).add("alert", alert).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequest.java
index 950421431..99a698477 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateQueryRequest.java
@@ -9,10 +9,26 @@
@Generated
public class CreateQueryRequest {
+ /**
+ * If true, automatically resolve query display name conflicts. Otherwise, fail the request if the
+ * query's display name conflicts with an existing query's display name.
+ */
+ @JsonProperty("auto_resolve_display_name")
+ private Boolean autoResolveDisplayName;
+
/** */
@JsonProperty("query")
private CreateQueryRequestQuery query;
+ public CreateQueryRequest setAutoResolveDisplayName(Boolean autoResolveDisplayName) {
+ this.autoResolveDisplayName = autoResolveDisplayName;
+ return this;
+ }
+
+ public Boolean getAutoResolveDisplayName() {
+ return autoResolveDisplayName;
+ }
+
public CreateQueryRequest setQuery(CreateQueryRequestQuery query) {
this.query = query;
return this;
@@ -27,16 +43,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CreateQueryRequest that = (CreateQueryRequest) o;
- return Objects.equals(query, that.query);
+ return Objects.equals(autoResolveDisplayName, that.autoResolveDisplayName)
+ && Objects.equals(query, that.query);
}
@Override
public int hashCode() {
- return Objects.hash(query);
+ return Objects.hash(autoResolveDisplayName, query);
}
@Override
public String toString() {
- return new ToStringer(CreateQueryRequest.class).add("query", query).toString();
+ return new ToStringer(CreateQueryRequest.class)
+ .add("autoResolveDisplayName", autoResolveDisplayName)
+ .add("query", query)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java
new file mode 100755
index 000000000..d2245c416
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CronSchedule.java
@@ -0,0 +1,83 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CronSchedule {
+ /** Indicate whether this schedule is paused or not. */
+ @JsonProperty("pause_status")
+ private SchedulePauseStatus pauseStatus;
+
+ /**
+ * A cron expression using quartz syntax that specifies the schedule for this pipeline. Should use
+ * the quartz format described here:
+ * http://www.quartz-scheduler.org/documentation/quartz-2.1.7/tutorials/tutorial-lesson-06.html
+ */
+ @JsonProperty("quartz_cron_schedule")
+ private String quartzCronSchedule;
+
+ /**
+ * A Java timezone id. The schedule will be resolved using this timezone. This will be combined
+ * with the quartz_cron_schedule to determine the schedule. See
+ * https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
+ * for details.
+ */
+ @JsonProperty("timezone_id")
+ private String timezoneId;
+
+ public CronSchedule setPauseStatus(SchedulePauseStatus pauseStatus) {
+ this.pauseStatus = pauseStatus;
+ return this;
+ }
+
+ public SchedulePauseStatus getPauseStatus() {
+ return pauseStatus;
+ }
+
+ public CronSchedule setQuartzCronSchedule(String quartzCronSchedule) {
+ this.quartzCronSchedule = quartzCronSchedule;
+ return this;
+ }
+
+ public String getQuartzCronSchedule() {
+ return quartzCronSchedule;
+ }
+
+ public CronSchedule setTimezoneId(String timezoneId) {
+ this.timezoneId = timezoneId;
+ return this;
+ }
+
+ public String getTimezoneId() {
+ return timezoneId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CronSchedule that = (CronSchedule) o;
+ return Objects.equals(pauseStatus, that.pauseStatus)
+ && Objects.equals(quartzCronSchedule, that.quartzCronSchedule)
+ && Objects.equals(timezoneId, that.timezoneId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pauseStatus, quartzCronSchedule, timezoneId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CronSchedule.class)
+ .add("pauseStatus", pauseStatus)
+ .add("quartzCronSchedule", quartzCronSchedule)
+ .add("timezoneId", timezoneId)
+ .toString();
+ }
+}
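
A small construction example with illustrative values: the schedule takes a Quartz cron expression plus a Java timezone id, and its pause state comes from SchedulePauseStatus (added below):

import com.databricks.sdk.service.sql.CronSchedule;
import com.databricks.sdk.service.sql.SchedulePauseStatus;

public class CronScheduleSketch {
  public static void main(String[] args) {
    // Quartz syntax: second, minute, hour, day-of-month, month, day-of-week.
    // This expression fires at the top of every hour, evaluated in UTC.
    CronSchedule hourly =
        new CronSchedule()
            .setQuartzCronSchedule("0 0 * * * ?")
            .setTimezoneId("UTC")
            .setPauseStatus(SchedulePauseStatus.UNPAUSED);
    System.out.println(hourly);
  }
}
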
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java
index e29b45bc9..bd56e2472 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java
@@ -98,7 +98,7 @@ public class EndpointInfo {
@JsonProperty("name")
private String name;
- /** current number of active sessions for the warehouse */
+ /** Deprecated. current number of active sessions for the warehouse */
@JsonProperty("num_active_sessions")
private Long numActiveSessions;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2Request.java
new file mode 100755
index 000000000..b5c4f94f8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertV2Request.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get an alert */
+@Generated
+public class GetAlertV2Request {
+ /** */
+ @JsonIgnore private String id;
+
+ public GetAlertV2Request setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetAlertV2Request that = (GetAlertV2Request) o;
+ return Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetAlertV2Request.class).add("id", id).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java
index 885829a07..e0a414c5b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetWarehouseResponse.java
@@ -98,7 +98,7 @@ public class GetWarehouseResponse {
@JsonProperty("name")
private String name;
- /** current number of active sessions for the warehouse */
+ /** Deprecated. current number of active sessions for the warehouse */
@JsonProperty("num_active_sessions")
private Long numActiveSessions;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Request.java
new file mode 100755
index 000000000..dc51f962e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Request.java
@@ -0,0 +1,62 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** List alerts */
+@Generated
+public class ListAlertsV2Request {
+ /** */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListAlertsV2Request setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListAlertsV2Request setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListAlertsV2Request that = (ListAlertsV2Request) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListAlertsV2Request.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java
new file mode 100755
index 000000000..a8bd76201
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListAlertsV2Response {
+ /** */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("results")
+  private Collection<ListAlertsV2ResponseAlert> results;
+
+ public ListAlertsV2Response setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+  public ListAlertsV2Response setResults(Collection<ListAlertsV2ResponseAlert> results) {
+ this.results = results;
+ return this;
+ }
+
+  public Collection<ListAlertsV2ResponseAlert> getResults() {
+ return results;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListAlertsV2Response that = (ListAlertsV2Response) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(results, that.results);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, results);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListAlertsV2Response.class)
+ .add("nextPageToken", nextPageToken)
+ .add("results", results)
+ .toString();
+ }
+}
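
The page_token / next_page_token pair is a standard cursor protocol; AlertsV2API.listAlerts wraps it in a Paginator, but the loop it automates looks roughly like this (assuming results carries ListAlertsV2ResponseAlert items):

import com.databricks.sdk.service.sql.AlertsV2API;
import com.databricks.sdk.service.sql.ListAlertsV2Request;
import com.databricks.sdk.service.sql.ListAlertsV2Response;
import com.databricks.sdk.service.sql.ListAlertsV2ResponseAlert;

public class ManualPagingSketch {
  // Keep requesting pages until next_page_token comes back empty.
  static void listAll(AlertsV2API alerts) {
    ListAlertsV2Request request = new ListAlertsV2Request().setPageSize(50L);
    while (true) {
      ListAlertsV2Response page = alerts.impl().listAlerts(request);
      if (page.getResults() != null) {
        for (ListAlertsV2ResponseAlert a : page.getResults()) {
          System.out.println(a.getId());
        }
      }
      String token = page.getNextPageToken();
      if (token == null || token.isEmpty()) {
        break;
      }
      request.setPageToken(token);
    }
  }
}
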
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2ResponseAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2ResponseAlert.java
new file mode 100755
index 000000000..1bc072eb7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2ResponseAlert.java
@@ -0,0 +1,237 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ListAlertsV2ResponseAlert {
+ /** The timestamp indicating when the alert was created. */
+ @JsonProperty("create_time")
+ private String createTime;
+
+ /** Custom description for the alert. support mustache template. */
+ @JsonProperty("custom_description")
+ private String customDescription;
+
+ /** Custom summary for the alert. support mustache template. */
+ @JsonProperty("custom_summary")
+ private String customSummary;
+
+ /** The display name of the alert. */
+ @JsonProperty("display_name")
+ private String displayName;
+
+ /** */
+ @JsonProperty("evaluation")
+ private AlertV2Evaluation evaluation;
+
+ /** UUID identifying the alert. */
+ @JsonProperty("id")
+ private String id;
+
+ /** Indicates whether the query is trashed. */
+ @JsonProperty("lifecycle_state")
+ private LifecycleState lifecycleState;
+
+ /** The owner's username. This field is set to "Unavailable" if the user has been deleted. */
+ @JsonProperty("owner_user_name")
+ private String ownerUserName;
+
+ /** Text of the query to be run. */
+ @JsonProperty("query_text")
+ private String queryText;
+
+ /** The run as username. This field is set to "Unavailable" if the user has been deleted. */
+ @JsonProperty("run_as_user_name")
+ private String runAsUserName;
+
+ /** */
+ @JsonProperty("schedule")
+ private CronSchedule schedule;
+
+ /** The timestamp indicating when the alert was updated. */
+ @JsonProperty("update_time")
+ private String updateTime;
+
+ /** ID of the SQL warehouse attached to the alert. */
+ @JsonProperty("warehouse_id")
+ private String warehouseId;
+
+ public ListAlertsV2ResponseAlert setCreateTime(String createTime) {
+ this.createTime = createTime;
+ return this;
+ }
+
+ public String getCreateTime() {
+ return createTime;
+ }
+
+ public ListAlertsV2ResponseAlert setCustomDescription(String customDescription) {
+ this.customDescription = customDescription;
+ return this;
+ }
+
+ public String getCustomDescription() {
+ return customDescription;
+ }
+
+ public ListAlertsV2ResponseAlert setCustomSummary(String customSummary) {
+ this.customSummary = customSummary;
+ return this;
+ }
+
+ public String getCustomSummary() {
+ return customSummary;
+ }
+
+ public ListAlertsV2ResponseAlert setDisplayName(String displayName) {
+ this.displayName = displayName;
+ return this;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public ListAlertsV2ResponseAlert setEvaluation(AlertV2Evaluation evaluation) {
+ this.evaluation = evaluation;
+ return this;
+ }
+
+ public AlertV2Evaluation getEvaluation() {
+ return evaluation;
+ }
+
+ public ListAlertsV2ResponseAlert setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public ListAlertsV2ResponseAlert setLifecycleState(LifecycleState lifecycleState) {
+ this.lifecycleState = lifecycleState;
+ return this;
+ }
+
+ public LifecycleState getLifecycleState() {
+ return lifecycleState;
+ }
+
+ public ListAlertsV2ResponseAlert setOwnerUserName(String ownerUserName) {
+ this.ownerUserName = ownerUserName;
+ return this;
+ }
+
+ public String getOwnerUserName() {
+ return ownerUserName;
+ }
+
+ public ListAlertsV2ResponseAlert setQueryText(String queryText) {
+ this.queryText = queryText;
+ return this;
+ }
+
+ public String getQueryText() {
+ return queryText;
+ }
+
+ public ListAlertsV2ResponseAlert setRunAsUserName(String runAsUserName) {
+ this.runAsUserName = runAsUserName;
+ return this;
+ }
+
+ public String getRunAsUserName() {
+ return runAsUserName;
+ }
+
+ public ListAlertsV2ResponseAlert setSchedule(CronSchedule schedule) {
+ this.schedule = schedule;
+ return this;
+ }
+
+ public CronSchedule getSchedule() {
+ return schedule;
+ }
+
+ public ListAlertsV2ResponseAlert setUpdateTime(String updateTime) {
+ this.updateTime = updateTime;
+ return this;
+ }
+
+ public String getUpdateTime() {
+ return updateTime;
+ }
+
+ public ListAlertsV2ResponseAlert setWarehouseId(String warehouseId) {
+ this.warehouseId = warehouseId;
+ return this;
+ }
+
+ public String getWarehouseId() {
+ return warehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListAlertsV2ResponseAlert that = (ListAlertsV2ResponseAlert) o;
+ return Objects.equals(createTime, that.createTime)
+ && Objects.equals(customDescription, that.customDescription)
+ && Objects.equals(customSummary, that.customSummary)
+ && Objects.equals(displayName, that.displayName)
+ && Objects.equals(evaluation, that.evaluation)
+ && Objects.equals(id, that.id)
+ && Objects.equals(lifecycleState, that.lifecycleState)
+ && Objects.equals(ownerUserName, that.ownerUserName)
+ && Objects.equals(queryText, that.queryText)
+ && Objects.equals(runAsUserName, that.runAsUserName)
+ && Objects.equals(schedule, that.schedule)
+ && Objects.equals(updateTime, that.updateTime)
+ && Objects.equals(warehouseId, that.warehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ createTime,
+ customDescription,
+ customSummary,
+ displayName,
+ evaluation,
+ id,
+ lifecycleState,
+ ownerUserName,
+ queryText,
+ runAsUserName,
+ schedule,
+ updateTime,
+ warehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListAlertsV2ResponseAlert.class)
+ .add("createTime", createTime)
+ .add("customDescription", customDescription)
+ .add("customSummary", customSummary)
+ .add("displayName", displayName)
+ .add("evaluation", evaluation)
+ .add("id", id)
+ .add("lifecycleState", lifecycleState)
+ .add("ownerUserName", ownerUserName)
+ .add("queryText", queryText)
+ .add("runAsUserName", runAsUserName)
+ .add("schedule", schedule)
+ .add("updateTime", updateTime)
+ .add("warehouseId", warehouseId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SchedulePauseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SchedulePauseStatus.java
new file mode 100755
index 000000000..1757af813
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SchedulePauseStatus.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum SchedulePauseStatus {
+ PAUSED,
+ UNPAUSED,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2Request.java
new file mode 100755
index 000000000..819a17a3f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TrashAlertV2Request.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete an alert */
+@Generated
+public class TrashAlertV2Request {
+ /** */
+ @JsonIgnore private String id;
+
+ public TrashAlertV2Request setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ TrashAlertV2Request that = (TrashAlertV2Request) o;
+ return Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(TrashAlertV2Request.class).add("id", id).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java
new file mode 100755
index 000000000..a1df8b791
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertV2Request.java
@@ -0,0 +1,84 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateAlertV2Request {
+ /** */
+ @JsonProperty("alert")
+ private AlertV2 alert;
+
+ /** UUID identifying the alert. */
+ @JsonIgnore private String id;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonProperty("update_mask")
+ private String updateMask;
+
+ public UpdateAlertV2Request setAlert(AlertV2 alert) {
+ this.alert = alert;
+ return this;
+ }
+
+ public AlertV2 getAlert() {
+ return alert;
+ }
+
+ public UpdateAlertV2Request setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public UpdateAlertV2Request setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateAlertV2Request that = (UpdateAlertV2Request) o;
+ return Objects.equals(alert, that.alert)
+ && Objects.equals(id, that.id)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(alert, id, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateAlertV2Request.class)
+ .add("alert", alert)
+ .add("id", id)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
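
A sketch of a partial update driven by the field mask documented above. Only the field named in update_mask is applied; setDisplayName on AlertV2 is assumed here, following the fluent-setter pattern of the other generated models:

import com.databricks.sdk.service.sql.AlertV2;
import com.databricks.sdk.service.sql.AlertsV2API;
import com.databricks.sdk.service.sql.UpdateAlertV2Request;

public class UpdateAlertSketch {
  // Rename an alert without touching any of its other fields.
  static AlertV2 rename(AlertsV2API alerts, String alertId, String newName) {
    // AlertV2.setDisplayName is assumed; the mask restricts the update to display_name.
    AlertV2 patch = new AlertV2().setDisplayName(newName);
    return alerts.updateAlert(
        new UpdateAlertV2Request().setId(alertId).setUpdateMask("display_name").setAlert(patch));
  }
}
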