diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 20842dced..1989860d0 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-c4784cea599325a13472b1455e7434d639362d8b
\ No newline at end of file
+2304d9e46d27bb60c2ba3a3185adf302a0ecd17a
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 97a0fa447..820ada772 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,3 +1,4 @@
+LroTestingAPITest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java linguist-generated=true
@@ -2708,7 +2709,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesServi
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Table.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributes.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/TableInternalAttributesSharedTableType.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateProvider.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateRecipient.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateShare.java linguist-generated=true
@@ -3116,3 +3116,39 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/Workspace
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsDescription.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectPermissionsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/ComplexQueryParam.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/CreateResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Impl.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Service.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/Resource.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/CreateTestResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingAPI.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingImpl.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingService.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/TestResource.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/GetResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Impl.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Service.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RequiredFields.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/Resource.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/TestEnum.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOperationRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/ErrorCode.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetOperationRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetTestResourceRequest.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/Operation.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResource.java linguist-generated=true
+databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/TestResourceOperationMetadata.java linguist-generated=true
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 6ba141908..8473647f2 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -14,3 +14,10 @@
### Internal Changes
### API Changes
+* Add `absoluteSessionLifetimeInMinutes` and `enableSingleUseRefreshTokens` fields for `com.databricks.sdk.service.oauth2.TokenAccessPolicy`.
+* Add `networkConnectivityConfigId` field for `com.databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
+* Add `OAUTH_MTLS` enum value for `com.databricks.sdk.service.catalog.CredentialType`.
+* Add `NETWORK_CHECK_NIC_FAILURE_DUE_TO_MISCONFIG`, `NETWORK_CHECK_DNS_SERVER_FAILURE_DUE_TO_MISCONFIG`, `NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG`, `NETWORK_CHECK_METADATA_ENDPOINT_FAILURE_DUE_TO_MISCONFIG`, `NETWORK_CHECK_CONTROL_PLANE_FAILURE_DUE_TO_MISCONFIG` and `NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE_DUE_TO_MISCONFIG` enum values for `com.databricks.sdk.service.compute.TerminationReasonCode`.
+* Add `CREATING` and `CREATE_FAILED` enum values for `com.databricks.sdk.service.settings.NccPrivateEndpointRulePrivateLinkConnectionState`.
+* Add `NETWORK_CHECK_NIC_FAILURE_DUE_TO_MISCONFIG`, `NETWORK_CHECK_DNS_SERVER_FAILURE_DUE_TO_MISCONFIG`, `NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG`, `NETWORK_CHECK_METADATA_ENDPOINT_FAILURE_DUE_TO_MISCONFIG`, `NETWORK_CHECK_CONTROL_PLANE_FAILURE_DUE_TO_MISCONFIG` and `NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE_DUE_TO_MISCONFIG` enum values for `com.databricks.sdk.service.sql.TerminationReasonCode`.
+* [Breaking] Remove `update()` method for `workspaceClient.recipientFederationPolicies()` service.
\ No newline at end of file
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
index 3d660a890..f852713c7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Next Id: 38 */
+/** Next Id: 46 */
@Generated
public enum ConnectionType {
BIGQUERY,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java
index 6172c00db..6deab1199 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java
@@ -4,13 +4,14 @@
import com.databricks.sdk.support.Generated;
-/** Next Id: 13 */
+/** Next Id: 14 */
@Generated
public enum CredentialType {
ANY_STATIC_CREDENTIAL,
BEARER_TOKEN,
OAUTH_ACCESS_TOKEN,
OAUTH_M2M,
+ OAUTH_MTLS,
OAUTH_REFRESH_TOKEN,
OAUTH_RESOURCE_OWNER_PASSWORD,
OAUTH_U2M,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java
index 6f7da5149..4bd7689e0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java
@@ -8,7 +8,7 @@
import java.util.Map;
import java.util.Objects;
-/** Next ID: 40 */
+/** Next ID: 41 */
@Generated
public class SchemaInfo {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
index a02ad9204..c42d7e06e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java
@@ -4,7 +4,7 @@
import com.databricks.sdk.support.Generated;
-/** Latest kind: CONNECTION_REDSHIFT_IAM = 265; Next id:266 */
+/** Latest kind: CONNECTION_SALESFORCE_OAUTH_MTLS = 268; Next id:269 */
@Generated
public enum SecurableKind {
TABLE_DB_STORAGE,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
index 2dcf7125a..4c7861f27 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java
@@ -132,11 +132,17 @@ public enum TerminationReasonCode {
NEPHOS_RESOURCE_MANAGEMENT,
NETVISOR_SETUP_TIMEOUT,
NETWORK_CHECK_CONTROL_PLANE_FAILURE,
+ NETWORK_CHECK_CONTROL_PLANE_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_DNS_SERVER_FAILURE,
+ NETWORK_CHECK_DNS_SERVER_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_METADATA_ENDPOINT_FAILURE,
+ NETWORK_CHECK_METADATA_ENDPOINT_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE,
+ NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_NIC_FAILURE,
+ NETWORK_CHECK_NIC_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_STORAGE_FAILURE,
+ NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CONFIGURATION_FAILURE,
NFS_MOUNT_FAILURE,
NO_ACTIVATED_K8S,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java
index 7d24f64ad..405bbcb69 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CancelRefreshRequest.java
@@ -10,7 +10,19 @@
/** Request to cancel a refresh. */
@Generated
public class CancelRefreshRequest {
- /** The UUID of the request object. For example, schema id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ * <p>Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ * <p>Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ * <p>[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonIgnore private String objectId;
/** The type of the monitored object. Can be one of the following: `schema` or `table`. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java
index 25ba06bc9..a907fea44 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/CreateRefreshRequest.java
@@ -10,7 +10,19 @@
@Generated
public class CreateRefreshRequest {
- /** The UUID of the request object. For example, table id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ * <p>Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ * <p>Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ * <p>[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonIgnore private String objectId;
/** The type of the monitored object. Can be one of the following: `schema`or `table`. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java
index 24c38e762..0442dfb8b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataProfilingConfig.java
@@ -44,7 +44,7 @@ public class DataProfilingConfig {
@JsonProperty("effective_warehouse_id")
private String effectiveWarehouseId;
- /** Configuration for monitoring inference log tables. */
+ /** `Analysis Configuration` for monitoring inference log tables. */
@JsonProperty("inference_log")
private InferenceLogConfig inferenceLog;
@@ -94,7 +94,7 @@ public class DataProfilingConfig {
@JsonProperty("slicing_exprs")
private Collection slicingExprs;
- /** Configuration for monitoring snapshot tables. */
+ /** `Analysis Configuration` for monitoring snapshot tables. */
@JsonProperty("snapshot")
private SnapshotConfig snapshot;
@@ -102,7 +102,7 @@ public class DataProfilingConfig {
@JsonProperty("status")
private DataProfilingStatus status;
- /** Configuration for monitoring time series tables. */
+ /** `Analysis Configuration` for monitoring time series tables. */
@JsonProperty("time_series")
private TimeSeriesConfig timeSeries;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java
index 0226e6c69..48dfedd07 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityAPI.java
@@ -26,6 +26,13 @@ public DataQualityAPI(DataQualityService mock) {
/**
* Cancels a data quality monitor refresh. Currently only supported for the `table` `object_type`.
+ * The call must be made in the same workspace as where the monitor was created.
+ *
+ * <p>The caller must have either of the following sets of permissions: 1. **MANAGE** and
+ * **USE_CATALOG** on the table's parent catalog. 2. **USE_CATALOG** on the table's parent
+ * catalog, and **MANAGE** and **USE_SCHEMA** on the table's parent schema. 3. **USE_CATALOG** on
+ * the table's parent catalog, **USE_SCHEMA** on the table's parent schema, and **MANAGE** on the
+ * table.
*/
public CancelRefreshResponse cancelRefresh(CancelRefreshRequest request) {
return impl.cancelRefresh(request);
@@ -35,27 +42,34 @@ public CancelRefreshResponse cancelRefresh(CancelRefreshRequest request) {
* Create a data quality monitor on a Unity Catalog object. The caller must provide either
* `anomaly_detection_config` for a schema monitor or `data_profiling_config` for a table monitor.
*
- * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the
- * table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent
- * schema, and have **SELECT** access on the table. 3. have the following permissions: -
- * **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
- * be an owner of the table.
+ * <p>For the `table` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on
+ * the table's parent schema, and **SELECT** on the table 2. **USE_CATALOG** on the table's parent
+ * catalog, **MANAGE** and **USE_SCHEMA** on the table's parent schema, and **SELECT** on the
+ * table. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's parent
+ * schema, and **MANAGE** and **SELECT** on the table.
*
* <p>Workspace assets, such as the dashboard, will be created in the workspace where this call
* was made.
+ *
+ * <p>For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **MANAGE** and **USE_SCHEMA** on the
+ * schema.
*/
public Monitor createMonitor(CreateMonitorRequest request) {
return impl.createMonitor(request);
}
/**
- * Creates a refresh. Currently only supported for the `table` `object_type`.
+ * Creates a refresh. Currently only supported for the `table` `object_type`. The call must be
+ * made in the same workspace as where the monitor was created.
*
- * <p>The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
- * on the table's parent catalog and be an owner of the table's parent schema 3. have the
- * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the
- * table's parent schema - be an owner of the table
+ * <p>The caller must have either of the following sets of permissions: 1. **MANAGE** and
+ * **USE_CATALOG** on the table's parent catalog. 2. **USE_CATALOG** on the table's parent
+ * catalog, and **MANAGE** and **USE_SCHEMA** on the table's parent schema. 3. **USE_CATALOG** on
+ * the table's parent catalog, **USE_SCHEMA** on the table's parent schema, and **MANAGE** on the
+ * table.
*/
public Refresh createRefresh(CreateRefreshRequest request) {
return impl.createRefresh(request);
@@ -68,13 +82,19 @@ public void deleteMonitor(String objectType, String objectId) {
/**
* Delete a data quality monitor on Unity Catalog object.
*
- * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table.
+ * <p>For the `table` `object_type`, the caller must have either of the following sets of
+ * permissions: **MANAGE** and **USE_CATALOG** on the table's parent catalog. **USE_CATALOG** on
+ * the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's parent schema.
+ * **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's parent schema, and
+ * **MANAGE** on the table.
*
* <p>Note that the metric tables and dashboard will not be deleted as part of this call; those
* assets must be manually cleaned up (if desired).
+ *
+ * <p>For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **MANAGE** and **USE_SCHEMA** on the
+ * schema.
*/
public void deleteMonitor(DeleteMonitorRequest request) {
impl.deleteMonitor(request);
@@ -98,16 +118,21 @@ public Monitor getMonitor(String objectType, String objectId) {
}
/**
- * Read a data quality monitor on Unity Catalog object.
+ * Read a data quality monitor on a Unity Catalog object.
+ *
+ * <p>For the `table` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog. 2.
+ * **USE_CATALOG** on the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's
+ * parent schema. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's
+ * parent schema, and **SELECT** on the table.
*
- * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ * <p>For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **USE_SCHEMA** on the schema.
*
- * <p>The returned information includes configuration values, as well as information on assets
- * created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is
- * in a different workspace than where the monitor was created.
+ * <p>The returned information includes configuration values on the entity and parent entity as
+ * well as information on assets created by the monitor. Some information (e.g. dashboard) may be
+ * filtered out if the caller is in a different workspace than where the monitor was created.
*/
public Monitor getMonitor(GetMonitorRequest request) {
return impl.getMonitor(request);
@@ -122,12 +147,18 @@ public Refresh getRefresh(String objectType, String objectId, long refreshId) {
}
/**
- * Get data quality monitor refresh.
+ * Get data quality monitor refresh. The call must be made in the same workspace as where the
+ * monitor was created.
*
- * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ * <p>For the `table` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog. 2.
+ * **USE_CATALOG** on the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's
+ * parent schema. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's
+ * parent schema, and **SELECT** on the table.
+ *
+ * <p>For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **USE_SCHEMA** on the schema.
*/
public Refresh getRefresh(GetRefreshRequest request) {
return impl.getRefresh(request);
@@ -153,12 +184,18 @@ public Iterable listRefresh(String objectType, String objectId) {
}
/**
- * List data quality monitor refreshes.
+ * List data quality monitor refreshes. The call must be made in the same workspace as where the
+ * monitor was created.
+ *
+ * <p>For the `table` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog. 2.
+ * **USE_CATALOG** on the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's
+ * parent schema. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's
+ * parent schema, and **SELECT** on the table.
*
- * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ * <p>For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **USE_SCHEMA** on the schema.
*/
public Iterable listRefresh(ListRefreshRequest request) {
return new Paginator<>(
@@ -177,10 +214,16 @@ public Iterable listRefresh(ListRefreshRequest request) {
/**
* Update a data quality monitor on Unity Catalog object.
*
- * <p>For the `table` `object_type`, The caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table.
+ * <p>For the `table` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog. 2.
+ * **USE_CATALOG** on the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's
+ * parent schema. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's
+ * parent schema, and **MANAGE** on the table.
+ *
+ * <p>For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **MANAGE** and **USE_SCHEMA** on the
+ * schema.
*/
public Monitor updateMonitor(UpdateMonitorRequest request) {
return impl.updateMonitor(request);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java
index 1e5487768..b460c692f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DataQualityService.java
@@ -14,6 +14,13 @@
public interface DataQualityService {
/**
* Cancels a data quality monitor refresh. Currently only supported for the `table` `object_type`.
+ * The call must be made in the same workspace as where the monitor was created.
+ *
+ * <p>The caller must have either of the following sets of permissions: 1. **MANAGE** and
+ * **USE_CATALOG** on the table's parent catalog. 2. **USE_CATALOG** on the table's parent
+ * catalog, and **MANAGE** and **USE_SCHEMA** on the table's parent schema. 3. **USE_CATALOG** on
+ * the table's parent catalog, **USE_SCHEMA** on the table's parent schema, and **MANAGE** on the
+ * table.
*/
CancelRefreshResponse cancelRefresh(CancelRefreshRequest cancelRefreshRequest);
@@ -21,38 +28,51 @@ public interface DataQualityService {
* Create a data quality monitor on a Unity Catalog object. The caller must provide either
* `anomaly_detection_config` for a schema monitor or `data_profiling_config` for a table monitor.
*
- * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog, have **USE_SCHEMA** on the table's parent schema, and have **SELECT** access on the
- * table 2. have **USE_CATALOG** on the table's parent catalog, be an owner of the table's parent
- * schema, and have **SELECT** access on the table. 3. have the following permissions: -
- * **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
- * be an owner of the table.
+ * <p>For the `table` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on
+ * the table's parent schema, and **SELECT** on the table 2. **USE_CATALOG** on the table's parent
+ * catalog, **MANAGE** and **USE_SCHEMA** on the table's parent schema, and **SELECT** on the
+ * table. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's parent
+ * schema, and **MANAGE** and **SELECT** on the table.
*
* <p>Workspace assets, such as the dashboard, will be created in the workspace where this call
* was made.
+ *
+ * <p>For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **MANAGE** and **USE_SCHEMA** on the
+ * schema.
*/
Monitor createMonitor(CreateMonitorRequest createMonitorRequest);
/**
- * Creates a refresh. Currently only supported for the `table` `object_type`.
+ * Creates a refresh. Currently only supported for the `table` `object_type`. The call must be
+ * made in the same workspace as where the monitor was created.
*
- * <p>The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG**
- * on the table's parent catalog and be an owner of the table's parent schema 3. have the
- * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the
- * table's parent schema - be an owner of the table
+ * <p>The caller must have either of the following sets of permissions: 1. **MANAGE** and
+ * **USE_CATALOG** on the table's parent catalog. 2. **USE_CATALOG** on the table's parent
+ * catalog, and **MANAGE** and **USE_SCHEMA** on the table's parent schema. 3. **USE_CATALOG** on
+ * the table's parent catalog, **USE_SCHEMA** on the table's parent schema, and **MANAGE** on the
+ * table.
*/
Refresh createRefresh(CreateRefreshRequest createRefreshRequest);
/**
* Delete a data quality monitor on Unity Catalog object.
*
- * <p>For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table.
+ * <p>For the `table` `object_type`, the caller must have either of the following sets of
+ * permissions: **MANAGE** and **USE_CATALOG** on the table's parent catalog. **USE_CATALOG** on
+ * the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's parent schema.
+ * **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's parent schema, and
+ * **MANAGE** on the table.
*
* <p>Note that the metric tables and dashboard will not be deleted as part of this call; those
* assets must be manually cleaned up (if desired).
+ *
+ * <p>For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **MANAGE** and **USE_SCHEMA** on the
+ * schema.
*/
void deleteMonitor(DeleteMonitorRequest deleteMonitorRequest);
@@ -60,26 +80,37 @@ public interface DataQualityService {
void deleteRefresh(DeleteRefreshRequest deleteRefreshRequest);
/**
- * Read a data quality monitor on Unity Catalog object.
+ * Read a data quality monitor on a Unity Catalog object.
+ *
+ *
For the `table` `object_type`, the caller must have any of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog. 2.
+ * **USE_CATALOG** on the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's
+ * parent schema. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's
+ * parent schema, and **SELECT** on the table.
*
- *
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema. 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ *
For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **USE_SCHEMA** on the schema.
*
- *
The returned information includes configuration values, as well as information on assets
- * created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is
- * in a different workspace than where the monitor was created.
+ *
The returned information includes configuration values on the entity and parent entity as
+ * well as information on assets created by the monitor. Some information (e.g. dashboard) may be
+ * filtered out if the caller is in a different workspace than where the monitor was created.
*/
Monitor getMonitor(GetMonitorRequest getMonitorRequest);
/**
- * Get data quality monitor refresh.
+ * Get data quality monitor refresh. The call must be made in the same workspace as where the
+ * monitor was created.
*
- *
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ *
For the `table` `object_type`, the caller must have any of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog. 2.
+ * **USE_CATALOG** on the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's
+ * parent schema. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's
+ * parent schema, and **SELECT** on the table.
+ *
+ *
For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **USE_SCHEMA** on the schema.
*/
Refresh getRefresh(GetRefreshRequest getRefreshRequest);
@@ -87,22 +118,34 @@ public interface DataQualityService {
ListMonitorResponse listMonitor(ListMonitorRequest listMonitorRequest);
/**
- * List data quality monitor refreshes.
+ * List data quality monitor refreshes. The call must be made in the same workspace as where the
+ * monitor was created.
+ *
+ *
For the `table` `object_type`, the caller must have any of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog. 2.
+ * **USE_CATALOG** on the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's
+ * parent schema. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's
+ * parent schema, and **SELECT** on the table.
*
- *
For the `table` `object_type`, the caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - **SELECT** privilege on the table.
+ *
For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **USE_SCHEMA** on the schema.
*/
ListRefreshResponse listRefresh(ListRefreshRequest listRefreshRequest);
/**
* Update a data quality monitor on Unity Catalog object.
*
- *
For the `table` `object_type`, The caller must either: 1. be an owner of the table's parent
- * catalog 2. have **USE_CATALOG** on the table's parent catalog and be an owner of the table's
- * parent schema 3. have the following permissions: - **USE_CATALOG** on the table's parent
- * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table.
+ *
For the `table` `object_type`, the caller must have any of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the table's parent catalog. 2.
+ * **USE_CATALOG** on the table's parent catalog, and **MANAGE** and **USE_SCHEMA** on the table's
+ * parent schema. 3. **USE_CATALOG** on the table's parent catalog, **USE_SCHEMA** on the table's
+ * parent schema, and **MANAGE** on the table.
+ *
+ *
For the `schema` `object_type`, the caller must have either of the following sets of
+ * permissions: 1. **MANAGE** and **USE_CATALOG** on the schema's parent catalog. 2.
+ * **USE_CATALOG** on the schema's parent catalog, and **MANAGE** and **USE_SCHEMA** on the
+ * schema.
*/
Monitor updateMonitor(UpdateMonitorRequest updateMonitorRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java
index 0479ce355..e9454af2a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteMonitorRequest.java
@@ -9,7 +9,19 @@
@Generated
public class DeleteMonitorRequest {
- /** The UUID of the request object. For example, schema id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ *
Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ *
Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ *
[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonIgnore private String objectId;
/** The type of the monitored object. Can be one of the following: `schema` or `table`. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java
index 6ec839ce9..eb9df5efd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/DeleteRefreshRequest.java
@@ -9,7 +9,19 @@
@Generated
public class DeleteRefreshRequest {
- /** The UUID of the request object. For example, schema id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ *
Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ *
Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ *
[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonIgnore private String objectId;
/** The type of the monitored object. Can be one of the following: `schema` or `table`. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java
index cdb1e5136..09e378f08 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetMonitorRequest.java
@@ -9,7 +9,19 @@
@Generated
public class GetMonitorRequest {
- /** The UUID of the request object. For example, schema id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ *
Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ *
Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ *
[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonIgnore private String objectId;
/** The type of the monitored object. Can be one of the following: `schema` or `table`. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java
index 9280dce0f..6d9c2ac78 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/GetRefreshRequest.java
@@ -9,7 +9,19 @@
@Generated
public class GetRefreshRequest {
- /** The UUID of the request object. For example, schema id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ *
Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ *
Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ *
[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonIgnore private String objectId;
/** The type of the monitored object. Can be one of the following: `schema` or `table`. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java
index e86705d05..01826e96b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/ListRefreshRequest.java
@@ -10,7 +10,19 @@
@Generated
public class ListRefreshRequest {
- /** The UUID of the request object. For example, schema id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ *
Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ *
Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ *
[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonIgnore private String objectId;
/** The type of the monitored object. Can be one of the following: `schema` or `table`. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java
index da034b6b9..c4b94c1fe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Monitor.java
@@ -14,11 +14,26 @@ public class Monitor {
@JsonProperty("anomaly_detection_config")
private AnomalyDetectionConfig anomalyDetectionConfig;
- /** Data Profiling Configuration, applicable to `table` object types */
+ /**
+ * Data Profiling Configuration, applicable to `table` object types. Exactly one `Analysis
+ * Configuration` must be present.
+ */
@JsonProperty("data_profiling_config")
private DataProfilingConfig dataProfilingConfig;
- /** The UUID of the request object. For example, schema id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ *
Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ *
Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ *
[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonProperty("object_id")
private String objectId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java
index d2e0fb6b9..e5d1e7659 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/Refresh.java
@@ -21,7 +21,19 @@ public class Refresh {
@JsonProperty("message")
private String message;
- /** The UUID of the request object. For example, table id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ *
Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ *
Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ *
[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonProperty("object_id")
private String objectId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java
index 014f74350..8331e7d3f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateMonitorRequest.java
@@ -15,7 +15,19 @@ public class UpdateMonitorRequest {
@JsonProperty("monitor")
private Monitor monitor;
- /** The UUID of the request object. For example, schema id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ *
Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ *
Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ *
[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonIgnore private String objectId;
/** The type of the monitored object. Can be one of the following: `schema` or `table`. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java
index 057ac4706..56f63c1cd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dataquality/UpdateRefreshRequest.java
@@ -11,7 +11,19 @@
@Generated
public class UpdateRefreshRequest {
- /** The UUID of the request object. For example, schema id. */
+ /**
+ * The UUID of the request object. It is `schema_id` for `schema`, and `table_id` for `table`.
+ *
+ *
Find the `schema_id` from either: 1. The [schema_id] of the `Schemas` resource. 2. In
+ * [Catalog Explorer] > select the `schema` > go to the `Details` tab > the `Schema ID` field.
+ *
+ *
Find the `table_id` from either: 1. The [table_id] of the `Tables` resource. 2. In [Catalog
+ * Explorer] > select the `table` > go to the `Details` tab > the `Table ID` field.
+ *
+ *
[Catalog Explorer]: https://docs.databricks.com/aws/en/catalog-explorer/ [schema_id]:
+ * https://docs.databricks.com/api/workspace/schemas/get#schema_id [table_id]:
+ * https://docs.databricks.com/api/workspace/tables/get#table_id
+ */
@JsonIgnore private String objectId;
/** The type of the monitored object. Can be one of the following: `schema` or `table`. */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java
index 450831d23..c31d6f4fb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java
@@ -14,6 +14,11 @@ public class RepairRun {
/**
* An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
* deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
+ *
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+ *
+ *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("dbt_commands")
private Collection dbtCommands;
@@ -25,9 +30,10 @@ public class RepairRun {
* cannot be specified in conjunction with notebook_params. The JSON representation of this field
* (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs.
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
- *
[dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html [job
+ * parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("notebook_params")
private Map notebookParams;
@@ -89,7 +95,7 @@ public class RepairRun {
* `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
* of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs.
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
*
Important
*
@@ -97,7 +103,8 @@ public class RepairRun {
* characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese
* kanjis, and emojis.
*
- *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("python_params")
private Collection pythonParams;
@@ -131,7 +138,7 @@ public class RepairRun {
* parameters specified in job setting. The JSON representation of this field (for example
* `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
*
Important
*
@@ -139,7 +146,8 @@ public class RepairRun {
* characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese
* kanjis, and emojis.
*
- *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("spark_submit_params")
private Collection sparkSubmitParams;
@@ -147,6 +155,11 @@ public class RepairRun {
/**
* A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
* doe", "age": "35"}`. The SQL alert task does not support custom parameters.
+ *
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+ *
+ *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("sql_params")
private Map sqlParams;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java
index 593a3dc97..709e28637 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java
@@ -14,6 +14,11 @@ public class RunJobTask {
/**
* An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
* deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
+ *
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+ *
+ *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("dbt_commands")
private Collection dbtCommands;
@@ -25,9 +30,10 @@ public class RunJobTask {
* cannot be specified in conjunction with notebook_params. The JSON representation of this field
* (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs.
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
- *
[dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html [job
+ * parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("notebook_params")
private Map notebookParams;
@@ -74,7 +80,7 @@ public class RunJobTask {
* `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
* of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs.
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
*
Important
*
@@ -82,7 +88,8 @@ public class RunJobTask {
* characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese
* kanjis, and emojis.
*
- *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("python_params")
private Collection pythonParams;
@@ -94,7 +101,7 @@ public class RunJobTask {
* parameters specified in job setting. The JSON representation of this field (for example
* `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
*
Important
*
@@ -102,7 +109,8 @@ public class RunJobTask {
* characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese
* kanjis, and emojis.
*
- *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("spark_submit_params")
private Collection sparkSubmitParams;
@@ -110,6 +118,11 @@ public class RunJobTask {
/**
* A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
* doe", "age": "35"}`. The SQL alert task does not support custom parameters.
+ *
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+ *
+ *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("sql_params")
private Map sqlParams;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java
index 4b2a28f7e..96b9a72fd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java
@@ -14,6 +14,11 @@ public class RunNow {
/**
* An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
* deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
+ *
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+ *
+ *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("dbt_commands")
private Collection dbtCommands;
@@ -42,9 +47,10 @@ public class RunNow {
* cannot be specified in conjunction with notebook_params. The JSON representation of this field
* (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs.
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
- *
[dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html [job
+ * parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("notebook_params")
private Map notebookParams;
@@ -110,7 +116,7 @@ public class RunNow {
* `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
* of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs.
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
*
Important
*
@@ -118,7 +124,8 @@ public class RunNow {
* characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese
* kanjis, and emojis.
*
- *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("python_params")
private Collection pythonParams;
@@ -134,7 +141,7 @@ public class RunNow {
* parameters specified in job setting. The JSON representation of this field (for example
* `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
*
Important
*
@@ -142,7 +149,8 @@ public class RunNow {
* characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese
* kanjis, and emojis.
*
- *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("spark_submit_params")
private Collection sparkSubmitParams;
@@ -150,6 +158,11 @@ public class RunNow {
/**
* A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
* doe", "age": "35"}`. The SQL alert task does not support custom parameters.
+ *
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+ *
+ *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("sql_params")
private Map sqlParams;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java
index be39a1c4e..817020b84 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java
@@ -14,6 +14,11 @@ public class RunParameters {
/**
* An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
* deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
+ *
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+ *
+ *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("dbt_commands")
private Collection dbtCommands;
@@ -25,9 +30,10 @@ public class RunParameters {
* cannot be specified in conjunction with notebook_params. The JSON representation of this field
* (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs.
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
- *
[dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html [job
+ * parameters]: https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("notebook_params")
private Map notebookParams;
@@ -66,7 +72,7 @@ public class RunParameters {
* `run-now`, it would overwrite the parameters specified in job setting. The JSON representation
* of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs.
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
*
Important
*
@@ -74,7 +80,8 @@ public class RunParameters {
* characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese
* kanjis, and emojis.
*
- *
[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("python_params")
private Collection pythonParams;
@@ -86,7 +93,7 @@ public class RunParameters {
* parameters specified in job setting. The JSON representation of this field (for example
* `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
*
- *
Use [Task parameter variables] to set parameters containing information about job runs
+ *
⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
*
*
Important
*
@@ -94,7 +101,8 @@ public class RunParameters {
* characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese
* kanjis, and emojis.
*
- * <p>[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("spark_submit_params")
private Collection sparkSubmitParams;
@@ -102,6 +110,11 @@ public class RunParameters {
/**
* A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john
* doe", "age": "35"}`. The SQL alert task does not support custom parameters.
+ *
+ * <p>⚠ **Deprecation note** Use [job parameters] to pass information down to tasks.
+ *
+ * <p>[job parameters]:
+ * https://docs.databricks.com/jobs/job-parameters.html#job-parameter-pushdown
*/
@JsonProperty("sql_params")
private Map sqlParams;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java
index ac3b6fd36..3b8602ead 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/TokenAccessPolicy.java
@@ -9,14 +9,48 @@
@Generated
public class TokenAccessPolicy {
+ /**
+ * Absolute OAuth session TTL in minutes. Effective only when the single-use refresh token feature
+ * is enabled. This is the absolute TTL of all refresh tokens issued in one OAuth session. When a
+ * new refresh token is issued during refresh token rotation, it will inherit the same absolute
+ * TTL as the old refresh token. In other words, this represents the maximum amount of time a user
+ * can stay logged in without re-authenticating.
+ */
+ @JsonProperty("absolute_session_lifetime_in_minutes")
+ private Long absoluteSessionLifetimeInMinutes;
+
/** access token time to live in minutes */
@JsonProperty("access_token_ttl_in_minutes")
private Long accessTokenTtlInMinutes;
- /** refresh token time to live in minutes */
+ /**
+ * Whether to enable single-use refresh tokens (refresh token rotation). If this feature is
+ * enabled, upon successfully getting a new access token using a refresh token, Databricks will
+ * issue a new refresh token along with the access token in the response and invalidate the old
+ * refresh token. The client should use the new refresh token to get access tokens in future
+ * requests.
+ */
+ @JsonProperty("enable_single_use_refresh_tokens")
+ private Boolean enableSingleUseRefreshTokens;
+
+ /**
+ * Refresh token time to live in minutes. When single-use refresh tokens are enabled, this
+ * represents the TTL of an individual refresh token. If the refresh token is used before it
+ * expires, a new one is issued with a renewed individual TTL.
+ */
@JsonProperty("refresh_token_ttl_in_minutes")
private Long refreshTokenTtlInMinutes;
+ public TokenAccessPolicy setAbsoluteSessionLifetimeInMinutes(
+ Long absoluteSessionLifetimeInMinutes) {
+ this.absoluteSessionLifetimeInMinutes = absoluteSessionLifetimeInMinutes;
+ return this;
+ }
+
+ public Long getAbsoluteSessionLifetimeInMinutes() {
+ return absoluteSessionLifetimeInMinutes;
+ }
+
public TokenAccessPolicy setAccessTokenTtlInMinutes(Long accessTokenTtlInMinutes) {
this.accessTokenTtlInMinutes = accessTokenTtlInMinutes;
return this;
@@ -26,6 +60,15 @@ public Long getAccessTokenTtlInMinutes() {
return accessTokenTtlInMinutes;
}
+ public TokenAccessPolicy setEnableSingleUseRefreshTokens(Boolean enableSingleUseRefreshTokens) {
+ this.enableSingleUseRefreshTokens = enableSingleUseRefreshTokens;
+ return this;
+ }
+
+ public Boolean getEnableSingleUseRefreshTokens() {
+ return enableSingleUseRefreshTokens;
+ }
+
public TokenAccessPolicy setRefreshTokenTtlInMinutes(Long refreshTokenTtlInMinutes) {
this.refreshTokenTtlInMinutes = refreshTokenTtlInMinutes;
return this;
@@ -40,19 +83,27 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TokenAccessPolicy that = (TokenAccessPolicy) o;
- return Objects.equals(accessTokenTtlInMinutes, that.accessTokenTtlInMinutes)
+ return Objects.equals(absoluteSessionLifetimeInMinutes, that.absoluteSessionLifetimeInMinutes)
+ && Objects.equals(accessTokenTtlInMinutes, that.accessTokenTtlInMinutes)
+ && Objects.equals(enableSingleUseRefreshTokens, that.enableSingleUseRefreshTokens)
&& Objects.equals(refreshTokenTtlInMinutes, that.refreshTokenTtlInMinutes);
}
@Override
public int hashCode() {
- return Objects.hash(accessTokenTtlInMinutes, refreshTokenTtlInMinutes);
+ return Objects.hash(
+ absoluteSessionLifetimeInMinutes,
+ accessTokenTtlInMinutes,
+ enableSingleUseRefreshTokens,
+ refreshTokenTtlInMinutes);
}
@Override
public String toString() {
return new ToStringer(TokenAccessPolicy.class)
+ .add("absoluteSessionLifetimeInMinutes", absoluteSessionLifetimeInMinutes)
.add("accessTokenTtlInMinutes", accessTokenTtlInMinutes)
+ .add("enableSingleUseRefreshTokens", enableSingleUseRefreshTokens)
.add("refreshTokenTtlInMinutes", refreshTokenTtlInMinutes)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
index b478d7c6f..4051768bc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/CreateWorkspaceRequest.java
@@ -89,6 +89,14 @@ public class CreateWorkspaceRequest {
@JsonProperty("managed_services_customer_managed_key_id")
private String managedServicesCustomerManagedKeyId;
+ /**
+ * The object ID of network connectivity config. Once assigned, the workspace serverless compute
+ * resources use the same set of stable IP CIDR blocks and optional private link to access your
+ * resources.
+ */
+ @JsonProperty("network_connectivity_config_id")
+ private String networkConnectivityConfigId;
+
/**
* The ID of the workspace's network configuration object. To use AWS PrivateLink, this field is
* required.
@@ -231,6 +239,15 @@ public String getManagedServicesCustomerManagedKeyId() {
return managedServicesCustomerManagedKeyId;
}
+ public CreateWorkspaceRequest setNetworkConnectivityConfigId(String networkConnectivityConfigId) {
+ this.networkConnectivityConfigId = networkConnectivityConfigId;
+ return this;
+ }
+
+ public String getNetworkConnectivityConfigId() {
+ return networkConnectivityConfigId;
+ }
+
public CreateWorkspaceRequest setNetworkId(String networkId) {
this.networkId = networkId;
return this;
@@ -302,6 +319,7 @@ public boolean equals(Object o) {
&& Objects.equals(location, that.location)
&& Objects.equals(
managedServicesCustomerManagedKeyId, that.managedServicesCustomerManagedKeyId)
+ && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId)
&& Objects.equals(networkId, that.networkId)
&& Objects.equals(pricingTier, that.pricingTier)
&& Objects.equals(privateAccessSettingsId, that.privateAccessSettingsId)
@@ -324,6 +342,7 @@ public int hashCode() {
gkeConfig,
location,
managedServicesCustomerManagedKeyId,
+ networkConnectivityConfigId,
networkId,
pricingTier,
privateAccessSettingsId,
@@ -346,6 +365,7 @@ public String toString() {
.add("gkeConfig", gkeConfig)
.add("location", location)
.add("managedServicesCustomerManagedKeyId", managedServicesCustomerManagedKeyId)
+ .add("networkConnectivityConfigId", networkConnectivityConfigId)
.add("networkId", networkId)
.add("pricingTier", pricingTier)
.add("privateAccessSettingsId", privateAccessSettingsId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java
index 886c4c627..b924a754a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Workspace.java
@@ -22,7 +22,7 @@ public class Workspace {
@JsonProperty("azure_workspace_info")
private AzureWorkspaceInfo azureWorkspaceInfo;
- /** The cloud name. This field always has the value `gcp`. */
+ /** The cloud name. This field can have values like `azure`, `gcp`. */
@JsonProperty("cloud")
private String cloud;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java
index 0b0bcdebd..d2c3d1407 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRulePrivateLinkConnectionState.java
@@ -6,6 +6,8 @@
@Generated
public enum NccPrivateEndpointRulePrivateLinkConnectionState {
+ CREATE_FAILED,
+ CREATING,
DISCONNECTED,
ESTABLISHED,
EXPIRED,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java
index d74b57652..9f93d1a2d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesAPI.java
@@ -120,14 +120,6 @@ public Iterable list(ListFederationPoliciesRequest request) {
});
}
- /**
- * Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of
- * the recipient.
- */
- public FederationPolicy update(UpdateFederationPolicyRequest request) {
- return impl.update(request);
- }
-
public RecipientFederationPoliciesService impl() {
return impl;
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java
index f7854d4c2..32860b32e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesImpl.java
@@ -78,21 +78,4 @@ public ListFederationPoliciesResponse list(ListFederationPoliciesRequest request
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
-
- @Override
- public FederationPolicy update(UpdateFederationPolicyRequest request) {
- String path =
- String.format(
- "/api/2.0/data-sharing/recipients/%s/federation-policies/%s",
- request.getRecipientName(), request.getName());
- try {
- Request req = new Request("PATCH", path, apiClient.serialize(request.getPolicy()));
- ApiClient.setQuery(req, request);
- req.withHeader("Accept", "application/json");
- req.withHeader("Content-Type", "application/json");
- return apiClient.execute(req, FederationPolicy.class);
- } catch (IOException e) {
- throw new DatabricksException("IO error: " + e.getMessage(), e);
- }
- }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesService.java
index f0301935b..a39852ed6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientFederationPoliciesService.java
@@ -74,10 +74,4 @@ public interface RecipientFederationPoliciesService {
* non-Databricks recipients. The caller must have read access to the recipient.
*/
ListFederationPoliciesResponse list(ListFederationPoliciesRequest listFederationPoliciesRequest);
-
- /**
- * Updates an existing federation policy for an OIDC_RECIPIENT. The caller must be the owner of
- * the recipient.
- */
- FederationPolicy update(UpdateFederationPolicyRequest updateFederationPolicyRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java
deleted file mode 100755
index 75971d93e..000000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/UpdateFederationPolicyRequest.java
+++ /dev/null
@@ -1,98 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sharing;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.QueryParam;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class UpdateFederationPolicyRequest {
- /** Name of the policy. This is the name of the current name of the policy. */
- @JsonIgnore private String name;
-
- /** */
- @JsonProperty("policy")
- private FederationPolicy policy;
-
- /**
- * Name of the recipient. This is the name of the recipient for which the policy is being updated.
- */
- @JsonIgnore private String recipientName;
-
- /**
- * The field mask specifies which fields of the policy to update. To specify multiple fields in
- * the field mask, use comma as the separator (no space). The special value '*' indicates that all
- * fields should be updated (full replacement). If unspecified, all fields that are set in the
- * policy provided in the update request will overwrite the corresponding fields in the existing
- * policy. Example value: 'comment,oidc_policy.audiences'.
- */
- @JsonIgnore
- @QueryParam("update_mask")
- private String updateMask;
-
- public UpdateFederationPolicyRequest setName(String name) {
- this.name = name;
- return this;
- }
-
- public String getName() {
- return name;
- }
-
- public UpdateFederationPolicyRequest setPolicy(FederationPolicy policy) {
- this.policy = policy;
- return this;
- }
-
- public FederationPolicy getPolicy() {
- return policy;
- }
-
- public UpdateFederationPolicyRequest setRecipientName(String recipientName) {
- this.recipientName = recipientName;
- return this;
- }
-
- public String getRecipientName() {
- return recipientName;
- }
-
- public UpdateFederationPolicyRequest setUpdateMask(String updateMask) {
- this.updateMask = updateMask;
- return this;
- }
-
- public String getUpdateMask() {
- return updateMask;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- UpdateFederationPolicyRequest that = (UpdateFederationPolicyRequest) o;
- return Objects.equals(name, that.name)
- && Objects.equals(policy, that.policy)
- && Objects.equals(recipientName, that.recipientName)
- && Objects.equals(updateMask, that.updateMask);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(name, policy, recipientName, updateMask);
- }
-
- @Override
- public String toString() {
- return new ToStringer(UpdateFederationPolicyRequest.class)
- .add("name", name)
- .add("policy", policy)
- .add("recipientName", recipientName)
- .add("updateMask", updateMask)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java
index 8bfe1b758..cadee21b5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/TerminationReasonCode.java
@@ -132,11 +132,17 @@ public enum TerminationReasonCode {
NEPHOS_RESOURCE_MANAGEMENT,
NETVISOR_SETUP_TIMEOUT,
NETWORK_CHECK_CONTROL_PLANE_FAILURE,
+ NETWORK_CHECK_CONTROL_PLANE_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_DNS_SERVER_FAILURE,
+ NETWORK_CHECK_DNS_SERVER_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_METADATA_ENDPOINT_FAILURE,
+ NETWORK_CHECK_METADATA_ENDPOINT_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE,
+ NETWORK_CHECK_MULTIPLE_COMPONENTS_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_NIC_FAILURE,
+ NETWORK_CHECK_NIC_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CHECK_STORAGE_FAILURE,
+ NETWORK_CHECK_STORAGE_FAILURE_DUE_TO_MISCONFIG,
NETWORK_CONFIGURATION_FAILURE,
NFS_MOUNT_FAILURE,
NO_ACTIVATED_K8S,
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/LroTestingAPITest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/LroTestingAPITest.java
new file mode 100755
index 000000000..5ba5e2732
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/LroTestingAPITest.java
@@ -0,0 +1,491 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.gentesting.unittests;
+
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.ArgumentMatchers.*;
+import static org.mockito.Mockito.*;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.service.common.lro.LroOptions;
+import com.databricks.sdk.service.lrotesting.*;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.time.Duration;
+import java.util.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+@ExtendWith(MockitoExtension.class)
+public class LroTestingAPITest {
+ @Mock private ApiClient mockApiClient;
+ private ObjectMapper objectMapper;
+
+ static class HTTPFixture {
+ String method;
+ String resource;
+ Operation response;
+
+ HTTPFixture(String method, String resource, Operation response) {
+ this.method = method;
+ this.resource = resource;
+ this.response = response;
+ }
+ }
+
+ static class WaitTestCase {
+ String name;
+ List fixtures;
+ TestResource wantResult;
+ boolean wantErr;
+
+ WaitTestCase(
+ String name, List fixtures, TestResource wantResult, boolean wantErr) {
+ this.name = name;
+ this.fixtures = fixtures;
+ this.wantResult = wantResult;
+ this.wantErr = wantErr;
+ }
+ }
+
+ static class CancelTestCase {
+ String name;
+ List fixtures;
+ boolean wantErr;
+
+ CancelTestCase(String name, List fixtures, boolean wantErr) {
+ this.name = name;
+ this.fixtures = fixtures;
+ this.wantErr = wantErr;
+ }
+ }
+
+ static class NameTestCase {
+ String name;
+ List fixtures;
+ String wantName;
+
+ NameTestCase(String name, List fixtures, String wantName) {
+ this.name = name;
+ this.fixtures = fixtures;
+ this.wantName = wantName;
+ }
+ }
+
+ static class MetadataTestCase {
+ String name;
+ List fixtures;
+ TestResourceOperationMetadata wantMetadata;
+ boolean wantErr;
+
+ MetadataTestCase(
+ String name,
+ List fixtures,
+ TestResourceOperationMetadata wantMetadata,
+ boolean wantErr) {
+ this.name = name;
+ this.fixtures = fixtures;
+ this.wantMetadata = wantMetadata;
+ this.wantErr = wantErr;
+ }
+ }
+
+ static class DoneTestCase {
+ String name;
+ List fixtures;
+ boolean wantDone;
+ boolean wantErr;
+
+ DoneTestCase(String name, List fixtures, boolean wantDone, boolean wantErr) {
+ this.name = name;
+ this.fixtures = fixtures;
+ this.wantDone = wantDone;
+ this.wantErr = wantErr;
+ }
+ }
+
+ @BeforeEach
+ void setUp() {
+ objectMapper = new ObjectMapper();
+ }
+
+ private void applyFixtures(List fixtures) throws Exception {
+ // Create a custom Answer that validates requests and returns responses in sequence.
+ final int[] callCount = {0};
+ when(mockApiClient.execute(any(Request.class), eq(Operation.class)))
+ .thenAnswer(
+ invocation -> {
+ Request request = invocation.getArgument(0);
+ if (callCount[0] >= fixtures.size()) {
+ throw new RuntimeException("More API calls than expected fixtures");
+ }
+ HTTPFixture expectedFixture = fixtures.get(callCount[0]);
+ if (!expectedFixture.method.equals(request.getMethod())) {
+ throw new AssertionError(
+ String.format(
+ "Call %d: Expected method %s but got %s",
+ callCount[0], expectedFixture.method, request.getMethod()));
+ }
+ String expectedPath = expectedFixture.resource;
+ if (!request.getUrl().equals(expectedPath)) {
+ throw new AssertionError(
+ String.format(
+ "Call %d: Expected exact URL %s but got %s",
+ callCount[0], expectedPath, request.getUrl()));
+ }
+ Operation response = expectedFixture.response;
+ callCount[0]++;
+ return response;
+ });
+ }
+
+ static List waitTestCases() throws JsonProcessingException, JsonMappingException {
+ return Arrays.asList(
+ new WaitTestCase(
+ "Success",
+ Arrays.asList(
+ new HTTPFixture(
+ "POST",
+ "/api/2.0/lro-testing/resources",
+ new Operation()
+ .setDone(false)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 5\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345")),
+ new HTTPFixture(
+ "GET",
+ "/api/2.0/lro-testing/operations/operations/test-resource-create-12345",
+ new Operation()
+ .setDone(false)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 75\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345")),
+ new HTTPFixture(
+ "GET",
+ "/api/2.0/lro-testing/operations/operations/test-resource-create-12345",
+ new Operation()
+ .setDone(true)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 100\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345")
+ .setResponse(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"id\": \"test-resource-123\",\n"
+ + " \"name\": \"test-resource\"\n"
+ + "}",
+ Object.class)))),
+ new TestResource().setId("test-resource-123").setName("test-resource"),
+ false),
+ new WaitTestCase(
+ "Error",
+ Arrays.asList(
+ new HTTPFixture(
+ "POST",
+ "/api/2.0/lro-testing/resources",
+ new Operation()
+ .setDone(false)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 5\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345")),
+ new HTTPFixture(
+ "GET",
+ "/api/2.0/lro-testing/operations/operations/test-resource-create-12345",
+ new Operation()
+ .setDone(true)
+ .setError(
+ new DatabricksServiceExceptionWithDetailsProto()
+ .setErrorCode(ErrorCode.INTERNAL_ERROR)
+ .setMessage("Test error message"))
+ .setName("operations/test-resource-create-12345"))),
+ null,
+ true));
+ }
+
+ @ParameterizedTest(name = "{0}")
+ @MethodSource("waitTestCases")
+ void testLROCreateTestResourceWait(WaitTestCase testCase) throws Exception {
+ // Reset mock and apply fixtures.
+ reset(mockApiClient);
+ applyFixtures(testCase.fixtures);
+ // Create API and proper request.
+ LroTestingAPI api = new LroTestingAPI(mockApiClient);
+ CreateTestResourceOperation operation =
+ api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource()));
+ if (testCase.wantErr) {
+ assertThrows(
+ Exception.class,
+ () ->
+ operation.waitForCompletion(
+ Optional.of(LroOptions.withTimeout(Duration.ofMinutes(1)))),
+ "Test case: " + testCase.name);
+ } else {
+ TestResource result =
+ operation.waitForCompletion(Optional.of(LroOptions.withTimeout(Duration.ofMinutes(1))));
+ assertEquals(testCase.wantResult, result, "Test case: " + testCase.name);
+ }
+ }
+
+ static List cancelTestCases()
+ throws JsonProcessingException, JsonMappingException {
+ return Arrays.asList(
+ new CancelTestCase(
+ "Success",
+ Arrays.asList(
+ new HTTPFixture(
+ "POST",
+ "/api/2.0/lro-testing/resources",
+ new Operation()
+ .setDone(false)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 5\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345")),
+ new HTTPFixture(
+ "POST",
+ "/api/2.0/lro-testing/operations/operations/test-resource-create-12345/cancel",
+ new Operation()
+ .setDone(true)
+ .setName("operations/test-resource-create-12345"))),
+ false));
+ }
+
+ @ParameterizedTest(name = "{0}")
+ @MethodSource("cancelTestCases")
+ void testLROCancelTestResourceCancel(CancelTestCase testCase) throws Exception {
+ // Reset mock and apply fixtures.
+ reset(mockApiClient);
+ applyFixtures(testCase.fixtures);
+ // Create API and execute test.
+ LroTestingAPI api = new LroTestingAPI(mockApiClient);
+ CreateTestResourceOperation operation =
+ api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource()));
+ if (testCase.wantErr) {
+ assertThrows(
+ Exception.class,
+ () -> operation.cancel(),
+ "Cancel should have failed for test case: " + testCase.name);
+ } else {
+ assertDoesNotThrow(() -> operation.cancel(), "Cancel failed for test case: " + testCase.name);
+ }
+ }
+
+ static List nameTestCases() throws JsonProcessingException, JsonMappingException {
+ return Arrays.asList(
+ new NameTestCase(
+ "Success",
+ Arrays.asList(
+ new HTTPFixture(
+ "POST",
+ "/api/2.0/lro-testing/resources",
+ new Operation()
+ .setDone(false)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 5\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345"))),
+ "operations/test-resource-create-12345"));
+ }
+
+ @ParameterizedTest(name = "{0}")
+ @MethodSource("nameTestCases")
+ void testLROCreateTestResourceName(NameTestCase testCase) throws Exception {
+ // Reset mock and apply fixtures.
+ reset(mockApiClient);
+ applyFixtures(testCase.fixtures);
+ // Create API and execute test.
+ LroTestingAPI api = new LroTestingAPI(mockApiClient);
+ CreateTestResourceOperation operation =
+ api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource()));
+ String name = operation.getName();
+ assertEquals(testCase.wantName, name, "Name mismatch for test case: " + testCase.name);
+ }
+
+ static List metadataTestCases()
+ throws JsonProcessingException, JsonMappingException {
+ return Arrays.asList(
+ new MetadataTestCase(
+ "Success",
+ Arrays.asList(
+ new HTTPFixture(
+ "POST",
+ "/api/2.0/lro-testing/resources",
+ new Operation()
+ .setDone(false)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 5\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345"))),
+ new TestResourceOperationMetadata()
+ .setProgressPercent(5L)
+ .setResourceId("test-resource-123"),
+ false));
+ }
+
+ @ParameterizedTest(name = "{0}")
+ @MethodSource("metadataTestCases")
+ void testLROCreateTestResourceMetadata(MetadataTestCase testCase) throws Exception {
+ // Reset mock and apply fixtures.
+ reset(mockApiClient);
+ applyFixtures(testCase.fixtures);
+ // Create API and execute test.
+ LroTestingAPI api = new LroTestingAPI(mockApiClient);
+ CreateTestResourceOperation operation =
+ api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource()));
+ if (testCase.wantErr) {
+ assertThrows(
+ Exception.class,
+ () -> operation.getMetadata(),
+ "Metadata should have failed for test case: " + testCase.name);
+ } else {
+ TestResourceOperationMetadata metadata = operation.getMetadata();
+ assertNotNull(metadata, "Metadata should not be null for test case: " + testCase.name);
+ assertEquals(
+ testCase.wantMetadata, metadata, "Metadata mismatch for test case: " + testCase.name);
+ }
+ }
+ // Done test cases.
+ static List doneTestCases() throws JsonProcessingException, JsonMappingException {
+ return Arrays.asList(
+ new DoneTestCase(
+ "True",
+ Arrays.asList(
+ new HTTPFixture(
+ "POST",
+ "/api/2.0/lro-testing/resources",
+ new Operation()
+ .setDone(false)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 5\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345")),
+ new HTTPFixture(
+ "GET",
+ "/api/2.0/lro-testing/operations/operations/test-resource-create-12345",
+ new Operation()
+ .setDone(true)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 100\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345")
+ .setResponse(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"id\": \"test-resource-123\",\n"
+ + " \"name\": \"test-resource\"\n"
+ + "}",
+ Object.class)))),
+ true,
+ false),
+ new DoneTestCase(
+ "False",
+ Arrays.asList(
+ new HTTPFixture(
+ "POST",
+ "/api/2.0/lro-testing/resources",
+ new Operation()
+ .setDone(false)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 5\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345")),
+ new HTTPFixture(
+ "GET",
+ "/api/2.0/lro-testing/operations/operations/test-resource-create-12345",
+ new Operation()
+ .setDone(false)
+ .setMetadata(
+ new ObjectMapper()
+ .readValue(
+ "{\n"
+ + " \"resource_id\": \"test-resource-123\",\n"
+ + " \"progress_percent\": 75\n"
+ + "}",
+ Object.class))
+ .setName("operations/test-resource-create-12345"))),
+ false,
+ false));
+ }
+
+ @ParameterizedTest(name = "{0}")
+ @MethodSource("doneTestCases")
+ void testLROCreateTestResourceDone(DoneTestCase testCase) throws Exception {
+ // Reset mock and apply fixtures.
+ reset(mockApiClient);
+ applyFixtures(testCase.fixtures);
+ // Create API and execute test.
+ LroTestingAPI api = new LroTestingAPI(mockApiClient);
+ CreateTestResourceOperation operation =
+ api.createTestResource(new CreateTestResourceRequest().setResource(new TestResource()));
+ if (testCase.wantErr) {
+ assertThrows(
+ Exception.class,
+ () -> operation.isDone(),
+ "Done should have failed for test case: " + testCase.name);
+ } else {
+ boolean done = operation.isDone();
+ assertEquals(testCase.wantDone, done, "Done mismatch for test case: " + testCase.name);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/ComplexQueryParam.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/ComplexQueryParam.java
new file mode 100755
index 000000000..238de5ed6
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/ComplexQueryParam.java
@@ -0,0 +1,64 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.httpcallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ComplexQueryParam {
+ /** */
+ @JsonProperty("nested_optional_query_param")
+ @QueryParam("nested_optional_query_param")
+ private String nestedOptionalQueryParam;
+
+ /** */
+ @JsonProperty("nested_repeated_query_param")
+ @QueryParam("nested_repeated_query_param")
+ private Collection nestedRepeatedQueryParam;
+
+ public ComplexQueryParam setNestedOptionalQueryParam(String nestedOptionalQueryParam) {
+ this.nestedOptionalQueryParam = nestedOptionalQueryParam;
+ return this;
+ }
+
+ public String getNestedOptionalQueryParam() {
+ return nestedOptionalQueryParam;
+ }
+
+ public ComplexQueryParam setNestedRepeatedQueryParam(
+ Collection nestedRepeatedQueryParam) {
+ this.nestedRepeatedQueryParam = nestedRepeatedQueryParam;
+ return this;
+ }
+
+ public Collection getNestedRepeatedQueryParam() {
+ return nestedRepeatedQueryParam;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ComplexQueryParam that = (ComplexQueryParam) o;
+ return Objects.equals(nestedOptionalQueryParam, that.nestedOptionalQueryParam)
+ && Objects.equals(nestedRepeatedQueryParam, that.nestedRepeatedQueryParam);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nestedOptionalQueryParam, nestedRepeatedQueryParam);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ComplexQueryParam.class)
+ .add("nestedOptionalQueryParam", nestedOptionalQueryParam)
+ .add("nestedRepeatedQueryParam", nestedRepeatedQueryParam)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/CreateResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/CreateResourceRequest.java
new file mode 100755
index 000000000..336931156
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/CreateResourceRequest.java
@@ -0,0 +1,88 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.httpcallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** This mimics "old" style post requests which have the resource inlined. */
+@Generated
+public class CreateResourceRequest {
+ /** Body element */
+ @JsonProperty("body_field")
+ private String bodyField;
+
+ /** */
+ @JsonIgnore private Boolean pathParamBool;
+
+ /** */
+ @JsonIgnore private Long pathParamInt;
+
+ /** */
+ @JsonIgnore private String pathParamString;
+
+ public CreateResourceRequest setBodyField(String bodyField) {
+ this.bodyField = bodyField;
+ return this;
+ }
+
+ public String getBodyField() {
+ return bodyField;
+ }
+
+ public CreateResourceRequest setPathParamBool(Boolean pathParamBool) {
+ this.pathParamBool = pathParamBool;
+ return this;
+ }
+
+ public Boolean getPathParamBool() {
+ return pathParamBool;
+ }
+
+ public CreateResourceRequest setPathParamInt(Long pathParamInt) {
+ this.pathParamInt = pathParamInt;
+ return this;
+ }
+
+ public Long getPathParamInt() {
+ return pathParamInt;
+ }
+
+ public CreateResourceRequest setPathParamString(String pathParamString) {
+ this.pathParamString = pathParamString;
+ return this;
+ }
+
+ public String getPathParamString() {
+ return pathParamString;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateResourceRequest that = (CreateResourceRequest) o;
+ return Objects.equals(bodyField, that.bodyField)
+ && Objects.equals(pathParamBool, that.pathParamBool)
+ && Objects.equals(pathParamInt, that.pathParamInt)
+ && Objects.equals(pathParamString, that.pathParamString);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(bodyField, pathParamBool, pathParamInt, pathParamString);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateResourceRequest.class)
+ .add("bodyField", bodyField)
+ .add("pathParamBool", pathParamBool)
+ .add("pathParamInt", pathParamInt)
+ .add("pathParamString", pathParamString)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java
new file mode 100755
index 000000000..ac2e90ec1
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/GetResourceRequest.java
@@ -0,0 +1,203 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.httpcallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GetResourceRequest {
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ */
+ @JsonIgnore
+ @QueryParam("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("optional_complex_query_param")
+ private ComplexQueryParam optionalComplexQueryParam;
+
+ /** */
+ @JsonIgnore private Boolean pathParamBool;
+
+ /** */
+ @JsonIgnore private Long pathParamInt;
+
+ /** */
+ @JsonIgnore private String pathParamString;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("query_param_bool")
+ private Boolean queryParamBool;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("query_param_int")
+ private Long queryParamInt;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("query_param_string")
+ private String queryParamString;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("repeated_complex_query_param")
+ private Collection repeatedComplexQueryParam;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("repeated_query_param")
+ private Collection repeatedQueryParam;
+
+ public GetResourceRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public GetResourceRequest setOptionalComplexQueryParam(
+ ComplexQueryParam optionalComplexQueryParam) {
+ this.optionalComplexQueryParam = optionalComplexQueryParam;
+ return this;
+ }
+
+ public ComplexQueryParam getOptionalComplexQueryParam() {
+ return optionalComplexQueryParam;
+ }
+
+ public GetResourceRequest setPathParamBool(Boolean pathParamBool) {
+ this.pathParamBool = pathParamBool;
+ return this;
+ }
+
+ public Boolean getPathParamBool() {
+ return pathParamBool;
+ }
+
+ public GetResourceRequest setPathParamInt(Long pathParamInt) {
+ this.pathParamInt = pathParamInt;
+ return this;
+ }
+
+ public Long getPathParamInt() {
+ return pathParamInt;
+ }
+
+ public GetResourceRequest setPathParamString(String pathParamString) {
+ this.pathParamString = pathParamString;
+ return this;
+ }
+
+ public String getPathParamString() {
+ return pathParamString;
+ }
+
+ public GetResourceRequest setQueryParamBool(Boolean queryParamBool) {
+ this.queryParamBool = queryParamBool;
+ return this;
+ }
+
+ public Boolean getQueryParamBool() {
+ return queryParamBool;
+ }
+
+ public GetResourceRequest setQueryParamInt(Long queryParamInt) {
+ this.queryParamInt = queryParamInt;
+ return this;
+ }
+
+ public Long getQueryParamInt() {
+ return queryParamInt;
+ }
+
+ public GetResourceRequest setQueryParamString(String queryParamString) {
+ this.queryParamString = queryParamString;
+ return this;
+ }
+
+ public String getQueryParamString() {
+ return queryParamString;
+ }
+
+ public GetResourceRequest setRepeatedComplexQueryParam(
+ Collection repeatedComplexQueryParam) {
+ this.repeatedComplexQueryParam = repeatedComplexQueryParam;
+ return this;
+ }
+
+ public Collection getRepeatedComplexQueryParam() {
+ return repeatedComplexQueryParam;
+ }
+
+ public GetResourceRequest setRepeatedQueryParam(Collection repeatedQueryParam) {
+ this.repeatedQueryParam = repeatedQueryParam;
+ return this;
+ }
+
+ public Collection getRepeatedQueryParam() {
+ return repeatedQueryParam;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetResourceRequest that = (GetResourceRequest) o;
+ return Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(optionalComplexQueryParam, that.optionalComplexQueryParam)
+ && Objects.equals(pathParamBool, that.pathParamBool)
+ && Objects.equals(pathParamInt, that.pathParamInt)
+ && Objects.equals(pathParamString, that.pathParamString)
+ && Objects.equals(queryParamBool, that.queryParamBool)
+ && Objects.equals(queryParamInt, that.queryParamInt)
+ && Objects.equals(queryParamString, that.queryParamString)
+ && Objects.equals(repeatedComplexQueryParam, that.repeatedComplexQueryParam)
+ && Objects.equals(repeatedQueryParam, that.repeatedQueryParam);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ fieldMask,
+ optionalComplexQueryParam,
+ pathParamBool,
+ pathParamInt,
+ pathParamString,
+ queryParamBool,
+ queryParamInt,
+ queryParamString,
+ repeatedComplexQueryParam,
+ repeatedQueryParam);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetResourceRequest.class)
+ .add("fieldMask", fieldMask)
+ .add("optionalComplexQueryParam", optionalComplexQueryParam)
+ .add("pathParamBool", pathParamBool)
+ .add("pathParamInt", pathParamInt)
+ .add("pathParamString", pathParamString)
+ .add("queryParamBool", queryParamBool)
+ .add("queryParamInt", queryParamInt)
+ .add("queryParamString", queryParamString)
+ .add("repeatedComplexQueryParam", repeatedComplexQueryParam)
+ .add("repeatedQueryParam", repeatedQueryParam)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java
new file mode 100755
index 000000000..8a7345c5d
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2API.java
@@ -0,0 +1,51 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.httpcallv2;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Lorem Ipsum */
+@Generated
+public class HttpCallV2API {
+ private static final Logger LOG = LoggerFactory.getLogger(HttpCallV2API.class);
+
+ private final HttpCallV2Service impl;
+
+ /** Regular-use constructor */
+ public HttpCallV2API(ApiClient apiClient) {
+ impl = new HttpCallV2Impl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public HttpCallV2API(HttpCallV2Service mock) {
+ impl = mock;
+ }
+
+ /** This mimics "old" style post requests which have the resource inlined. */
+ public Resource createResource(CreateResourceRequest request) {
+ return impl.createResource(request);
+ }
+
+ public Resource getResource(String pathParamString, long pathParamInt, boolean pathParamBool) {
+ return getResource(
+ new GetResourceRequest()
+ .setPathParamString(pathParamString)
+ .setPathParamInt(pathParamInt)
+ .setPathParamBool(pathParamBool));
+ }
+
+ public Resource getResource(GetResourceRequest request) {
+ return impl.getResource(request);
+ }
+
+ /** This mimics "new" style post requests which have a body field. */
+ public Resource updateResource(UpdateResourceRequest request) {
+ return impl.updateResource(request);
+ }
+
+ public HttpCallV2Service impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Impl.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Impl.java
new file mode 100755
index 000000000..fd0d11228
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Impl.java
@@ -0,0 +1,70 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.httpcallv2;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of HttpCallV2 */
+@Generated
+class HttpCallV2Impl implements HttpCallV2Service {
+ private final ApiClient apiClient;
+
+ public HttpCallV2Impl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public Resource createResource(CreateResourceRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/http-call/%s/%s/%s",
+ request.getPathParamString(), request.getPathParamInt(), request.getPathParamBool());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Resource.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Resource getResource(GetResourceRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/http-call/%s/%s/%s",
+ request.getPathParamString(), request.getPathParamInt(), request.getPathParamBool());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, Resource.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Resource updateResource(UpdateResourceRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/http-call/%s/%s/%s",
+ request.getNestedPathParamString(),
+ request.getNestedPathParamInt(),
+ request.getNestedPathParamBool());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getResource()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Resource.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Service.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Service.java
new file mode 100755
index 000000000..9ae378e4b
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/HttpCallV2Service.java
@@ -0,0 +1,22 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.httpcallv2;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Lorem Ipsum
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface HttpCallV2Service {
+ /** This mimics "old" style post requests which have the resource inlined. */
+ Resource createResource(CreateResourceRequest createResourceRequest);
+
+ Resource getResource(GetResourceRequest getResourceRequest);
+
+ /** This mimics "new" style post requests which have a body field. */
+ Resource updateResource(UpdateResourceRequest updateResourceRequest);
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/Resource.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/Resource.java
new file mode 100755
index 000000000..94779c31b
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/Resource.java
@@ -0,0 +1,105 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.httpcallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class Resource {
+ /** */
+ @JsonProperty("any_field")
+ private Object anyField;
+
+ /** */
+ @JsonProperty("body_field")
+ private String bodyField;
+
+ /** */
+ @JsonProperty("nested_path_param_bool")
+ private Boolean nestedPathParamBool;
+
+ /** */
+ @JsonProperty("nested_path_param_int")
+ private Long nestedPathParamInt;
+
+ /** */
+ @JsonProperty("nested_path_param_string")
+ private String nestedPathParamString;
+
+ public Resource setAnyField(Object anyField) {
+ this.anyField = anyField;
+ return this;
+ }
+
+ public Object getAnyField() {
+ return anyField;
+ }
+
+ public Resource setBodyField(String bodyField) {
+ this.bodyField = bodyField;
+ return this;
+ }
+
+ public String getBodyField() {
+ return bodyField;
+ }
+
+ public Resource setNestedPathParamBool(Boolean nestedPathParamBool) {
+ this.nestedPathParamBool = nestedPathParamBool;
+ return this;
+ }
+
+ public Boolean getNestedPathParamBool() {
+ return nestedPathParamBool;
+ }
+
+ public Resource setNestedPathParamInt(Long nestedPathParamInt) {
+ this.nestedPathParamInt = nestedPathParamInt;
+ return this;
+ }
+
+ public Long getNestedPathParamInt() {
+ return nestedPathParamInt;
+ }
+
+ public Resource setNestedPathParamString(String nestedPathParamString) {
+ this.nestedPathParamString = nestedPathParamString;
+ return this;
+ }
+
+ public String getNestedPathParamString() {
+ return nestedPathParamString;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Resource that = (Resource) o;
+ return Objects.equals(anyField, that.anyField)
+ && Objects.equals(bodyField, that.bodyField)
+ && Objects.equals(nestedPathParamBool, that.nestedPathParamBool)
+ && Objects.equals(nestedPathParamInt, that.nestedPathParamInt)
+ && Objects.equals(nestedPathParamString, that.nestedPathParamString);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ anyField, bodyField, nestedPathParamBool, nestedPathParamInt, nestedPathParamString);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Resource.class)
+ .add("anyField", anyField)
+ .add("bodyField", bodyField)
+ .add("nestedPathParamBool", nestedPathParamBool)
+ .add("nestedPathParamInt", nestedPathParamInt)
+ .add("nestedPathParamString", nestedPathParamString)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java
new file mode 100755
index 000000000..96ccd80c3
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/httpcallv2/UpdateResourceRequest.java
@@ -0,0 +1,220 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.httpcallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class UpdateResourceRequest {
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ */
+ @JsonIgnore
+ @QueryParam("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonIgnore private Boolean nestedPathParamBool;
+
+ /** */
+ @JsonIgnore private Long nestedPathParamInt;
+
+ /** */
+ @JsonIgnore private String nestedPathParamString;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("optional_complex_query_param")
+ private ComplexQueryParam optionalComplexQueryParam;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("query_param_bool")
+ private Boolean queryParamBool;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("query_param_int")
+ private Long queryParamInt;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("query_param_string")
+ private String queryParamString;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("repeated_complex_query_param")
+ private Collection repeatedComplexQueryParam;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("repeated_query_param")
+ private Collection repeatedQueryParam;
+
+ /** Body element */
+ @JsonProperty("resource")
+ private Resource resource;
+
+ public UpdateResourceRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateResourceRequest setNestedPathParamBool(Boolean nestedPathParamBool) {
+ this.nestedPathParamBool = nestedPathParamBool;
+ return this;
+ }
+
+ public Boolean getNestedPathParamBool() {
+ return nestedPathParamBool;
+ }
+
+ public UpdateResourceRequest setNestedPathParamInt(Long nestedPathParamInt) {
+ this.nestedPathParamInt = nestedPathParamInt;
+ return this;
+ }
+
+ public Long getNestedPathParamInt() {
+ return nestedPathParamInt;
+ }
+
+ public UpdateResourceRequest setNestedPathParamString(String nestedPathParamString) {
+ this.nestedPathParamString = nestedPathParamString;
+ return this;
+ }
+
+ public String getNestedPathParamString() {
+ return nestedPathParamString;
+ }
+
+ public UpdateResourceRequest setOptionalComplexQueryParam(
+ ComplexQueryParam optionalComplexQueryParam) {
+ this.optionalComplexQueryParam = optionalComplexQueryParam;
+ return this;
+ }
+
+ public ComplexQueryParam getOptionalComplexQueryParam() {
+ return optionalComplexQueryParam;
+ }
+
+ public UpdateResourceRequest setQueryParamBool(Boolean queryParamBool) {
+ this.queryParamBool = queryParamBool;
+ return this;
+ }
+
+ public Boolean getQueryParamBool() {
+ return queryParamBool;
+ }
+
+ public UpdateResourceRequest setQueryParamInt(Long queryParamInt) {
+ this.queryParamInt = queryParamInt;
+ return this;
+ }
+
+ public Long getQueryParamInt() {
+ return queryParamInt;
+ }
+
+ public UpdateResourceRequest setQueryParamString(String queryParamString) {
+ this.queryParamString = queryParamString;
+ return this;
+ }
+
+ public String getQueryParamString() {
+ return queryParamString;
+ }
+
+ public UpdateResourceRequest setRepeatedComplexQueryParam(
+ Collection repeatedComplexQueryParam) {
+ this.repeatedComplexQueryParam = repeatedComplexQueryParam;
+ return this;
+ }
+
+ public Collection getRepeatedComplexQueryParam() {
+ return repeatedComplexQueryParam;
+ }
+
+ public UpdateResourceRequest setRepeatedQueryParam(Collection repeatedQueryParam) {
+ this.repeatedQueryParam = repeatedQueryParam;
+ return this;
+ }
+
+ public Collection getRepeatedQueryParam() {
+ return repeatedQueryParam;
+ }
+
+ public UpdateResourceRequest setResource(Resource resource) {
+ this.resource = resource;
+ return this;
+ }
+
+ public Resource getResource() {
+ return resource;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateResourceRequest that = (UpdateResourceRequest) o;
+ return Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(nestedPathParamBool, that.nestedPathParamBool)
+ && Objects.equals(nestedPathParamInt, that.nestedPathParamInt)
+ && Objects.equals(nestedPathParamString, that.nestedPathParamString)
+ && Objects.equals(optionalComplexQueryParam, that.optionalComplexQueryParam)
+ && Objects.equals(queryParamBool, that.queryParamBool)
+ && Objects.equals(queryParamInt, that.queryParamInt)
+ && Objects.equals(queryParamString, that.queryParamString)
+ && Objects.equals(repeatedComplexQueryParam, that.repeatedComplexQueryParam)
+ && Objects.equals(repeatedQueryParam, that.repeatedQueryParam)
+ && Objects.equals(resource, that.resource);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ fieldMask,
+ nestedPathParamBool,
+ nestedPathParamInt,
+ nestedPathParamString,
+ optionalComplexQueryParam,
+ queryParamBool,
+ queryParamInt,
+ queryParamString,
+ repeatedComplexQueryParam,
+ repeatedQueryParam,
+ resource);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateResourceRequest.class)
+ .add("fieldMask", fieldMask)
+ .add("nestedPathParamBool", nestedPathParamBool)
+ .add("nestedPathParamInt", nestedPathParamInt)
+ .add("nestedPathParamString", nestedPathParamString)
+ .add("optionalComplexQueryParam", optionalComplexQueryParam)
+ .add("queryParamBool", queryParamBool)
+ .add("queryParamInt", queryParamInt)
+ .add("queryParamString", queryParamString)
+ .add("repeatedComplexQueryParam", repeatedComplexQueryParam)
+ .add("repeatedQueryParam", repeatedQueryParam)
+ .add("resource", resource)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/CreateTestResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/CreateTestResourceRequest.java
new file mode 100755
index 000000000..6b72254c8
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/CreateTestResourceRequest.java
@@ -0,0 +1,62 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.idempotencytesting;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateTestResourceRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("request_id")
+ private String requestId;
+
+ /** */
+ @JsonProperty("test_resource")
+ private TestResource testResource;
+
+ public CreateTestResourceRequest setRequestId(String requestId) {
+ this.requestId = requestId;
+ return this;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ public CreateTestResourceRequest setTestResource(TestResource testResource) {
+ this.testResource = testResource;
+ return this;
+ }
+
+ public TestResource getTestResource() {
+ return testResource;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateTestResourceRequest that = (CreateTestResourceRequest) o;
+ return Objects.equals(requestId, that.requestId)
+ && Objects.equals(testResource, that.testResource);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(requestId, testResource);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateTestResourceRequest.class)
+ .add("requestId", requestId)
+ .add("testResource", testResource)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingAPI.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingAPI.java
new file mode 100755
index 000000000..9f4dc8dc8
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingAPI.java
@@ -0,0 +1,33 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.idempotencytesting;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Test service for Idempotency of Operations */
+@Generated
+public class IdempotencyTestingAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(IdempotencyTestingAPI.class);
+
+ private final IdempotencyTestingService impl;
+
+ /** Regular-use constructor */
+ public IdempotencyTestingAPI(ApiClient apiClient) {
+ impl = new IdempotencyTestingImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public IdempotencyTestingAPI(IdempotencyTestingService mock) {
+ impl = mock;
+ }
+
+ public TestResource createTestResource(CreateTestResourceRequest request) {
+ return impl.createTestResource(request);
+ }
+
+ public IdempotencyTestingService impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingImpl.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingImpl.java
new file mode 100755
index 000000000..8b685b534
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingImpl.java
@@ -0,0 +1,32 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.idempotencytesting;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of IdempotencyTesting */
+@Generated
+class IdempotencyTestingImpl implements IdempotencyTestingService {
+ private final ApiClient apiClient;
+
+ public IdempotencyTestingImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public TestResource createTestResource(CreateTestResourceRequest request) {
+ String path = "/api/2.0/idempotency-testing/resources";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request.getTestResource()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, TestResource.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingService.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingService.java
new file mode 100755
index 000000000..2dc443915
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/IdempotencyTestingService.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.idempotencytesting;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Test service for Idempotency of Operations
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface IdempotencyTestingService {
+
+ TestResource createTestResource(CreateTestResourceRequest createTestResourceRequest);
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/TestResource.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/TestResource.java
new file mode 100755
index 000000000..3f2052572
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/idempotencytesting/TestResource.java
@@ -0,0 +1,55 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.idempotencytesting;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class TestResource {
+ /** */
+ @JsonProperty("id")
+ private String id;
+
+ /** */
+ @JsonProperty("name")
+ private String name;
+
+ public TestResource setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public TestResource setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ TestResource that = (TestResource) o;
+ return Objects.equals(id, that.id) && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(TestResource.class).add("id", id).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/GetResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/GetResourceRequest.java
new file mode 100755
index 000000000..07c08db6c
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/GetResourceRequest.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetResourceRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ /** Description. */
+ @JsonIgnore
+ @QueryParam("resource")
+ private Resource resource;
+
+ public GetResourceRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public GetResourceRequest setResource(Resource resource) {
+ this.resource = resource;
+ return this;
+ }
+
+ public Resource getResource() {
+ return resource;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetResourceRequest that = (GetResourceRequest) o;
+ return Objects.equals(name, that.name) && Objects.equals(resource, that.resource);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, resource);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetResourceRequest.class)
+ .add("name", name)
+ .add("resource", resource)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java
new file mode 100755
index 000000000..d84b553cb
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2API.java
@@ -0,0 +1,37 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Lorem Ipsum */
+@Generated
+public class JsonMarshallV2API {
+ private static final Logger LOG = LoggerFactory.getLogger(JsonMarshallV2API.class);
+
+ private final JsonMarshallV2Service impl;
+
+ /** Regular-use constructor */
+ public JsonMarshallV2API(ApiClient apiClient) {
+ impl = new JsonMarshallV2Impl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public JsonMarshallV2API(JsonMarshallV2Service mock) {
+ impl = mock;
+ }
+
+ public Resource getResource(String name, Resource resource) {
+ return getResource(new GetResourceRequest().setName(name).setResource(resource));
+ }
+
+ public Resource getResource(GetResourceRequest request) {
+ return impl.getResource(request);
+ }
+
+ public JsonMarshallV2Service impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Impl.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Impl.java
new file mode 100755
index 000000000..e7ad2b273
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Impl.java
@@ -0,0 +1,31 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of JsonMarshallV2 */
+@Generated
+class JsonMarshallV2Impl implements JsonMarshallV2Service {
+ private final ApiClient apiClient;
+
+ public JsonMarshallV2Impl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public Resource getResource(GetResourceRequest request) {
+ String path = String.format("/api/2.0/json-marshall/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, Resource.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Service.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Service.java
new file mode 100755
index 000000000..a88cba0f0
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/JsonMarshallV2Service.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Lorem Ipsum
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface JsonMarshallV2Service {
+
+ Resource getResource(GetResourceRequest getResourceRequest);
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java
new file mode 100755
index 000000000..d045ceb25
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/NestedMessage.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class NestedMessage {
+ /** */
+ @JsonProperty("optional_duration")
+ @QueryParam("optional_duration")
+ private String optionalDuration;
+
+ /** */
+ @JsonProperty("optional_string")
+ @QueryParam("optional_string")
+ private String optionalString;
+
+ /** */
+ @JsonProperty("optional_timestamp")
+ @QueryParam("optional_timestamp")
+ private String optionalTimestamp;
+
+ public NestedMessage setOptionalDuration(String optionalDuration) {
+ this.optionalDuration = optionalDuration;
+ return this;
+ }
+
+ public String getOptionalDuration() {
+ return optionalDuration;
+ }
+
+ public NestedMessage setOptionalString(String optionalString) {
+ this.optionalString = optionalString;
+ return this;
+ }
+
+ public String getOptionalString() {
+ return optionalString;
+ }
+
+ public NestedMessage setOptionalTimestamp(String optionalTimestamp) {
+ this.optionalTimestamp = optionalTimestamp;
+ return this;
+ }
+
+ public String getOptionalTimestamp() {
+ return optionalTimestamp;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ NestedMessage that = (NestedMessage) o;
+ return Objects.equals(optionalDuration, that.optionalDuration)
+ && Objects.equals(optionalString, that.optionalString)
+ && Objects.equals(optionalTimestamp, that.optionalTimestamp);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(optionalDuration, optionalString, optionalTimestamp);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(NestedMessage.class)
+ .add("optionalDuration", optionalDuration)
+ .add("optionalString", optionalString)
+ .add("optionalTimestamp", optionalTimestamp)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java
new file mode 100755
index 000000000..19caee741
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/OptionalFields.java
@@ -0,0 +1,316 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Objects;
+
+@Generated
+public class OptionalFields {
+ /** */
+ @JsonProperty("duration")
+ @QueryParam("duration")
+ private String duration;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ */
+ @JsonProperty("field_mask")
+ @QueryParam("field_mask")
+ private String fieldMask;
+
+ /** Legacy Well Known types */
+ @JsonProperty("legacy_duration")
+ @QueryParam("legacy_duration")
+ private String legacyDuration;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ */
+ @JsonProperty("legacy_field_mask")
+ @QueryParam("legacy_field_mask")
+ private String legacyFieldMask;
+
+ /** */
+ @JsonProperty("legacy_timestamp")
+ @QueryParam("legacy_timestamp")
+ private String legacyTimestamp;
+
+ /** */
+ @JsonProperty("list_value")
+ @QueryParam("list_value")
+  private Collection<Object /* MISSING TYPE */> listValue;
+
+ /** Lint disable reason: This is a dummy field used to test SDK Generation logic. */
+ @JsonProperty("map")
+ @QueryParam("map")
+  private Map<String, String> mapValue;
+
+ /** */
+ @JsonProperty("optional_bool")
+ @QueryParam("optional_bool")
+ private Boolean optionalBool;
+
+ /** */
+ @JsonProperty("optional_int32")
+ @QueryParam("optional_int32")
+ private Long optionalInt32;
+
+ /** */
+ @JsonProperty("optional_int64")
+ @QueryParam("optional_int64")
+ private Long optionalInt64;
+
+ /** */
+ @JsonProperty("optional_message")
+ @QueryParam("optional_message")
+ private NestedMessage optionalMessage;
+
+ /** */
+ @JsonProperty("optional_string")
+ @QueryParam("optional_string")
+ private String optionalString;
+
+ /** */
+ @JsonProperty("struct")
+ @QueryParam("struct")
+  private Map<String, Object /* MISSING TYPE */> structValue;
+
+ /** */
+ @JsonProperty("test_enum")
+ @QueryParam("test_enum")
+ private TestEnum testEnum;
+
+ /** */
+ @JsonProperty("timestamp")
+ @QueryParam("timestamp")
+ private String timestamp;
+
+ /** */
+ @JsonProperty("value")
+ @QueryParam("value")
+ private Object /* MISSING TYPE */ value;
+
+ public OptionalFields setDuration(String duration) {
+ this.duration = duration;
+ return this;
+ }
+
+ public String getDuration() {
+ return duration;
+ }
+
+ public OptionalFields setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public OptionalFields setLegacyDuration(String legacyDuration) {
+ this.legacyDuration = legacyDuration;
+ return this;
+ }
+
+ public String getLegacyDuration() {
+ return legacyDuration;
+ }
+
+ public OptionalFields setLegacyFieldMask(String legacyFieldMask) {
+ this.legacyFieldMask = legacyFieldMask;
+ return this;
+ }
+
+ public String getLegacyFieldMask() {
+ return legacyFieldMask;
+ }
+
+ public OptionalFields setLegacyTimestamp(String legacyTimestamp) {
+ this.legacyTimestamp = legacyTimestamp;
+ return this;
+ }
+
+ public String getLegacyTimestamp() {
+ return legacyTimestamp;
+ }
+
+  public OptionalFields setListValue(Collection<Object /* MISSING TYPE */> listValue) {
+ this.listValue = listValue;
+ return this;
+ }
+
+  public Collection<Object /* MISSING TYPE */> getListValue() {
+ return listValue;
+ }
+
+  public OptionalFields setMap(Map<String, String> mapValue) {
+ this.mapValue = mapValue;
+ return this;
+ }
+
+  public Map<String, String> getMap() {
+ return mapValue;
+ }
+
+ public OptionalFields setOptionalBool(Boolean optionalBool) {
+ this.optionalBool = optionalBool;
+ return this;
+ }
+
+ public Boolean getOptionalBool() {
+ return optionalBool;
+ }
+
+ public OptionalFields setOptionalInt32(Long optionalInt32) {
+ this.optionalInt32 = optionalInt32;
+ return this;
+ }
+
+ public Long getOptionalInt32() {
+ return optionalInt32;
+ }
+
+ public OptionalFields setOptionalInt64(Long optionalInt64) {
+ this.optionalInt64 = optionalInt64;
+ return this;
+ }
+
+ public Long getOptionalInt64() {
+ return optionalInt64;
+ }
+
+ public OptionalFields setOptionalMessage(NestedMessage optionalMessage) {
+ this.optionalMessage = optionalMessage;
+ return this;
+ }
+
+ public NestedMessage getOptionalMessage() {
+ return optionalMessage;
+ }
+
+ public OptionalFields setOptionalString(String optionalString) {
+ this.optionalString = optionalString;
+ return this;
+ }
+
+ public String getOptionalString() {
+ return optionalString;
+ }
+
+  public OptionalFields setStruct(Map<String, Object /* MISSING TYPE */> structValue) {
+ this.structValue = structValue;
+ return this;
+ }
+
+  public Map<String, Object /* MISSING TYPE */> getStruct() {
+ return structValue;
+ }
+
+ public OptionalFields setTestEnum(TestEnum testEnum) {
+ this.testEnum = testEnum;
+ return this;
+ }
+
+ public TestEnum getTestEnum() {
+ return testEnum;
+ }
+
+ public OptionalFields setTimestamp(String timestamp) {
+ this.timestamp = timestamp;
+ return this;
+ }
+
+ public String getTimestamp() {
+ return timestamp;
+ }
+
+ public OptionalFields setValue(Object /* MISSING TYPE */ value) {
+ this.value = value;
+ return this;
+ }
+
+ public Object /* MISSING TYPE */ getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ OptionalFields that = (OptionalFields) o;
+ return Objects.equals(duration, that.duration)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(legacyDuration, that.legacyDuration)
+ && Objects.equals(legacyFieldMask, that.legacyFieldMask)
+ && Objects.equals(legacyTimestamp, that.legacyTimestamp)
+ && Objects.equals(listValue, that.listValue)
+ && Objects.equals(mapValue, that.mapValue)
+ && Objects.equals(optionalBool, that.optionalBool)
+ && Objects.equals(optionalInt32, that.optionalInt32)
+ && Objects.equals(optionalInt64, that.optionalInt64)
+ && Objects.equals(optionalMessage, that.optionalMessage)
+ && Objects.equals(optionalString, that.optionalString)
+ && Objects.equals(structValue, that.structValue)
+ && Objects.equals(testEnum, that.testEnum)
+ && Objects.equals(timestamp, that.timestamp)
+ && Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ duration,
+ fieldMask,
+ legacyDuration,
+ legacyFieldMask,
+ legacyTimestamp,
+ listValue,
+ mapValue,
+ optionalBool,
+ optionalInt32,
+ optionalInt64,
+ optionalMessage,
+ optionalString,
+ structValue,
+ testEnum,
+ timestamp,
+ value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(OptionalFields.class)
+ .add("duration", duration)
+ .add("fieldMask", fieldMask)
+ .add("legacyDuration", legacyDuration)
+ .add("legacyFieldMask", legacyFieldMask)
+ .add("legacyTimestamp", legacyTimestamp)
+ .add("listValue", listValue)
+ .add("mapValue", mapValue)
+ .add("optionalBool", optionalBool)
+ .add("optionalInt32", optionalInt32)
+ .add("optionalInt64", optionalInt64)
+ .add("optionalMessage", optionalMessage)
+ .add("optionalString", optionalString)
+ .add("structValue", structValue)
+ .add("testEnum", testEnum)
+ .add("timestamp", timestamp)
+ .add("value", value)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java
new file mode 100755
index 000000000..4595b4a73
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RepeatedFields.java
@@ -0,0 +1,238 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Objects;
+
+@Generated
+public class RepeatedFields {
+  /** */
+  @JsonProperty("repeated_bool")
+  @QueryParam("repeated_bool")
+  private Collection<Boolean> repeatedBool;
+
+  /** */
+  @JsonProperty("repeated_duration")
+  @QueryParam("repeated_duration")
+  private Collection<String> repeatedDuration;
+
+  /** */
+  @JsonProperty("repeated_field_mask")
+  @QueryParam("repeated_field_mask")
+  private Collection<String> repeatedFieldMask;
+
+  /** */
+  @JsonProperty("repeated_int32")
+  @QueryParam("repeated_int32")
+  private Collection<Long> repeatedInt32;
+
+  /** */
+  @JsonProperty("repeated_int64")
+  @QueryParam("repeated_int64")
+  private Collection<Long> repeatedInt64;
+
+  /** */
+  @JsonProperty("repeated_list_value")
+  @QueryParam("repeated_list_value")
+  private Collection<Collection<Object /* MISSING TYPE */>> repeatedListValue;
+
+  /** */
+  @JsonProperty("repeated_message")
+  @QueryParam("repeated_message")
+  private Collection<NestedMessage> repeatedMessage;
+
+  /** */
+  @JsonProperty("repeated_string")
+  @QueryParam("repeated_string")
+  private Collection<String> repeatedString;
+
+  /** */
+  @JsonProperty("repeated_struct")
+  @QueryParam("repeated_struct")
+  private Collection<Map<String, Object /* MISSING TYPE */>> repeatedStruct;
+
+  /** */
+  @JsonProperty("repeated_timestamp")
+  @QueryParam("repeated_timestamp")
+  private Collection<String> repeatedTimestamp;
+
+  /** */
+  @JsonProperty("repeated_value")
+  @QueryParam("repeated_value")
+  private Collection<Object /* MISSING TYPE */> repeatedValue;
+
+  /** */
+  @JsonProperty("test_repeated_enum")
+  @QueryParam("test_repeated_enum")
+  private Collection<TestEnum> testRepeatedEnum;
+
+  public RepeatedFields setRepeatedBool(Collection<Boolean> repeatedBool) {
+    this.repeatedBool = repeatedBool;
+    return this;
+  }
+
+  public Collection<Boolean> getRepeatedBool() {
+    return repeatedBool;
+  }
+
+  public RepeatedFields setRepeatedDuration(Collection<String> repeatedDuration) {
+    this.repeatedDuration = repeatedDuration;
+    return this;
+  }
+
+  public Collection<String> getRepeatedDuration() {
+    return repeatedDuration;
+  }
+
+  public RepeatedFields setRepeatedFieldMask(Collection<String> repeatedFieldMask) {
+    this.repeatedFieldMask = repeatedFieldMask;
+    return this;
+  }
+
+  public Collection<String> getRepeatedFieldMask() {
+    return repeatedFieldMask;
+  }
+
+  public RepeatedFields setRepeatedInt32(Collection<Long> repeatedInt32) {
+    this.repeatedInt32 = repeatedInt32;
+    return this;
+  }
+
+  public Collection<Long> getRepeatedInt32() {
+    return repeatedInt32;
+  }
+
+  public RepeatedFields setRepeatedInt64(Collection<Long> repeatedInt64) {
+    this.repeatedInt64 = repeatedInt64;
+    return this;
+  }
+
+  public Collection<Long> getRepeatedInt64() {
+    return repeatedInt64;
+  }
+
+  public RepeatedFields setRepeatedListValue(
+      Collection<Collection<Object /* MISSING TYPE */>> repeatedListValue) {
+    this.repeatedListValue = repeatedListValue;
+    return this;
+  }
+
+  public Collection<Collection<Object /* MISSING TYPE */>> getRepeatedListValue() {
+    return repeatedListValue;
+  }
+
+  public RepeatedFields setRepeatedMessage(Collection<NestedMessage> repeatedMessage) {
+    this.repeatedMessage = repeatedMessage;
+    return this;
+  }
+
+  public Collection<NestedMessage> getRepeatedMessage() {
+    return repeatedMessage;
+  }
+
+  public RepeatedFields setRepeatedString(Collection<String> repeatedString) {
+    this.repeatedString = repeatedString;
+    return this;
+  }
+
+  public Collection<String> getRepeatedString() {
+    return repeatedString;
+  }
+
+  public RepeatedFields setRepeatedStruct(
+      Collection<Map<String, Object /* MISSING TYPE */>> repeatedStruct) {
+    this.repeatedStruct = repeatedStruct;
+    return this;
+  }
+
+  public Collection<Map<String, Object /* MISSING TYPE */>> getRepeatedStruct() {
+    return repeatedStruct;
+  }
+
+  public RepeatedFields setRepeatedTimestamp(Collection<String> repeatedTimestamp) {
+    this.repeatedTimestamp = repeatedTimestamp;
+    return this;
+  }
+
+  public Collection<String> getRepeatedTimestamp() {
+    return repeatedTimestamp;
+  }
+
+  public RepeatedFields setRepeatedValue(Collection<Object /* MISSING TYPE */> repeatedValue) {
+    this.repeatedValue = repeatedValue;
+    return this;
+  }
+
+  public Collection<Object /* MISSING TYPE */> getRepeatedValue() {
+    return repeatedValue;
+  }
+
+  public RepeatedFields setTestRepeatedEnum(Collection<TestEnum> testRepeatedEnum) {
+    this.testRepeatedEnum = testRepeatedEnum;
+    return this;
+  }
+
+  public Collection<TestEnum> getTestRepeatedEnum() {
+    return testRepeatedEnum;
+  }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RepeatedFields that = (RepeatedFields) o;
+ return Objects.equals(repeatedBool, that.repeatedBool)
+ && Objects.equals(repeatedDuration, that.repeatedDuration)
+ && Objects.equals(repeatedFieldMask, that.repeatedFieldMask)
+ && Objects.equals(repeatedInt32, that.repeatedInt32)
+ && Objects.equals(repeatedInt64, that.repeatedInt64)
+ && Objects.equals(repeatedListValue, that.repeatedListValue)
+ && Objects.equals(repeatedMessage, that.repeatedMessage)
+ && Objects.equals(repeatedString, that.repeatedString)
+ && Objects.equals(repeatedStruct, that.repeatedStruct)
+ && Objects.equals(repeatedTimestamp, that.repeatedTimestamp)
+ && Objects.equals(repeatedValue, that.repeatedValue)
+ && Objects.equals(testRepeatedEnum, that.testRepeatedEnum);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ repeatedBool,
+ repeatedDuration,
+ repeatedFieldMask,
+ repeatedInt32,
+ repeatedInt64,
+ repeatedListValue,
+ repeatedMessage,
+ repeatedString,
+ repeatedStruct,
+ repeatedTimestamp,
+ repeatedValue,
+ testRepeatedEnum);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RepeatedFields.class)
+ .add("repeatedBool", repeatedBool)
+ .add("repeatedDuration", repeatedDuration)
+ .add("repeatedFieldMask", repeatedFieldMask)
+ .add("repeatedInt32", repeatedInt32)
+ .add("repeatedInt64", repeatedInt64)
+ .add("repeatedListValue", repeatedListValue)
+ .add("repeatedMessage", repeatedMessage)
+ .add("repeatedString", repeatedString)
+ .add("repeatedStruct", repeatedStruct)
+ .add("repeatedTimestamp", repeatedTimestamp)
+ .add("repeatedValue", repeatedValue)
+ .add("testRepeatedEnum", testRepeatedEnum)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RequiredFields.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RequiredFields.java
new file mode 100755
index 000000000..1caa4fa2b
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/RequiredFields.java
@@ -0,0 +1,243 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Objects;
+
+@Generated
+public class RequiredFields {
+ /** */
+ @JsonProperty("required_bool")
+ @QueryParam("required_bool")
+ private Boolean requiredBool;
+
+ /** */
+ @JsonProperty("required_duration")
+ @QueryParam("required_duration")
+ private String requiredDuration;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ */
+ @JsonProperty("required_field_mask")
+ @QueryParam("required_field_mask")
+ private String requiredFieldMask;
+
+ /** */
+ @JsonProperty("required_int32")
+ @QueryParam("required_int32")
+ private Long requiredInt32;
+
+ /** */
+ @JsonProperty("required_int64")
+ @QueryParam("required_int64")
+ private Long requiredInt64;
+
+ /** */
+ @JsonProperty("required_list_value")
+ @QueryParam("required_list_value")
+  private Collection<Object /* MISSING TYPE */> requiredListValue;
+
+ /** */
+ @JsonProperty("required_message")
+ @QueryParam("required_message")
+ private NestedMessage requiredMessage;
+
+ /** */
+ @JsonProperty("required_string")
+ @QueryParam("required_string")
+ private String requiredString;
+
+ /** */
+ @JsonProperty("required_struct")
+ @QueryParam("required_struct")
+  private Map<String, Object /* MISSING TYPE */> requiredStruct;
+
+ /** */
+ @JsonProperty("required_timestamp")
+ @QueryParam("required_timestamp")
+ private String requiredTimestamp;
+
+ /** */
+ @JsonProperty("required_value")
+ @QueryParam("required_value")
+ private Object /* MISSING TYPE */ requiredValue;
+
+ /** */
+ @JsonProperty("test_required_enum")
+ @QueryParam("test_required_enum")
+ private TestEnum testRequiredEnum;
+
+ public RequiredFields setRequiredBool(Boolean requiredBool) {
+ this.requiredBool = requiredBool;
+ return this;
+ }
+
+ public Boolean getRequiredBool() {
+ return requiredBool;
+ }
+
+ public RequiredFields setRequiredDuration(String requiredDuration) {
+ this.requiredDuration = requiredDuration;
+ return this;
+ }
+
+ public String getRequiredDuration() {
+ return requiredDuration;
+ }
+
+ public RequiredFields setRequiredFieldMask(String requiredFieldMask) {
+ this.requiredFieldMask = requiredFieldMask;
+ return this;
+ }
+
+ public String getRequiredFieldMask() {
+ return requiredFieldMask;
+ }
+
+ public RequiredFields setRequiredInt32(Long requiredInt32) {
+ this.requiredInt32 = requiredInt32;
+ return this;
+ }
+
+ public Long getRequiredInt32() {
+ return requiredInt32;
+ }
+
+ public RequiredFields setRequiredInt64(Long requiredInt64) {
+ this.requiredInt64 = requiredInt64;
+ return this;
+ }
+
+ public Long getRequiredInt64() {
+ return requiredInt64;
+ }
+
+  public RequiredFields setRequiredListValue(
+      Collection<Object /* MISSING TYPE */> requiredListValue) {
+ this.requiredListValue = requiredListValue;
+ return this;
+ }
+
+  public Collection<Object /* MISSING TYPE */> getRequiredListValue() {
+ return requiredListValue;
+ }
+
+ public RequiredFields setRequiredMessage(NestedMessage requiredMessage) {
+ this.requiredMessage = requiredMessage;
+ return this;
+ }
+
+ public NestedMessage getRequiredMessage() {
+ return requiredMessage;
+ }
+
+ public RequiredFields setRequiredString(String requiredString) {
+ this.requiredString = requiredString;
+ return this;
+ }
+
+ public String getRequiredString() {
+ return requiredString;
+ }
+
+  public RequiredFields setRequiredStruct(Map<String, Object /* MISSING TYPE */> requiredStruct) {
+ this.requiredStruct = requiredStruct;
+ return this;
+ }
+
+  public Map<String, Object /* MISSING TYPE */> getRequiredStruct() {
+ return requiredStruct;
+ }
+
+ public RequiredFields setRequiredTimestamp(String requiredTimestamp) {
+ this.requiredTimestamp = requiredTimestamp;
+ return this;
+ }
+
+ public String getRequiredTimestamp() {
+ return requiredTimestamp;
+ }
+
+ public RequiredFields setRequiredValue(Object /* MISSING TYPE */ requiredValue) {
+ this.requiredValue = requiredValue;
+ return this;
+ }
+
+ public Object /* MISSING TYPE */ getRequiredValue() {
+ return requiredValue;
+ }
+
+ public RequiredFields setTestRequiredEnum(TestEnum testRequiredEnum) {
+ this.testRequiredEnum = testRequiredEnum;
+ return this;
+ }
+
+ public TestEnum getTestRequiredEnum() {
+ return testRequiredEnum;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RequiredFields that = (RequiredFields) o;
+ return Objects.equals(requiredBool, that.requiredBool)
+ && Objects.equals(requiredDuration, that.requiredDuration)
+ && Objects.equals(requiredFieldMask, that.requiredFieldMask)
+ && Objects.equals(requiredInt32, that.requiredInt32)
+ && Objects.equals(requiredInt64, that.requiredInt64)
+ && Objects.equals(requiredListValue, that.requiredListValue)
+ && Objects.equals(requiredMessage, that.requiredMessage)
+ && Objects.equals(requiredString, that.requiredString)
+ && Objects.equals(requiredStruct, that.requiredStruct)
+ && Objects.equals(requiredTimestamp, that.requiredTimestamp)
+ && Objects.equals(requiredValue, that.requiredValue)
+ && Objects.equals(testRequiredEnum, that.testRequiredEnum);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ requiredBool,
+ requiredDuration,
+ requiredFieldMask,
+ requiredInt32,
+ requiredInt64,
+ requiredListValue,
+ requiredMessage,
+ requiredString,
+ requiredStruct,
+ requiredTimestamp,
+ requiredValue,
+ testRequiredEnum);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RequiredFields.class)
+ .add("requiredBool", requiredBool)
+ .add("requiredDuration", requiredDuration)
+ .add("requiredFieldMask", requiredFieldMask)
+ .add("requiredInt32", requiredInt32)
+ .add("requiredInt64", requiredInt64)
+ .add("requiredListValue", requiredListValue)
+ .add("requiredMessage", requiredMessage)
+ .add("requiredString", requiredString)
+ .add("requiredStruct", requiredStruct)
+ .add("requiredTimestamp", requiredTimestamp)
+ .add("requiredValue", requiredValue)
+ .add("testRequiredEnum", testRequiredEnum)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/Resource.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/Resource.java
new file mode 100755
index 000000000..09141d80a
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/Resource.java
@@ -0,0 +1,82 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Top-level container that splits the test fields into three submessages (optional, repeated,
+ * required). The split keeps test cases small: any required top-level field would otherwise have
+ * to appear in the expected JSON of every single test case.
+ */
+@Generated
+public class Resource {
+  /** */
+  @JsonProperty("optional_fields")
+  @QueryParam("optional_fields")
+  private OptionalFields optionalFields;
+
+  /** */
+  @JsonProperty("repeated_fields")
+  @QueryParam("repeated_fields")
+  private RepeatedFields repeatedFields;
+
+  /** */
+  @JsonProperty("required_fields")
+  @QueryParam("required_fields")
+  private RequiredFields requiredFields;
+
+  /** Sets the optional-fields submessage; returns {@code this} for chaining. */
+  public Resource setOptionalFields(OptionalFields optionalFields) {
+    this.optionalFields = optionalFields;
+    return this;
+  }
+
+  public OptionalFields getOptionalFields() {
+    return optionalFields;
+  }
+
+  /** Sets the repeated-fields submessage; returns {@code this} for chaining. */
+  public Resource setRepeatedFields(RepeatedFields repeatedFields) {
+    this.repeatedFields = repeatedFields;
+    return this;
+  }
+
+  public RepeatedFields getRepeatedFields() {
+    return repeatedFields;
+  }
+
+  /** Sets the required-fields submessage; returns {@code this} for chaining. */
+  public Resource setRequiredFields(RequiredFields requiredFields) {
+    this.requiredFields = requiredFields;
+    return this;
+  }
+
+  public RequiredFields getRequiredFields() {
+    return requiredFields;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    Resource other = (Resource) o;
+    return Objects.equals(optionalFields, other.optionalFields)
+        && Objects.equals(repeatedFields, other.repeatedFields)
+        && Objects.equals(requiredFields, other.requiredFields);
+  }
+
+  @Override
+  public int hashCode() {
+    // Same field order as equals(), per the Object contract.
+    return Objects.hash(optionalFields, repeatedFields, requiredFields);
+  }
+
+  @Override
+  public String toString() {
+    ToStringer stringer = new ToStringer(Resource.class);
+    stringer = stringer.add("optionalFields", optionalFields);
+    stringer = stringer.add("repeatedFields", repeatedFields);
+    stringer = stringer.add("requiredFields", requiredFields);
+    return stringer.toString();
+  }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/TestEnum.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/TestEnum.java
new file mode 100755
index 000000000..48684e38a
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/jsonmarshallv2/TestEnum.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jsonmarshallv2;
+
+import com.databricks.sdk.support.Generated;
+
+// Two-constant enum fixture for the jsonmarshallv2 test messages.
+@Generated
+public enum TestEnum {
+  TEST_ENUM_ONE,
+  TEST_ENUM_TWO,
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOperationRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOperationRequest.java
new file mode 100755
index 000000000..41ead0280
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOperationRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.lrotesting;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class CancelOperationRequest {
+  /** The name of the operation resource to be cancelled. */
+  @JsonIgnore private String name;
+
+  /** Sets the operation name; returns {@code this} for chaining. */
+  public CancelOperationRequest setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    CancelOperationRequest other = (CancelOperationRequest) o;
+    return Objects.equals(name, other.name);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name);
+  }
+
+  @Override
+  public String toString() {
+    ToStringer stringer = new ToStringer(CancelOperationRequest.class);
+    stringer = stringer.add("name", name);
+    return stringer.toString();
+  }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java
new file mode 100755
index 000000000..b3e1f28ed
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java
@@ -0,0 +1,166 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.lrotesting;
+
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.utils.SerDeUtils;
+import com.databricks.sdk.service.common.lro.LroOptions;
+import com.databricks.sdk.support.Generated;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.time.Duration;
+import java.util.Optional;
+import java.util.concurrent.TimeoutException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Wrapper for interacting with a long-running createTestResource operation. Provides methods to
+ * wait for completion, check status, cancel, and access metadata.
+ */
+@Generated
+public class CreateTestResourceOperation {
+  private static final Logger LOG = LoggerFactory.getLogger(CreateTestResourceOperation.class);
+  private static final Duration DEFAULT_TIMEOUT = Duration.ofMinutes(20);
+
+  private final LroTestingService impl;
+  // Latest known server-side state; replaced on every refreshOperation() call.
+  private Operation operation;
+  private final ObjectMapper objectMapper;
+
+  public CreateTestResourceOperation(LroTestingService impl, Operation operation) {
+    this.impl = impl;
+    this.operation = operation;
+    this.objectMapper = SerDeUtils.createMapper();
+  }
+
+  /**
+   * Wait for the operation to complete and return the resulting TestResource. Uses the default
+   * timeout of 20 minutes.
+   *
+   * @return the created TestResource
+   * @throws TimeoutException if the operation doesn't complete within the timeout
+   * @throws DatabricksException if the operation fails
+   */
+  public TestResource waitForCompletion() throws TimeoutException {
+    return waitForCompletion(Optional.empty());
+  }
+
+  /**
+   * Wait for the operation to complete and return the resulting TestResource.
+   *
+   * @param options the options for configuring the wait behavior, can be empty for defaults
+   * @return the created TestResource
+   * @throws TimeoutException if the operation doesn't complete within the timeout
+   * @throws DatabricksException if the operation fails
+   */
+  public TestResource waitForCompletion(Optional<LroOptions> options) throws TimeoutException {
+    Duration timeout = options.flatMap(LroOptions::getTimeout).orElse(DEFAULT_TIMEOUT);
+    long deadline = System.currentTimeMillis() + timeout.toMillis();
+    String statusMessage = "polling operation...";
+    int attempt = 1;
+
+    while (System.currentTimeMillis() < deadline) {
+      // Refresh the operation state
+      refreshOperation();
+
+      if (operation.getDone() != null && operation.getDone()) {
+        // Operation completed, check for success or failure
+        if (operation.getError() != null) {
+          String errorMsg = "unknown error";
+          if (operation.getError().getMessage() != null
+              && !operation.getError().getMessage().isEmpty()) {
+            errorMsg = operation.getError().getMessage();
+          }
+
+          if (operation.getError().getErrorCode() != null) {
+            errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg);
+          }
+
+          throw new DatabricksException("Operation failed: " + errorMsg);
+        }
+
+        // Operation completed successfully, unmarshal response
+        if (operation.getResponse() == null) {
+          throw new DatabricksException("Operation completed but no response available");
+        }
+
+        try {
+          JsonNode responseJson = objectMapper.valueToTree(operation.getResponse());
+          return objectMapper.treeToValue(responseJson, TestResource.class);
+        } catch (JsonProcessingException e) {
+          throw new DatabricksException(
+              "Failed to unmarshal testResource response: " + e.getMessage(), e);
+        }
+      }
+
+      // Operation still in progress, wait before polling again (linear backoff with jitter).
+      String prefix = String.format("operation=%s", operation.getName());
+      int sleep = Math.min(attempt, 10); // sleep 10s max per attempt
+      LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep);
+
+      try {
+        Thread.sleep((long) (sleep * 1000L + Math.random() * 1000));
+      } catch (InterruptedException e) {
+        // Restore the interrupt flag before surfacing the failure.
+        Thread.currentThread().interrupt();
+        throw new DatabricksException("Current thread was interrupted", e);
+      }
+      attempt++;
+    }
+
+    throw new TimeoutException(
+        String.format("Operation timed out after %s: %s", timeout, statusMessage));
+  }
+
+  /**
+   * Cancel the operation.
+   *
+   * @throws DatabricksException if the cancellation fails
+   */
+  public void cancel() {
+    impl.cancelOperation(new CancelOperationRequest().setName(operation.getName()));
+  }
+
+  /**
+   * Get the operation name.
+   *
+   * @return the operation name
+   */
+  public String getName() {
+    return operation.getName();
+  }
+
+  /**
+   * Get the operation metadata.
+   *
+   * @return the operation metadata, or null if not available
+   * @throws DatabricksException if the metadata cannot be deserialized
+   */
+  public TestResourceOperationMetadata getMetadata() {
+    if (operation.getMetadata() == null) {
+      return null;
+    }
+
+    try {
+      JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata());
+      return objectMapper.treeToValue(metadataJson, TestResourceOperationMetadata.class);
+    } catch (JsonProcessingException e) {
+      throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e);
+    }
+  }
+
+  /**
+   * Check if the operation is done. This method refreshes the operation state before checking.
+   *
+   * @return true if the operation is complete, false otherwise
+   * @throws DatabricksException if the status check fails
+   */
+  public boolean isDone() {
+    refreshOperation();
+    return operation.getDone() != null && operation.getDone();
+  }
+
+  /** Refresh the operation state by polling the server. */
+  private void refreshOperation() {
+    operation = impl.getOperation(new GetOperationRequest().setName(operation.getName()));
+  }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java
new file mode 100755
index 000000000..428aca8f1
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.lrotesting;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateTestResourceRequest {
+  /** The resource to create */
+  @JsonProperty("resource")
+  private TestResource resource;
+
+  /** Sets the resource to create; returns {@code this} for chaining. */
+  public CreateTestResourceRequest setResource(TestResource resource) {
+    this.resource = resource;
+    return this;
+  }
+
+  public TestResource getResource() {
+    return resource;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    CreateTestResourceRequest other = (CreateTestResourceRequest) o;
+    return Objects.equals(resource, other.resource);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(resource);
+  }
+
+  @Override
+  public String toString() {
+    ToStringer stringer = new ToStringer(CreateTestResourceRequest.class);
+    stringer = stringer.add("resource", resource);
+    return stringer.toString();
+  }
+}
diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java
new file mode 100755
index 000000000..e2254a076
--- /dev/null
+++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java
@@ -0,0 +1,100 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.lrotesting;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+/**
+ * Serialization format for DatabricksServiceException with error details. This message doesn't work
+ * for ScalaPB-04, as google.protobuf.Any is only available in ScalaPB-09. Note the definition of
+ * this message should be in sync with DatabricksServiceExceptionProto defined in
+ * /api-base/proto/legacy/databricks.proto, except the latter one doesn't have the error details field
+ * defined.
+ */
+@Generated
+public class DatabricksServiceExceptionWithDetailsProto {
+ /**
+ * @pbjson-skip
+ */
+ @JsonProperty("details")
+ private Collection