diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 1989860d0..aa1180c39 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -2304d9e46d27bb60c2ba3a3185adf302a0ecd17a \ No newline at end of file +b54bbd860200d735fa2c306ec1559090625370e6 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 820ada772..d3dfd069a 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1,4 +1,6 @@ +HttpPathTest.java linguist-generated=true LroTestingAPITest.java linguist-generated=true +MarshallTest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java linguist-generated=true @@ -1681,6 +1683,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastin databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateLoggedModelResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateMaterializedFeatureRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateModelVersionRequest.java linguist-generated=true @@ -1701,6 +1704,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureReq databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteFeatureTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteLoggedModelTagRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteMaterializedFeatureRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelTagRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteModelVersionRequest.java linguist-generated=true @@ -1766,6 +1770,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersion databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMaterializedFeatureRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponse.java linguist-generated=true @@ -1793,6 +1798,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeatureTagsR databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeatureTagsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeaturesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListFeaturesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListModelsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListOnlineStoresRequest.java linguist-generated=true @@ -1814,6 +1821,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelInfo. databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelParameter.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/LoggedModelTag.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturePipelineScheduleState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturesService.java linguist-generated=true @@ -1830,6 +1839,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersion.jav databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionDatabricks.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelVersionTag.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OfflineStoreConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStore.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OnlineStoreState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Param.java linguist-generated=true @@ -1892,6 +1902,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateCommentRes databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateExperiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateFeatureTagRequest.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateMaterializedFeatureRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateModelVersionRequest.java linguist-generated=true @@ -3143,6 +3154,8 @@ databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CancelOp databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceOperation.java linguist-generated=true databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/CreateTestResourceRequest.java linguist-generated=true databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DatabricksServiceExceptionWithDetailsProto.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceOperation.java linguist-generated=true +databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceRequest.java linguist-generated=true databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/ErrorCode.java linguist-generated=true databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetOperationRequest.java linguist-generated=true databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/GetTestResourceRequest.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index b4585a98d..6dc0a4826 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,10 @@ ### Internal Changes ### API Changes +* Add `createMaterializedFeature()`, `deleteMaterializedFeature()`, `getMaterializedFeature()`, `listMaterializedFeatures()` and `updateMaterializedFeature()` methods for the `workspaceClient.featureEngineering()` service. +* Add `filterCondition` field for `com.databricks.sdk.service.ml.Feature`. +* [Breaking] Change `displayName`, `evaluation`, `queryText`, `schedule` and `warehouseId` fields for `com.databricks.sdk.service.sql.AlertV2` to be required. +* [Breaking] Change `comparisonOperator` and `source` fields for `com.databricks.sdk.service.sql.AlertV2Evaluation` to be required. +* [Breaking] Change `name` field for `com.databricks.sdk.service.sql.AlertV2OperandColumn` to be required. +* [Breaking] Change `quartzCronSchedule` and `timezoneId` fields for `com.databricks.sdk.service.sql.CronSchedule` to be required. +* [Breaking] Remove `results` field for `com.databricks.sdk.service.sql.ListAlertsV2Response`.
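For context on the API surface added in this change, below is a minimal usage sketch of the new materialized-feature methods exposed via `workspaceClient.featureEngineering()` (per the changelog entry above). The class names, method signatures, and the `PAUSED` schedule state come from the generated code in this diff; the feature name, catalog, schema, table prefix, and the `update_mask` string are illustrative assumptions only, not values defined by this change.

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.ml.CreateMaterializedFeatureRequest;
import com.databricks.sdk.service.ml.ListMaterializedFeaturesRequest;
import com.databricks.sdk.service.ml.MaterializedFeature;
import com.databricks.sdk.service.ml.MaterializedFeaturePipelineScheduleState;
import com.databricks.sdk.service.ml.OfflineStoreConfig;
import com.databricks.sdk.service.ml.UpdateMaterializedFeatureRequest;

public class MaterializedFeatureExample {
  public static void main(String[] args) {
    // Authenticates from the environment (e.g. DATABRICKS_HOST / DATABRICKS_TOKEN).
    WorkspaceClient w = new WorkspaceClient();

    // Create a materialized feature backed by an offline (Unity Catalog) store.
    // The feature name, catalog, schema and prefix are placeholders.
    MaterializedFeature created =
        w.featureEngineering()
            .createMaterializedFeature(
                new CreateMaterializedFeatureRequest()
                    .setMaterializedFeature(
                        new MaterializedFeature()
                            .setFeatureName("main.features.avg_purchase_7d")
                            .setOfflineStoreConfig(
                                new OfflineStoreConfig()
                                    .setCatalogName("main")
                                    .setSchemaName("feature_store")
                                    .setTableNamePrefix("avg_purchase_7d"))));

    // List materialized features derived from the same source feature.
    for (MaterializedFeature mf :
        w.featureEngineering()
            .listMaterializedFeatures(
                new ListMaterializedFeaturesRequest()
                    .setFeatureName("main.features.avg_purchase_7d")
                    .setPageSize(100L))) {
      System.out.println(mf.getMaterializedFeatureId() + " -> " + mf.getTableName());
    }

    // Pause the materialization pipeline. Per the generated docs, only the pipeline
    // schedule state can be updated; the update_mask value below is assumed to match
    // the wire field name and may need to be adjusted.
    w.featureEngineering()
        .updateMaterializedFeature(
            new UpdateMaterializedFeatureRequest()
                .setMaterializedFeatureId(created.getMaterializedFeatureId())
                .setUpdateMask("pipeline_schedule_state")
                .setMaterializedFeature(
                    new MaterializedFeature()
                        .setPipelineScheduleState(
                            MaterializedFeaturePipelineScheduleState.PAUSED)));

    // Fetch and then delete by ID using the convenience overloads.
    MaterializedFeature fetched =
        w.featureEngineering().getMaterializedFeature(created.getMaterializedFeatureId());
    w.featureEngineering().deleteMaterializedFeature(fetched.getMaterializedFeatureId());
  }
}
```

All request and model classes use fluent setters that return `this`, and `listMaterializedFeatures` wraps the response in a `Paginator` that follows `next_page_token`, so the loop above iterates across pages without manual token handling.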
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java index 4bd7689e0..e4c7f6f0a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java @@ -8,7 +8,7 @@ import java.util.Map; import java.util.Objects; -/** Next ID: 41 */ +/** Next ID: 42 */ @Generated public class SchemaInfo { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTask.java index 2c53eebed..7822d54e9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CleanRoomsNotebookTask.java @@ -8,6 +8,10 @@ import java.util.Map; import java.util.Objects; +/** + * Clean Rooms notebook task for V1 Clean Room service (GA). Replaces the deprecated + * CleanRoomNotebookTask (defined above) which was for V0 service. + */ @Generated public class CleanRoomsNotebookTask { /** The clean room that the notebook belongs to. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateMaterializedFeatureRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateMaterializedFeatureRequest.java new file mode 100755 index 000000000..dd40c0fec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateMaterializedFeatureRequest.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateMaterializedFeatureRequest { + /** The materialized feature to create. */ + @JsonProperty("materialized_feature") + private MaterializedFeature materializedFeature; + + public CreateMaterializedFeatureRequest setMaterializedFeature( + MaterializedFeature materializedFeature) { + this.materializedFeature = materializedFeature; + return this; + } + + public MaterializedFeature getMaterializedFeature() { + return materializedFeature; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateMaterializedFeatureRequest that = (CreateMaterializedFeatureRequest) o; + return Objects.equals(materializedFeature, that.materializedFeature); + } + + @Override + public int hashCode() { + return Objects.hash(materializedFeature); + } + + @Override + public String toString() { + return new ToStringer(CreateMaterializedFeatureRequest.class) + .add("materializedFeature", materializedFeature) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteMaterializedFeatureRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteMaterializedFeatureRequest.java new file mode 100755 index 000000000..fca20320b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DeleteMaterializedFeatureRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteMaterializedFeatureRequest { + /** The ID of the materialized feature to delete. */ + @JsonIgnore private String materializedFeatureId; + + public DeleteMaterializedFeatureRequest setMaterializedFeatureId(String materializedFeatureId) { + this.materializedFeatureId = materializedFeatureId; + return this; + } + + public String getMaterializedFeatureId() { + return materializedFeatureId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteMaterializedFeatureRequest that = (DeleteMaterializedFeatureRequest) o; + return Objects.equals(materializedFeatureId, that.materializedFeatureId); + } + + @Override + public int hashCode() { + return Objects.hash(materializedFeatureId); + } + + @Override + public String toString() { + return new ToStringer(DeleteMaterializedFeatureRequest.class) + .add("materializedFeatureId", materializedFeatureId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java index d8eaa420c..a95df6e9e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Feature.java @@ -14,6 +14,10 @@ public class Feature { @JsonProperty("description") private String description; + /** The filter condition applied to the source data before aggregation. */ + @JsonProperty("filter_condition") + private String filterCondition; + /** The full three-part name (catalog, schema, name) of the feature. 
*/ @JsonProperty("full_name") private String fullName; @@ -43,6 +47,15 @@ public String getDescription() { return description; } + public Feature setFilterCondition(String filterCondition) { + this.filterCondition = filterCondition; + return this; + } + + public String getFilterCondition() { + return filterCondition; + } + public Feature setFullName(String fullName) { this.fullName = fullName; return this; @@ -94,6 +107,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Feature that = (Feature) o; return Objects.equals(description, that.description) + && Objects.equals(filterCondition, that.filterCondition) && Objects.equals(fullName, that.fullName) && Objects.equals(function, that.function) && Objects.equals(inputs, that.inputs) @@ -103,13 +117,15 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(description, fullName, function, inputs, source, timeWindow); + return Objects.hash( + description, filterCondition, fullName, function, inputs, source, timeWindow); } @Override public String toString() { return new ToStringer(Feature.class) .add("description", description) + .add("filterCondition", filterCondition) .add("fullName", fullName) .add("function", function) .add("inputs", inputs) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java index 549c4226e..87f248c97 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringAPI.java @@ -29,6 +29,11 @@ public Feature createFeature(CreateFeatureRequest request) { return impl.createFeature(request); } + /** Create a materialized feature. */ + public MaterializedFeature createMaterializedFeature(CreateMaterializedFeatureRequest request) { + return impl.createMaterializedFeature(request); + } + public void deleteFeature(String fullName) { deleteFeature(new DeleteFeatureRequest().setFullName(fullName)); } @@ -38,6 +43,16 @@ public void deleteFeature(DeleteFeatureRequest request) { impl.deleteFeature(request); } + public void deleteMaterializedFeature(String materializedFeatureId) { + deleteMaterializedFeature( + new DeleteMaterializedFeatureRequest().setMaterializedFeatureId(materializedFeatureId)); + } + + /** Delete a materialized feature. */ + public void deleteMaterializedFeature(DeleteMaterializedFeatureRequest request) { + impl.deleteMaterializedFeature(request); + } + public Feature getFeature(String fullName) { return getFeature(new GetFeatureRequest().setFullName(fullName)); } @@ -47,6 +62,16 @@ public Feature getFeature(GetFeatureRequest request) { return impl.getFeature(request); } + public MaterializedFeature getMaterializedFeature(String materializedFeatureId) { + return getMaterializedFeature( + new GetMaterializedFeatureRequest().setMaterializedFeatureId(materializedFeatureId)); + } + + /** Get a materialized feature. */ + public MaterializedFeature getMaterializedFeature(GetMaterializedFeatureRequest request) { + return impl.getMaterializedFeature(request); + } + /** List Features. */ public Iterable listFeatures(ListFeaturesRequest request) { return new Paginator<>( @@ -62,11 +87,32 @@ public Iterable listFeatures(ListFeaturesRequest request) { }); } + /** List materialized features. 
*/ + public Iterable listMaterializedFeatures( + ListMaterializedFeaturesRequest request) { + return new Paginator<>( + request, + impl::listMaterializedFeatures, + ListMaterializedFeaturesResponse::getMaterializedFeatures, + response -> { + String token = response.getNextPageToken(); + if (token == null || token.isEmpty()) { + return null; + } + return request.setPageToken(token); + }); + } + /** Update a Feature. */ public Feature updateFeature(UpdateFeatureRequest request) { return impl.updateFeature(request); } + /** Update a materialized feature (pause/resume). */ + public MaterializedFeature updateMaterializedFeature(UpdateMaterializedFeatureRequest request) { + return impl.updateMaterializedFeature(request); + } + public FeatureEngineeringService impl() { return impl; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringImpl.java index ec20c895e..c1f6957fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringImpl.java @@ -30,6 +30,21 @@ public Feature createFeature(CreateFeatureRequest request) { } } + @Override + public MaterializedFeature createMaterializedFeature(CreateMaterializedFeatureRequest request) { + String path = "/api/2.0/feature-engineering/materialized-features"; + try { + Request req = + new Request("POST", path, apiClient.serialize(request.getMaterializedFeature())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, MaterializedFeature.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public void deleteFeature(DeleteFeatureRequest request) { String path = String.format("/api/2.0/feature-engineering/features/%s", request.getFullName()); @@ -43,6 +58,22 @@ public void deleteFeature(DeleteFeatureRequest request) { } } + @Override + public void deleteMaterializedFeature(DeleteMaterializedFeatureRequest request) { + String path = + String.format( + "/api/2.0/feature-engineering/materialized-features/%s", + request.getMaterializedFeatureId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public Feature getFeature(GetFeatureRequest request) { String path = String.format("/api/2.0/feature-engineering/features/%s", request.getFullName()); @@ -56,6 +87,22 @@ public Feature getFeature(GetFeatureRequest request) { } } + @Override + public MaterializedFeature getMaterializedFeature(GetMaterializedFeatureRequest request) { + String path = + String.format( + "/api/2.0/feature-engineering/materialized-features/%s", + request.getMaterializedFeatureId()); + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, MaterializedFeature.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public ListFeaturesResponse listFeatures(ListFeaturesRequest request) { String path = 
"/api/2.0/feature-engineering/features"; @@ -69,6 +116,20 @@ public ListFeaturesResponse listFeatures(ListFeaturesRequest request) { } } + @Override + public ListMaterializedFeaturesResponse listMaterializedFeatures( + ListMaterializedFeaturesRequest request) { + String path = "/api/2.0/feature-engineering/materialized-features"; + try { + Request req = new Request("GET", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, ListMaterializedFeaturesResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public Feature updateFeature(UpdateFeatureRequest request) { String path = String.format("/api/2.0/feature-engineering/features/%s", request.getFullName()); @@ -82,4 +143,22 @@ public Feature updateFeature(UpdateFeatureRequest request) { throw new DatabricksException("IO error: " + e.getMessage(), e); } } + + @Override + public MaterializedFeature updateMaterializedFeature(UpdateMaterializedFeatureRequest request) { + String path = + String.format( + "/api/2.0/feature-engineering/materialized-features/%s", + request.getMaterializedFeatureId()); + try { + Request req = + new Request("PATCH", path, apiClient.serialize(request.getMaterializedFeature())); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + return apiClient.execute(req, MaterializedFeature.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringService.java index 0cfadd68b..4d99afef5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureEngineeringService.java @@ -15,15 +15,34 @@ public interface FeatureEngineeringService { /** Create a Feature. */ Feature createFeature(CreateFeatureRequest createFeatureRequest); + /** Create a materialized feature. */ + MaterializedFeature createMaterializedFeature( + CreateMaterializedFeatureRequest createMaterializedFeatureRequest); + /** Delete a Feature. */ void deleteFeature(DeleteFeatureRequest deleteFeatureRequest); + /** Delete a materialized feature. */ + void deleteMaterializedFeature(DeleteMaterializedFeatureRequest deleteMaterializedFeatureRequest); + /** Get a Feature. */ Feature getFeature(GetFeatureRequest getFeatureRequest); + /** Get a materialized feature. */ + MaterializedFeature getMaterializedFeature( + GetMaterializedFeatureRequest getMaterializedFeatureRequest); + /** List Features. */ ListFeaturesResponse listFeatures(ListFeaturesRequest listFeaturesRequest); + /** List materialized features. */ + ListMaterializedFeaturesResponse listMaterializedFeatures( + ListMaterializedFeaturesRequest listMaterializedFeaturesRequest); + /** Update a Feature. */ Feature updateFeature(UpdateFeatureRequest updateFeatureRequest); + + /** Update a materialized feature (pause/resume). 
*/ + MaterializedFeature updateMaterializedFeature( + UpdateMaterializedFeatureRequest updateMaterializedFeatureRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMaterializedFeatureRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMaterializedFeatureRequest.java new file mode 100755 index 000000000..a8e683be8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMaterializedFeatureRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GetMaterializedFeatureRequest { + /** The ID of the materialized feature. */ + @JsonIgnore private String materializedFeatureId; + + public GetMaterializedFeatureRequest setMaterializedFeatureId(String materializedFeatureId) { + this.materializedFeatureId = materializedFeatureId; + return this; + } + + public String getMaterializedFeatureId() { + return materializedFeatureId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetMaterializedFeatureRequest that = (GetMaterializedFeatureRequest) o; + return Objects.equals(materializedFeatureId, that.materializedFeatureId); + } + + @Override + public int hashCode() { + return Objects.hash(materializedFeatureId); + } + + @Override + public String toString() { + return new ToStringer(GetMaterializedFeatureRequest.class) + .add("materializedFeatureId", materializedFeatureId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesRequest.java new file mode 100755 index 000000000..7561b1122 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesRequest.java @@ -0,0 +1,84 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ListMaterializedFeaturesRequest { + /** + * Filter by feature name. If specified, only materialized features materialized from this feature + * will be returned. + */ + @JsonIgnore + @QueryParam("feature_name") + private String featureName; + + /** + * The maximum number of results to return. Defaults to 100 if not specified. Cannot be greater + * than 1000. + */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** Pagination token to go to the next page based on a previous query. 
*/ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + public ListMaterializedFeaturesRequest setFeatureName(String featureName) { + this.featureName = featureName; + return this; + } + + public String getFeatureName() { + return featureName; + } + + public ListMaterializedFeaturesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListMaterializedFeaturesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListMaterializedFeaturesRequest that = (ListMaterializedFeaturesRequest) o; + return Objects.equals(featureName, that.featureName) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(featureName, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListMaterializedFeaturesRequest.class) + .add("featureName", featureName) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesResponse.java new file mode 100755 index 000000000..84d0a89d4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ListMaterializedFeaturesResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListMaterializedFeaturesResponse { + /** List of materialized features. */ + @JsonProperty("materialized_features") + private Collection materializedFeatures; + + /** Pagination token to request the next page of results for this query. 
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListMaterializedFeaturesResponse setMaterializedFeatures( + Collection materializedFeatures) { + this.materializedFeatures = materializedFeatures; + return this; + } + + public Collection getMaterializedFeatures() { + return materializedFeatures; + } + + public ListMaterializedFeaturesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListMaterializedFeaturesResponse that = (ListMaterializedFeaturesResponse) o; + return Objects.equals(materializedFeatures, that.materializedFeatures) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(materializedFeatures, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListMaterializedFeaturesResponse.class) + .add("materializedFeatures", materializedFeatures) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java new file mode 100755 index 000000000..598f1df11 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java @@ -0,0 +1,149 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A materialized feature represents a feature that is continuously computed and stored. */ +@Generated +public class MaterializedFeature { + /** The full name of the feature in Unity Catalog. */ + @JsonProperty("feature_name") + private String featureName; + + /** + * The timestamp when the pipeline last ran and updated the materialized feature values. If the + * pipeline has not run yet, this field will be null. + */ + @JsonProperty("last_materialization_time") + private String lastMaterializationTime; + + /** Unique identifier for the materialized feature. */ + @JsonProperty("materialized_feature_id") + private String materializedFeatureId; + + /** */ + @JsonProperty("offline_store_config") + private OfflineStoreConfig offlineStoreConfig; + + /** */ + @JsonProperty("online_store_config") + private OnlineStore onlineStoreConfig; + + /** The schedule state of the materialization pipeline. */ + @JsonProperty("pipeline_schedule_state") + private MaterializedFeaturePipelineScheduleState pipelineScheduleState; + + /** + * The fully qualified Unity Catalog path to the table containing the materialized feature (Delta + * table or Lakebase table). Output only. 
+ */ + @JsonProperty("table_name") + private String tableName; + + public MaterializedFeature setFeatureName(String featureName) { + this.featureName = featureName; + return this; + } + + public String getFeatureName() { + return featureName; + } + + public MaterializedFeature setLastMaterializationTime(String lastMaterializationTime) { + this.lastMaterializationTime = lastMaterializationTime; + return this; + } + + public String getLastMaterializationTime() { + return lastMaterializationTime; + } + + public MaterializedFeature setMaterializedFeatureId(String materializedFeatureId) { + this.materializedFeatureId = materializedFeatureId; + return this; + } + + public String getMaterializedFeatureId() { + return materializedFeatureId; + } + + public MaterializedFeature setOfflineStoreConfig(OfflineStoreConfig offlineStoreConfig) { + this.offlineStoreConfig = offlineStoreConfig; + return this; + } + + public OfflineStoreConfig getOfflineStoreConfig() { + return offlineStoreConfig; + } + + public MaterializedFeature setOnlineStoreConfig(OnlineStore onlineStoreConfig) { + this.onlineStoreConfig = onlineStoreConfig; + return this; + } + + public OnlineStore getOnlineStoreConfig() { + return onlineStoreConfig; + } + + public MaterializedFeature setPipelineScheduleState( + MaterializedFeaturePipelineScheduleState pipelineScheduleState) { + this.pipelineScheduleState = pipelineScheduleState; + return this; + } + + public MaterializedFeaturePipelineScheduleState getPipelineScheduleState() { + return pipelineScheduleState; + } + + public MaterializedFeature setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MaterializedFeature that = (MaterializedFeature) o; + return Objects.equals(featureName, that.featureName) + && Objects.equals(lastMaterializationTime, that.lastMaterializationTime) + && Objects.equals(materializedFeatureId, that.materializedFeatureId) + && Objects.equals(offlineStoreConfig, that.offlineStoreConfig) + && Objects.equals(onlineStoreConfig, that.onlineStoreConfig) + && Objects.equals(pipelineScheduleState, that.pipelineScheduleState) + && Objects.equals(tableName, that.tableName); + } + + @Override + public int hashCode() { + return Objects.hash( + featureName, + lastMaterializationTime, + materializedFeatureId, + offlineStoreConfig, + onlineStoreConfig, + pipelineScheduleState, + tableName); + } + + @Override + public String toString() { + return new ToStringer(MaterializedFeature.class) + .add("featureName", featureName) + .add("lastMaterializationTime", lastMaterializationTime) + .add("materializedFeatureId", materializedFeatureId) + .add("offlineStoreConfig", offlineStoreConfig) + .add("onlineStoreConfig", onlineStoreConfig) + .add("pipelineScheduleState", pipelineScheduleState) + .add("tableName", tableName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturePipelineScheduleState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturePipelineScheduleState.java new file mode 100755 index 000000000..4965d4f27 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeaturePipelineScheduleState.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum MaterializedFeaturePipelineScheduleState { + ACTIVE, + PAUSED, + SNAPSHOT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OfflineStoreConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OfflineStoreConfig.java new file mode 100755 index 000000000..fbf7c79e1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/OfflineStoreConfig.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Configuration for offline store destination. */ +@Generated +public class OfflineStoreConfig { + /** The Unity Catalog catalog name. */ + @JsonProperty("catalog_name") + private String catalogName; + + /** The Unity Catalog schema name. */ + @JsonProperty("schema_name") + private String schemaName; + + /** + * Prefix for Unity Catalog table name. The materialized feature will be stored in a table with + * this prefix and a generated postfix. + */ + @JsonProperty("table_name_prefix") + private String tableNamePrefix; + + public OfflineStoreConfig setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public OfflineStoreConfig setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public String getSchemaName() { + return schemaName; + } + + public OfflineStoreConfig setTableNamePrefix(String tableNamePrefix) { + this.tableNamePrefix = tableNamePrefix; + return this; + } + + public String getTableNamePrefix() { + return tableNamePrefix; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OfflineStoreConfig that = (OfflineStoreConfig) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(schemaName, that.schemaName) + && Objects.equals(tableNamePrefix, that.tableNamePrefix); + } + + @Override + public int hashCode() { + return Objects.hash(catalogName, schemaName, tableNamePrefix); + } + + @Override + public String toString() { + return new ToStringer(OfflineStoreConfig.class) + .add("catalogName", catalogName) + .add("schemaName", schemaName) + .add("tableNamePrefix", tableNamePrefix) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateMaterializedFeatureRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateMaterializedFeatureRequest.java new file mode 100755 index 000000000..d0a3869f0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/UpdateMaterializedFeatureRequest.java @@ -0,0 +1,80 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateMaterializedFeatureRequest { + /** The materialized feature to update. */ + @JsonProperty("materialized_feature") + private MaterializedFeature materializedFeature; + + /** Unique identifier for the materialized feature. */ + @JsonIgnore private String materializedFeatureId; + + /** + * Provide the materialization feature fields which should be updated. Currently, only the + * pipeline_state field can be updated. + */ + @JsonIgnore + @QueryParam("update_mask") + private String updateMask; + + public UpdateMaterializedFeatureRequest setMaterializedFeature( + MaterializedFeature materializedFeature) { + this.materializedFeature = materializedFeature; + return this; + } + + public MaterializedFeature getMaterializedFeature() { + return materializedFeature; + } + + public UpdateMaterializedFeatureRequest setMaterializedFeatureId(String materializedFeatureId) { + this.materializedFeatureId = materializedFeatureId; + return this; + } + + public String getMaterializedFeatureId() { + return materializedFeatureId; + } + + public UpdateMaterializedFeatureRequest setUpdateMask(String updateMask) { + this.updateMask = updateMask; + return this; + } + + public String getUpdateMask() { + return updateMask; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateMaterializedFeatureRequest that = (UpdateMaterializedFeatureRequest) o; + return Objects.equals(materializedFeature, that.materializedFeature) + && Objects.equals(materializedFeatureId, that.materializedFeatureId) + && Objects.equals(updateMask, that.updateMask); + } + + @Override + public int hashCode() { + return Objects.hash(materializedFeature, materializedFeatureId, updateMask); + } + + @Override + public String toString() { + return new ToStringer(UpdateMaterializedFeatureRequest.class) + .add("materializedFeature", materializedFeature) + .add("materializedFeatureId", materializedFeatureId) + .add("updateMask", updateMask) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java index 6bce1de65..f50ce7bee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DayOfWeek.java @@ -5,8 +5,8 @@ import com.databricks.sdk.support.Generated; /** - * Days of week in which the restart is allowed to happen (within a five-hour window starting at - * start_hour). If not specified all days of the week will be used. + * Days of week in which the window is allowed to happen. If not specified all days of the week will + * be used. 
*/ @Generated public enum DayOfWeek { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java index 8f90da55e..8a0c9fdbe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertsV2API.java @@ -43,7 +43,7 @@ public Iterable listAlerts(ListAlertsV2Request request) { return new Paginator<>( request, impl::listAlerts, - ListAlertsV2Response::getResults, + ListAlertsV2Response::getAlerts, response -> { String token = response.getNextPageToken(); if (token == null || token.isEmpty()) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java index 6819ecdeb..b241ecceb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsV2Response.java @@ -18,10 +18,6 @@ public class ListAlertsV2Response { @JsonProperty("next_page_token") private String nextPageToken; - /** Deprecated. Use `alerts` instead. */ - @JsonProperty("results") - private Collection results; - public ListAlertsV2Response setAlerts(Collection alerts) { this.alerts = alerts; return this; @@ -40,28 +36,17 @@ public String getNextPageToken() { return nextPageToken; } - public ListAlertsV2Response setResults(Collection results) { - this.results = results; - return this; - } - - public Collection getResults() { - return results; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListAlertsV2Response that = (ListAlertsV2Response) o; - return Objects.equals(alerts, that.alerts) - && Objects.equals(nextPageToken, that.nextPageToken) - && Objects.equals(results, that.results); + return Objects.equals(alerts, that.alerts) && Objects.equals(nextPageToken, that.nextPageToken); } @Override public int hashCode() { - return Objects.hash(alerts, nextPageToken, results); + return Objects.hash(alerts, nextPageToken); } @Override @@ -69,7 +54,6 @@ public String toString() { return new ToStringer(ListAlertsV2Response.class) .add("alerts", alerts) .add("nextPageToken", nextPageToken) - .add("results", results) .toString(); } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/HttpPathTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/HttpPathTest.java new file mode 100755 index 000000000..9da7d1adf --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/HttpPathTest.java @@ -0,0 +1,493 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.gentesting.unittests; + +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.DatabricksConfig; +import com.databricks.sdk.core.DummyCredentialsProvider; +import com.databricks.sdk.core.http.HttpClient; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.core.http.Response; +import com.databricks.sdk.core.utils.SerDeUtils; +import com.databricks.sdk.service.httpcallv2.*; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.net.URL; +import java.util.*; +import java.util.function.Consumer; +import java.util.function.Supplier; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class HttpPathTest { + private static final ObjectMapper objectMapper = SerDeUtils.createMapper(); + private static final String HOST = "http://test.databricks.com"; + + static class TestCase { + String name; + Consumer invoke; + String path; + Supplier expectedBody; + + TestCase( + String name, Consumer invoke, String path, Supplier expectedBody) { + this.name = name; + this.invoke = invoke; + this.path = path; + this.expectedBody = expectedBody; + } + } + + static List testCases() throws JsonProcessingException, JsonMappingException { + return Arrays.asList( + new TestCase( + "LegacyHttpPostNoQueryParamsNoBody", + client -> { + try { + client.createResource( + new CreateResourceRequest() + .setPathParamString("string_val") + .setPathParamInt(123L) + .setPathParamBool(true)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/string_val/123/true", + null), + new TestCase( + "LegacyHttpPostWithBody", + client -> { + try { + client.createResource( + new CreateResourceRequest() + .setBodyField("request_body_content") + .setPathParamString("test_string") + .setPathParamInt(456L) + .setPathParamBool(false)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/test_string/456/false", + () -> { + try { + return objectMapper.writeValueAsString( + new CreateResourceRequest() + .setBodyField("request_body_content") + .setPathParamString("test_string") + .setPathParamInt(456L) + .setPathParamBool(false)); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }), + new TestCase( + "UpdateResourceNoQueryParamsNoBody", + client -> { + try { + client.updateResource( + new UpdateResourceRequest() + .setResource( + new Resource() + .setAnyField( + new ObjectMapper() + .readValue("{\"key\": \"value\"}", Object.class)) + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")) + .setNestedPathParamString("update_string") + .setNestedPathParamInt(789L) + .setNestedPathParamBool(true)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/update_string/789/true", + () -> { + try { + return objectMapper.writeValueAsString( + new Resource() + .setAnyField( + new ObjectMapper().readValue("{\"key\": \"value\"}", Object.class)) + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + 
.setNestedPathParamString("update_string")); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }), + new TestCase( + "UpdateResourceWithBody", + client -> { + try { + client.updateResource( + new UpdateResourceRequest() + .setResource( + new Resource() + .setBodyField("request_body_content") + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")) + .setNestedPathParamString("update_string") + .setNestedPathParamInt(789L) + .setNestedPathParamBool(true)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/update_string/789/true", + () -> { + try { + return objectMapper.writeValueAsString( + new Resource() + .setBodyField("request_body_content") + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }), + new TestCase( + "UpdateResourceWithSimpleQueryParams", + client -> { + try { + client.updateResource( + new UpdateResourceRequest() + .setResource( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")) + .setNestedPathParamString("update_string") + .setNestedPathParamInt(789L) + .setNestedPathParamBool(true) + .setQueryParamString("query_string_val") + .setQueryParamInt(999L) + .setQueryParamBool(true) + .setFieldMask("field.mask.value")); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/update_string/789/true?field_mask=field.mask.value&query_param_bool=true&query_param_int=999&query_param_string=query_string_val", + () -> { + try { + return objectMapper.writeValueAsString( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }), + new TestCase( + "UpdateResourceWithOneNestedQueryParam", + client -> { + try { + client.updateResource( + new UpdateResourceRequest() + .setResource( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")) + .setNestedPathParamString("update_string") + .setNestedPathParamInt(789L) + .setNestedPathParamBool(true) + .setOptionalComplexQueryParam( + new ComplexQueryParam() + .setNestedOptionalQueryParam("nested_optional"))); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/update_string/789/true?optional_complex_query_param.nested_optional_query_param=nested_optional", + () -> { + try { + return objectMapper.writeValueAsString( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }), + new TestCase( + "UpdateResourceWithRepeatedQueryParam", + client -> { + try { + client.updateResource( + new UpdateResourceRequest() + .setResource( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")) + .setNestedPathParamString("update_string") + .setNestedPathParamInt(789L) + .setNestedPathParamBool(true) + .setRepeatedQueryParam(Arrays.asList("item1", "item2", "item3"))); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + 
"/api/2.0/http-call/update_string/789/true?repeated_query_param=item1&repeated_query_param=item2&repeated_query_param=item3", + () -> { + try { + return objectMapper.writeValueAsString( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }), + new TestCase( + "UpdateResourceWithRepeatedNestedQueryParam", + client -> { + try { + client.updateResource( + new UpdateResourceRequest() + .setResource( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")) + .setNestedPathParamString("update_string") + .setNestedPathParamInt(789L) + .setNestedPathParamBool(true) + .setOptionalComplexQueryParam( + new ComplexQueryParam() + .setNestedRepeatedQueryParam( + Arrays.asList("item1", "item2", "item3")))); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/update_string/789/true?optional_complex_query_param.nested_repeated_query_param=item1&optional_complex_query_param.nested_repeated_query_param=item2&optional_complex_query_param.nested_repeated_query_param=item3", + () -> { + try { + return objectMapper.writeValueAsString( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }), + new TestCase( + "UpdateResourceWithDoubleRepeatedNestedQueryParam", + client -> { + try { + client.updateResource( + new UpdateResourceRequest() + .setResource( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")) + .setNestedPathParamString("update_string") + .setNestedPathParamInt(789L) + .setNestedPathParamBool(true) + .setRepeatedComplexQueryParam( + Arrays.asList( + new ComplexQueryParam() + .setNestedRepeatedQueryParam( + Arrays.asList("item1", "item2", "item3")), + new ComplexQueryParam() + .setNestedRepeatedQueryParam( + Arrays.asList("item4", "item5", "item6"))))); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/update_string/789/true?repeated_complex_query_param=ComplexQueryParam%5BnestedRepeatedQueryParam%3D%5Bitem1%2C+item2%2C+item3%5D%5D&repeated_complex_query_param=ComplexQueryParam%5BnestedRepeatedQueryParam%3D%5Bitem4%2C+item5%2C+item6%5D%5D", + () -> { + try { + return objectMapper.writeValueAsString( + new Resource() + .setNestedPathParamBool(true) + .setNestedPathParamInt(789L) + .setNestedPathParamString("update_string")); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }), + new TestCase( + "GetResourceNoQueryParams", + client -> { + try { + client.getResource( + new GetResourceRequest() + .setPathParamString("get_string") + .setPathParamInt(123L) + .setPathParamBool(true)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/get_string/123/true", + null), + new TestCase( + "GetResourceWithSimpleQueryParams", + client -> { + try { + client.getResource( + new GetResourceRequest() + .setPathParamString("get_string") + .setPathParamInt(456L) + .setPathParamBool(false) + .setQueryParamString("query_string_val") + .setQueryParamInt(999L) + .setQueryParamBool(true) + .setFieldMask("field.mask.value")); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + 
"/api/2.0/http-call/get_string/456/false?field_mask=field.mask.value&query_param_bool=true&query_param_int=999&query_param_string=query_string_val", + null), + new TestCase( + "GetResourceWithOneNestedQueryParam", + client -> { + try { + client.getResource( + new GetResourceRequest() + .setPathParamString("get_string") + .setPathParamInt(789L) + .setPathParamBool(true) + .setOptionalComplexQueryParam( + new ComplexQueryParam() + .setNestedOptionalQueryParam("nested_optional"))); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/get_string/789/true?optional_complex_query_param.nested_optional_query_param=nested_optional", + null), + new TestCase( + "GetResourceWithRepeatedQueryParam", + client -> { + try { + client.getResource( + new GetResourceRequest() + .setPathParamString("get_string") + .setPathParamInt(101L) + .setPathParamBool(false) + .setRepeatedQueryParam(Arrays.asList("item1", "item2", "item3"))); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/get_string/101/false?repeated_query_param=item1&repeated_query_param=item2&repeated_query_param=item3", + null), + new TestCase( + "GetResourceWithRepeatedNestedQueryParam", + client -> { + try { + client.getResource( + new GetResourceRequest() + .setPathParamString("get_string") + .setPathParamInt(202L) + .setPathParamBool(true) + .setOptionalComplexQueryParam( + new ComplexQueryParam() + .setNestedRepeatedQueryParam( + Arrays.asList("item1", "item2", "item3")))); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/get_string/202/true?optional_complex_query_param.nested_repeated_query_param=item1&optional_complex_query_param.nested_repeated_query_param=item2&optional_complex_query_param.nested_repeated_query_param=item3", + null), + new TestCase( + "GetResourceWithDoubleRepeatedNestedQueryParam", + client -> { + try { + client.getResource( + new GetResourceRequest() + .setPathParamString("get_string") + .setPathParamInt(303L) + .setPathParamBool(false) + .setRepeatedComplexQueryParam( + Arrays.asList( + new ComplexQueryParam() + .setNestedRepeatedQueryParam( + Arrays.asList("item1", "item2", "item3")), + new ComplexQueryParam() + .setNestedRepeatedQueryParam( + Arrays.asList("item4", "item5", "item6"))))); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "/api/2.0/http-call/get_string/303/false?repeated_complex_query_param=ComplexQueryParam%5BnestedRepeatedQueryParam%3D%5Bitem1%2C+item2%2C+item3%5D%5D&repeated_complex_query_param=ComplexQueryParam%5BnestedRepeatedQueryParam%3D%5Bitem4%2C+item5%2C+item6%5D%5D", + null)); + } + + @ParameterizedTest(name = "{0}") + @MethodSource("testCases") + void testHttpPath(TestCase testCase) throws Exception { + HttpClient mock = mock(HttpClient.class); + HttpCallV2API client = prepareClient(mock, testCase); + + testCase.invoke.accept(client); + } + + private HttpCallV2API prepareClient(HttpClient mock, TestCase testCase) throws IOException { + + // Configure mock to return a successful dummy response for the test case. + Response successResponse = new Response("{}", 200, "OK", new URL(HOST)); + + // Validate the request is the expected request. 
+ when(mock.execute(any())) + .thenAnswer( + invocation -> { + Request request = invocation.getArgument(0); + String expectedUrl = HOST + testCase.path; + if (!request.getUri().toString().equals(expectedUrl)) { + throw new AssertionError( + "Expected request URL: " + + expectedUrl + + " but got: " + + request.getUri().toString()); + } + if (testCase.expectedBody != null) { + if (!request.getBodyString().equals(testCase.expectedBody.get())) { + throw new AssertionError( + "Expected request body: " + + testCase.expectedBody.get() + + " but got: " + + request.getBodyString()); + } + } + return successResponse; + }); + + // Create DatabricksConfig with the mock HttpClient. + DatabricksConfig config = + new DatabricksConfig() + .setHost(HOST) + .setCredentialsProvider(new DummyCredentialsProvider()) + .setHttpClient(mock); + + // Create ApiClient with the config. + ApiClient apiClient = new ApiClient(config); + + // Create HttpCallV2API with the ApiClient. + return new HttpCallV2API(apiClient); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/MarshallTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/MarshallTest.java new file mode 100755 index 000000000..c35078873 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/gentesting/unittests/MarshallTest.java @@ -0,0 +1,257 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.gentesting.unittests; + +import static org.junit.jupiter.api.Assertions.*; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.utils.SerDeUtils; +import com.databricks.sdk.service.jsonmarshallv2.*; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.*; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class MarshallTest { + @Mock private ApiClient mockApiClient; + private ObjectMapper objectMapper; + + static class MarshallTestCase { + String name; + Object input; + String output; + + MarshallTestCase(String name, Object input, String output) { + this.name = name; + this.input = input; + this.output = output; + } + } + + @BeforeEach + void setUp() { + objectMapper = SerDeUtils.createMapper(); + } + + static List marshallTestCases() + throws JsonProcessingException, JsonMappingException { + List cases = new ArrayList<>(); + cases.add( + new MarshallTestCase( + "OptionalString", + new OptionalFields().setOptionalString("test"), + "{" + " \"optional_string\": \"test\"" + "}")); + cases.add( + new MarshallTestCase( + "OptionalInt32", + new OptionalFields().setOptionalInt32(42L), + "{" + " \"optional_int32\": 42" + "}")); + cases.add( + new MarshallTestCase( + "OptionalInt64", + new OptionalFields().setOptionalInt64(9223372036854775807L), + "{" + " \"optional_int64\": 9223372036854775807" + "}")); + cases.add( + new MarshallTestCase( + "OptionalBool", + new OptionalFields().setOptionalBool(true), + "{" + " \"optional_bool\": true" + "}")); + cases.add( + new MarshallTestCase( + "OptionalEnum", + new 
OptionalFields().setTestEnum(TestEnum.TEST_ENUM_ONE), + "{" + " \"test_enum\": \"TEST_ENUM_ONE\"" + "}")); + cases.add( + new MarshallTestCase( + "OptionalNestedMessage", + new OptionalFields() + .setOptionalMessage(new NestedMessage().setOptionalString("nested_value")), + "{" + + " \"optional_message\": {" + + " \"optional_string\": \"nested_value\"" + + " }" + + "}")); + cases.add( + new MarshallTestCase( + "OptionalMap", + new OptionalFields() + .setMap( + new HashMap() { + { + put("key", "test_key"); + put("value", "test_value"); + } + }), + "{" + + " \"map\": {" + + " \"key\": \"test_key\"," + + " \"value\": \"test_value\"" + + " }" + + "}")); + cases.add( + new MarshallTestCase( + "OptionalDuration", + new OptionalFields().setDuration("3600s"), + "{" + " \"duration\": \"3600s\"" + "}")); + cases.add( + new MarshallTestCase( + "OptionalFieldMask", + new OptionalFields().setFieldMask("optional_string,optional_int32"), + "{" + " \"field_mask\": \"optional_string,optional_int32\"" + "}")); + cases.add( + new MarshallTestCase( + "OptionalTimestamp", + new OptionalFields().setTimestamp("2023-01-01T00:00:00Z"), + "{" + " \"timestamp\": \"2023-01-01T00:00:00Z\"" + "}")); + cases.add( + new MarshallTestCase( + "MultipleOptionalFields", + new OptionalFields() + .setOptionalBool(true) + .setOptionalInt32(42L) + .setOptionalString("test"), + "{" + + " \"optional_string\":\"test\"," + + " \"optional_int32\":42," + + " \"optional_bool\":true" + + "}")); + cases.add(new MarshallTestCase("RequiredFieldsNoInput", new RequiredFields(), "{}")); + cases.add( + new MarshallTestCase( + "RequiredFieldsWithNestedMessage", + new RequiredFields() + .setRequiredMessage(new NestedMessage().setOptionalString("nested_value")), + "{" + + " \"required_message\": {" + + " \"optional_string\": \"nested_value\"" + + " }" + + " }")); + cases.add( + new MarshallTestCase( + "RepeatedString", + new RepeatedFields().setRepeatedString(Arrays.asList("item1", "item2", "item3")), + "{" + " \"repeated_string\": [\"item1\", \"item2\", \"item3\"]" + "}")); + cases.add( + new MarshallTestCase( + "RepeatedInt32", + new RepeatedFields().setRepeatedInt32(Arrays.asList(1L, 2L, 3L, 4L, 5L)), + "{" + " \"repeated_int32\": [1, 2, 3, 4, 5]" + "}")); + cases.add( + new MarshallTestCase( + "RepeatedInt64", + new RepeatedFields() + .setRepeatedInt64(Arrays.asList(1000000000000000000L, 2000000000000000000L)), + "{" + " \"repeated_int64\": [1000000000000000000, 2000000000000000000]" + "}")); + cases.add( + new MarshallTestCase( + "RepeatedBool", + new RepeatedFields().setRepeatedBool(Arrays.asList(true, false, true)), + "{" + " \"repeated_bool\": [true, false, true]" + "}")); + cases.add( + new MarshallTestCase( + "RepeatedEnum", + new RepeatedFields() + .setTestRepeatedEnum(Arrays.asList(TestEnum.TEST_ENUM_ONE, TestEnum.TEST_ENUM_TWO)), + "{" + " \"test_repeated_enum\": [\"TEST_ENUM_ONE\", \"TEST_ENUM_TWO\"]" + "}")); + cases.add( + new MarshallTestCase( + "RepeatedNestedMessage", + new RepeatedFields() + .setRepeatedMessage( + Arrays.asList( + new NestedMessage().setOptionalString("nested1"), + new NestedMessage().setOptionalString("nested2"))), + "{" + + " \"repeated_message\": [" + + " {" + + " \"optional_string\": \"nested1\"" + + " }," + + " {" + + " \"optional_string\": \"nested2\"" + + " }" + + " ]" + + "}")); + cases.add( + new MarshallTestCase( + "RepeatedDuration", + new RepeatedFields().setRepeatedDuration(Arrays.asList("60s", "120s", "180s")), + "{" + " \"repeated_duration\": [\"60s\", \"120s\", \"180s\"]" + "}")); + cases.add( + 
new MarshallTestCase( + "RepeatedFieldMask", + new RepeatedFields().setRepeatedFieldMask(Arrays.asList("field1", "field2,field3")), + "{" + " \"repeated_field_mask\": [\"field1\", \"field2,field3\"]" + "}")); + cases.add( + new MarshallTestCase( + "RepeatedTimestamp", + new RepeatedFields() + .setRepeatedTimestamp( + Arrays.asList("2023-01-01T00:00:00Z", "2023-01-02T00:00:00Z")), + "{" + + " \"repeated_timestamp\": [\"2023-01-01T00:00:00Z\", \"2023-01-02T00:00:00Z\"]" + + "}")); + cases.add( + new MarshallTestCase( + "MultipleRepeatedFields", + new RepeatedFields() + .setRepeatedBool(Arrays.asList(true, false)) + .setRepeatedInt32(Arrays.asList(10L, 20L, 30L)) + .setRepeatedString(Arrays.asList("a", "b", "c")), + "{" + + " \"repeated_string\": [\"a\", \"b\", \"c\"]," + + " \"repeated_int32\": [10, 20, 30]," + + " \"repeated_bool\": [true, false]" + + "}")); + cases.add( + new MarshallTestCase( + "EmptyRepeatedFields", + new RepeatedFields().setRepeatedString(new ArrayList<>()), + "{" + " \"repeated_string\": []" + " }")); + cases.add(new MarshallTestCase("OptionalFieldsNoInput", new OptionalFields(), "{}")); + cases.add( + new MarshallTestCase( + "OptionalFieldsZeroValues", + new OptionalFields() + .setOptionalBool(false) + .setOptionalInt32(0L) + .setOptionalInt64(0L) + .setOptionalString(""), + "{" + + " \"optional_int32\": 0," + + " \"optional_int64\": 0," + + " \"optional_bool\": false," + + " \"optional_string\": \"\"" + + " }")); + cases.add( + new MarshallTestCase( + "LegacyWellKnownTypes", + new OptionalFields() + .setLegacyDuration("1s") + .setLegacyFieldMask("legacy_duration,legacy_timestamp") + .setLegacyTimestamp("2023-01-01T00:00:00Z"), + "{" + + " \"legacy_duration\": \"1s\"," + + " \"legacy_timestamp\": \"2023-01-01T00:00:00Z\"," + + " \"legacy_field_mask\": \"legacy_duration,legacy_timestamp\"" + + "}")); + return cases; + } + + @ParameterizedTest(name = "{0}") + @MethodSource("marshallTestCases") + void testMarshall(MarshallTestCase testCase) throws Exception { + String result = objectMapper.writeValueAsString(testCase.input); + + // Parse both JSONs and compare as JsonNodes - more forgiving of formatting differences + JsonNode expectedJson = objectMapper.readTree(testCase.output); + JsonNode actualJson = objectMapper.readTree(result); + assertEquals(expectedJson, actualJson); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceOperation.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceOperation.java new file mode 100755 index 000000000..df2943fc8 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceOperation.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.utils.SerDeUtils; +import com.databricks.sdk.service.common.lro.LroOptions; +import com.databricks.sdk.support.Generated; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Duration; +import java.util.Optional; +import java.util.concurrent.TimeoutException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Wrapper for interacting with a long-running deleteTestResource operation. 
Provides methods to + * wait for completion, check status, cancel, and access metadata. + */ +@Generated +public class DeleteTestResourceOperation { + private static final Logger LOG = LoggerFactory.getLogger(DeleteTestResourceOperation.class); + private static final Duration DEFAULT_TIMEOUT = Duration.ofMinutes(20); + + private final LroTestingService impl; + private Operation operation; + private final ObjectMapper objectMapper; + + public DeleteTestResourceOperation(LroTestingService impl, Operation operation) { + this.impl = impl; + this.operation = operation; + this.objectMapper = SerDeUtils.createMapper(); + } + + /** + * Wait for the operation to complete. Uses the default timeout of 20 + * minutes. + * + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public void waitForCompletion() throws TimeoutException { + waitForCompletion(Optional.empty()); + } + + /** + * Wait for the operation to complete. + * + * @param options the options for configuring the wait behavior, can be empty for defaults + * @throws TimeoutException if the operation doesn't complete within the timeout + * @throws DatabricksException if the operation fails + */ + public void waitForCompletion(Optional<LroOptions> options) throws TimeoutException { + Duration timeout = options.flatMap(LroOptions::getTimeout).orElse(DEFAULT_TIMEOUT); + long deadline = System.currentTimeMillis() + timeout.toMillis(); + String statusMessage = "polling operation..."; + int attempt = 1; + + while (System.currentTimeMillis() < deadline) { + // Refresh the operation state + refreshOperation(); + + if (operation.getDone() != null && operation.getDone()) { + // Operation completed, check for success or failure + if (operation.getError() != null) { + String errorMsg = "unknown error"; + if (operation.getError().getMessage() != null + && !operation.getError().getMessage().isEmpty()) { + errorMsg = operation.getError().getMessage(); + } + + if (operation.getError().getErrorCode() != null) { + errorMsg = String.format("[%s] %s", operation.getError().getErrorCode(), errorMsg); + } + + throw new DatabricksException("Operation failed: " + errorMsg); + } + + // Operation completed successfully, unmarshal response + if (operation.getResponse() == null) { + throw new DatabricksException("Operation completed but no response available"); + } + + try { + JsonNode responseJson = objectMapper.valueToTree(operation.getResponse()); + objectMapper.treeToValue(responseJson, Void.class); + } catch (JsonProcessingException e) { + throw new DatabricksException("Failed to unmarshal response: " + e.getMessage(), e); + } + // Delete operations carry no response payload, so polling can stop here. + return; + } + + // Operation still in progress, wait before polling again + String prefix = String.format("operation=%s", operation.getName()); + int sleep = Math.min(attempt, 10); // sleep 10s max per attempt + LOG.info("{}: operation in progress (sleeping ~{}s)", prefix, sleep); + + try { + Thread.sleep((long) (sleep * 1000L + Math.random() * 1000)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new DatabricksException("Current thread was interrupted", e); + } + attempt++; + } + + throw new TimeoutException( + String.format("Operation timed out after %s: %s", timeout, statusMessage)); + } + + /** + * Cancel the operation.
+ * + * @throws DatabricksException if the cancellation fails + */ + public void cancel() { + impl.cancelOperation(new CancelOperationRequest().setName(operation.getName())); + } + + /** + * Get the operation name. + * + * @return the operation name + */ + public String getName() { + return operation.getName(); + } + + /** + * Get the operation metadata. + * + * @return the operation metadata, or null if not available + * @throws DatabricksException if the metadata cannot be deserialized + */ + public TestResourceOperationMetadata getMetadata() { + if (operation.getMetadata() == null) { + return null; + } + + try { + JsonNode metadataJson = objectMapper.valueToTree(operation.getMetadata()); + return objectMapper.treeToValue(metadataJson, TestResourceOperationMetadata.class); + } catch (JsonProcessingException e) { + throw new DatabricksException("Failed to unmarshal operation metadata: " + e.getMessage(), e); + } + } + + /** + * Check if the operation is done. This method refreshes the operation state before checking. + * + * @return true if the operation is complete, false otherwise + * @throws DatabricksException if the status check fails + */ + public boolean isDone() { + refreshOperation(); + return operation.getDone() != null && operation.getDone(); + } + + /** Refresh the operation state by polling the server. */ + private void refreshOperation() { + operation = impl.getOperation(new GetOperationRequest().setName(operation.getName())); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceRequest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceRequest.java new file mode 100755 index 000000000..f5c2631e5 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/DeleteTestResourceRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.lrotesting; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteTestResourceRequest { + /** Resource ID to delete */ + @JsonIgnore private String resourceId; + + public DeleteTestResourceRequest setResourceId(String resourceId) { + this.resourceId = resourceId; + return this; + } + + public String getResourceId() { + return resourceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteTestResourceRequest that = (DeleteTestResourceRequest) o; + return Objects.equals(resourceId, that.resourceId); + } + + @Override + public int hashCode() { + return Objects.hash(resourceId); + } + + @Override + public String toString() { + return new ToStringer(DeleteTestResourceRequest.class).add("resourceId", resourceId).toString(); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java index 82cdda20e..91b92e67d 100755 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingAPI.java @@ -33,6 +33,15 @@ public CreateTestResourceOperation createTestResource(CreateTestResourceRequest return new CreateTestResourceOperation(impl, operation); } + public DeleteTestResourceOperation deleteTestResource(String resourceId) { + return deleteTestResource(new DeleteTestResourceRequest().setResourceId(resourceId)); + } + + public DeleteTestResourceOperation deleteTestResource(DeleteTestResourceRequest request) { + Operation operation = impl.deleteTestResource(request); + return new DeleteTestResourceOperation(impl, operation); + } + public Operation getOperation(String name) { return getOperation(new GetOperationRequest().setName(name)); } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java index b1bd471df..f4e787e49 100755 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingImpl.java @@ -43,6 +43,19 @@ public Operation createTestResource(CreateTestResourceRequest request) { } } + @Override + public Operation deleteTestResource(DeleteTestResourceRequest request) { + String path = String.format("/api/2.0/lro-testing/resources/%s", request.getResourceId()); + try { + Request req = new Request("DELETE", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, Operation.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public Operation getOperation(GetOperationRequest request) { String path = String.format("/api/2.0/lro-testing/operations/%s", request.getName()); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java index 769d17f0c..82e7e5209 100755 --- 
a/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/service/lrotesting/LroTestingService.java @@ -18,6 +18,8 @@ public interface LroTestingService { /** Simple method to create test resource for LRO testing */ Operation createTestResource(CreateTestResourceRequest createTestResourceRequest); + Operation deleteTestResource(DeleteTestResourceRequest deleteTestResourceRequest); + Operation getOperation(GetOperationRequest getOperationRequest); /** Simple method to get test resource */
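How the new delete pieces fit together: LroTestingAPI.deleteTestResource issues the DELETE through LroTestingImpl and wraps the returned Operation in a DeleteTestResourceOperation, which keeps re-reading the operation via getOperation until done is set, turning any error payload into a DatabricksException and giving up after the 20-minute default timeout. A minimal usage sketch follows, assuming LroTestingAPI takes an ApiClient in its constructor like the other generated wrappers (that constructor is not shown in this diff) and using placeholder host and token values.

    // Hedged sketch; the LroTestingAPI(ApiClient) constructor and the config values are assumptions.
    import com.databricks.sdk.core.ApiClient;
    import com.databricks.sdk.core.DatabricksConfig;
    import com.databricks.sdk.service.lrotesting.DeleteTestResourceOperation;
    import com.databricks.sdk.service.lrotesting.LroTestingAPI;

    public class DeleteTestResourceSketch {
      public static void main(String[] args) throws Exception {
        // Build an ApiClient from a DatabricksConfig, mirroring how HttpPathTest wires its client.
        DatabricksConfig config =
            new DatabricksConfig().setHost("http://localhost:8080").setToken("dummy-token");
        ApiClient apiClient = new ApiClient(config);

        LroTestingAPI lro = new LroTestingAPI(apiClient); // assumed constructor shape

        // Start the long-running delete; the wrapper holds the initial Operation state.
        DeleteTestResourceOperation op = lro.deleteTestResource("resource-123");
        System.out.println("started " + op.getName());

        // Poll until the server reports done=true, or fail with TimeoutException after the
        // default 20 minutes; op.cancel() aborts, op.isDone() does a single refresh-and-check,
        // and op.getMetadata() exposes TestResourceOperationMetadata while the operation runs.
        op.waitForCompletion();
      }
    }

Between polls the wrapper sleeps with a per-attempt backoff capped at roughly 10 seconds, so the sketch above may block for a while against a slow test server.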