diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index a7b80d538..2924d5d6d 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -cd641c9dd4febe334b339dd7878d099dcf0eeab5 \ No newline at end of file +31b3fea21dbe5a3a652937691602eb66d6dba30b \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index b5d27343a..49878353a 100755 --- a/.gitattributes +++ b/.gitattributes @@ -680,8 +680,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitS databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetails.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetails.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetailsStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetailsInitScriptExecutionStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstallLibraries.java linguist-generated=true @@ -810,6 +809,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCon databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true @@ -1641,6 +1642,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipel databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetail.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLevel.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileLibrary.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Filters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsRequest.java linguist-generated=true @@ -1802,15 +1804,18 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayUs databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuth.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuth.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessageRole.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CohereConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CreateServingEndpoint.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataPlaneInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DatabricksModelServingConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/DataframeSplitInput.java linguist-generated=true @@ -1834,6 +1839,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalFun databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelUsageElement.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FoundationModel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/GetOpenApiResponse.java linguist-generated=true @@ -2149,9 +2155,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteRespo 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeleteShareRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingDependencyList.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunctionDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingTableDependency.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterInfos.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/FunctionParameterMode.java linguist-generated=true @@ -2309,6 +2315,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EnumValue.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExecuteStatementRequestOnWaitTimeout.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalLink.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySource.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Format.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/GetAlertsLegacyRequest.java linguist-generated=true diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 3637e7375..25299c316 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,34 @@ ### Internal Changes ### API Changes +* Added `generateDownloadFullQueryResult()` method for `workspaceClient.genie()` service. +* Added `effectiveUserApiScopes`, `oauth2AppClientId`, `oauth2AppIntegrationId` and `userApiScopes` fields for `com.databricks.sdk.service.apps.App`. +* Added `abfss`, `dbfs`, `errorMessage`, `executionDurationSeconds`, `file`, `gcs`, `s3`, `status`, `volumes` and `workspace` fields for `com.databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`. +* [Breaking] Added `forecastGranularity` field for `com.databricks.sdk.service.ml.CreateForecastingExperimentRequest`. +* Added `jwksUri` field for `com.databricks.sdk.service.oauth2.OidcFederationPolicy`. +* Added `eventLog` field for `com.databricks.sdk.service.pipelines.CreatePipeline`. +* Added `eventLog` field for `com.databricks.sdk.service.pipelines.EditPipeline`. +* Added `eventLog` field for `com.databricks.sdk.service.pipelines.PipelineSpec`. +* Added `fallbackConfig` field for `com.databricks.sdk.service.serving.AiGatewayConfig`. +* Added `customProviderConfig` field for `com.databricks.sdk.service.serving.ExternalModel`. 
+* Added `fallbackConfig` field for `com.databricks.sdk.service.serving.PutAiGatewayRequest`. +* Added `fallbackConfig` field for `com.databricks.sdk.service.serving.PutAiGatewayResponse`. +* Added `aliases`, `comment`, `dataType`, `dependencyList`, `fullDataType`, `id`, `inputParams`, `name`, `properties`, `routineDefinition`, `schema`, `securableKind`, `share`, `shareId`, `storageLocation` and `tags` fields for `com.databricks.sdk.service.sharing.DeltaSharingFunction`. +* Added `querySource` field for `com.databricks.sdk.service.sql.QueryInfo`. +* Added `FOREIGN_CATALOG` enum value for `com.databricks.sdk.service.catalog.CatalogType`. +* Added `BROWSE` enum value for `com.databricks.sdk.service.catalog.Privilege`. +* Added `ACCESS_TOKEN_FAILURE`, `ALLOCATION_TIMEOUT`, `ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY`, `ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS`, `ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS`, `ALLOCATION_TIMEOUT_NO_READY_CLUSTERS`, `ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS`, `ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS`, `AWS_INACCESSIBLE_KMS_KEY_FAILURE`, `AWS_INSTANCE_PROFILE_UPDATE_FAILURE`, `AWS_INVALID_KEY_PAIR`, `AWS_INVALID_KMS_KEY_STATE`, `AWS_RESOURCE_QUOTA_EXCEEDED`, `AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE`, `BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG`, `BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED`, `BUDGET_POLICY_RESOLUTION_FAILURE`, `CLOUD_ACCOUNT_SETUP_FAILURE`, `CLOUD_OPERATION_CANCELLED`, `CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED`, `CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG`, `CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG`, `CLUSTER_OPERATION_THROTTLED`, `CLUSTER_OPERATION_TIMEOUT`, `CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG`, `DATA_ACCESS_CONFIG_CHANGED`, `DISASTER_RECOVERY_REPLICATION`, `DRIVER_EVICTION`, `DRIVER_LAUNCH_TIMEOUT`, `DRIVER_NODE_UNREACHABLE`, `DRIVER_OUT_OF_DISK`, `DRIVER_OUT_OF_MEMORY`, `DRIVER_POD_CREATION_FAILURE`, `DRIVER_UNEXPECTED_FAILURE`, `DYNAMIC_SPARK_CONF_SIZE_EXCEEDED`, `EOS_SPARK_IMAGE`, `EXECUTOR_POD_UNSCHEDULED`, `GCP_API_RATE_QUOTA_EXCEEDED`, `GCP_FORBIDDEN`, `GCP_IAM_TIMEOUT`, `GCP_INACCESSIBLE_KMS_KEY_FAILURE`, `GCP_INSUFFICIENT_CAPACITY`, `GCP_IP_SPACE_EXHAUSTED`, `GCP_KMS_KEY_PERMISSION_DENIED`, `GCP_NOT_FOUND`, `GCP_RESOURCE_QUOTA_EXCEEDED`, `GCP_SERVICE_ACCOUNT_ACCESS_DENIED`, `GCP_SERVICE_ACCOUNT_NOT_FOUND`, `GCP_SUBNET_NOT_READY`, `GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED`, `GKE_BASED_CLUSTER_TERMINATION`, `INIT_CONTAINER_NOT_FINISHED`, `INSTANCE_POOL_MAX_CAPACITY_REACHED`, `INSTANCE_POOL_NOT_FOUND`, `INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG`, `INTERNAL_CAPACITY_FAILURE`, `INVALID_AWS_PARAMETER`, `INVALID_INSTANCE_PLACEMENT_PROTOCOL`, `INVALID_WORKER_IMAGE_FAILURE`, `IN_PENALTY_BOX`, `LAZY_ALLOCATION_TIMEOUT`, `MAINTENANCE_MODE`, `NETVISOR_SETUP_TIMEOUT`, `NO_MATCHED_K8S`, `NO_MATCHED_K8S_TESTING_TAG`, `POD_ASSIGNMENT_FAILURE`, `POD_SCHEDULING_FAILURE`, `RESOURCE_USAGE_BLOCKED`, `SECRET_CREATION_FAILURE`, `SERVERLESS_LONG_RUNNING_TERMINATED`, `SPARK_IMAGE_DOWNLOAD_THROTTLED`, `SPARK_IMAGE_NOT_FOUND`, `SSH_BOOTSTRAP_FAILURE`, `STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG`, `STORAGE_DOWNLOAD_FAILURE_SLOW`, `STORAGE_DOWNLOAD_FAILURE_THROTTLED`, `UNEXPECTED_POD_RECREATION`, `USER_INITIATED_VM_TERMINATION` and `WORKSPACE_UPDATE` enum values for `com.databricks.sdk.service.compute.TerminationReasonCode`. +* Added `GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION` and `MISSING_SQL_QUERY_EXCEPTION` enum values for `com.databricks.sdk.service.dashboards.MessageErrorType`. +* Added `BALANCED` enum value for `com.databricks.sdk.service.jobs.PerformanceTarget`. 
+* Added `LISTING_RESOURCE` enum value for `com.databricks.sdk.service.marketplace.FileParentType`. +* Added `APP` enum value for `com.databricks.sdk.service.marketplace.MarketplaceFileType`. +* Added `CUSTOM` enum value for `com.databricks.sdk.service.serving.ExternalModelProvider`. +* Added `ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN` and `ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY` enum values for `com.databricks.sdk.service.settings.TokenType`. +* [Breaking] Changed `createExperiment()` method for `workspaceClient.forecasting()` service with new required argument order. +* Changed `instanceTypeId` field for `com.databricks.sdk.service.compute.NodeInstanceType` to be required. +* Changed `category` field for `com.databricks.sdk.service.compute.NodeType` to be required. +* [Breaking] Changed `functions` field for `com.databricks.sdk.service.sharing.ListProviderShareAssetsResponse` to type `com.databricks.sdk.service.sharing.DeltaSharingFunctionList` class. +* [Breaking] Removed `executionDetails` and `script` fields for `com.databricks.sdk.service.compute.InitScriptInfoAndExecutionDetails`. +* [Breaking] Removed `supportsElasticDisk` field for `com.databricks.sdk.service.compute.NodeType`. +* [Breaking] Removed `dataGranularityQuantity` and `dataGranularityUnit` fields for `com.databricks.sdk.service.ml.CreateForecastingExperimentRequest`. +* [Breaking] Removed `aliases`, `comment`, `dataType`, `dependencyList`, `fullDataType`, `id`, `inputParams`, `name`, `properties`, `routineDefinition`, `schema`, `securableKind`, `share`, `shareId`, `storageLocation` and `tags` fields for `com.databricks.sdk.service.sharing.Function`. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java index 0b0d38da1..97b6f3b19 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java @@ -52,6 +52,10 @@ public class App { @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; + /** The effective api scopes granted to the user access token. */ + @JsonProperty("effective_user_api_scopes") + private Collection effectiveUserApiScopes; + /** The unique identifier of the app. */ @JsonProperty("id") private String id; @@ -63,6 +67,14 @@ public class App { @JsonProperty("name") private String name; + /** */ + @JsonProperty("oauth2_app_client_id") + private String oauth2AppClientId; + + /** */ + @JsonProperty("oauth2_app_integration_id") + private String oauth2AppIntegrationId; + /** * The pending deployment of the app. A deployment is considered pending when it is being prepared * for deployment to the app compute. 
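A minimal usage sketch of the new `App` user-authorization fields, using only the builder-style setters and getters added in this diff; the app name and scope strings are illustrative placeholders, and the `String` element type is assumed from the scope semantics rather than stated by the diff:

    import com.databricks.sdk.service.apps.App;
    import java.util.Arrays;
    import java.util.Collection;

    public class AppScopesSketch {
      public static void main(String[] args) {
        // Request API scopes for the app's user access token via the new
        // user_api_scopes field (placeholder scope names, not documented values).
        App app =
            new App()
                .setName("my-app")
                .setUserApiScopes(Arrays.asList("sql", "files.files"));

        // On a read path the service reports what was actually granted and the
        // linked OAuth2 app integration; locally these getters return null until set.
        Collection<String> granted = app.getEffectiveUserApiScopes();
        String clientId = app.getOauth2AppClientId();
        String integrationId = app.getOauth2AppIntegrationId();
        System.out.println(granted + " " + clientId + " " + integrationId);
      }
    }
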
@@ -98,6 +110,10 @@ public class App { @JsonProperty("url") private String url; + /** */ + @JsonProperty("user_api_scopes") + private Collection userApiScopes; + public App setActiveDeployment(AppDeployment activeDeployment) { this.activeDeployment = activeDeployment; return this; @@ -179,6 +195,15 @@ public String getEffectiveBudgetPolicyId() { return effectiveBudgetPolicyId; } + public App setEffectiveUserApiScopes(Collection effectiveUserApiScopes) { + this.effectiveUserApiScopes = effectiveUserApiScopes; + return this; + } + + public Collection getEffectiveUserApiScopes() { + return effectiveUserApiScopes; + } + public App setId(String id) { this.id = id; return this; @@ -197,6 +222,24 @@ public String getName() { return name; } + public App setOauth2AppClientId(String oauth2AppClientId) { + this.oauth2AppClientId = oauth2AppClientId; + return this; + } + + public String getOauth2AppClientId() { + return oauth2AppClientId; + } + + public App setOauth2AppIntegrationId(String oauth2AppIntegrationId) { + this.oauth2AppIntegrationId = oauth2AppIntegrationId; + return this; + } + + public String getOauth2AppIntegrationId() { + return oauth2AppIntegrationId; + } + public App setPendingDeployment(AppDeployment pendingDeployment) { this.pendingDeployment = pendingDeployment; return this; @@ -269,6 +312,15 @@ public String getUrl() { return url; } + public App setUserApiScopes(Collection userApiScopes) { + this.userApiScopes = userApiScopes; + return this; + } + + public Collection getUserApiScopes() { + return userApiScopes; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -283,8 +335,11 @@ public boolean equals(Object o) { && Objects.equals(defaultSourceCodePath, that.defaultSourceCodePath) && Objects.equals(description, that.description) && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) + && Objects.equals(effectiveUserApiScopes, that.effectiveUserApiScopes) && Objects.equals(id, that.id) && Objects.equals(name, that.name) + && Objects.equals(oauth2AppClientId, that.oauth2AppClientId) + && Objects.equals(oauth2AppIntegrationId, that.oauth2AppIntegrationId) && Objects.equals(pendingDeployment, that.pendingDeployment) && Objects.equals(resources, that.resources) && Objects.equals(servicePrincipalClientId, that.servicePrincipalClientId) @@ -292,7 +347,8 @@ public boolean equals(Object o) { && Objects.equals(servicePrincipalName, that.servicePrincipalName) && Objects.equals(updateTime, that.updateTime) && Objects.equals(updater, that.updater) - && Objects.equals(url, that.url); + && Objects.equals(url, that.url) + && Objects.equals(userApiScopes, that.userApiScopes); } @Override @@ -307,8 +363,11 @@ public int hashCode() { defaultSourceCodePath, description, effectiveBudgetPolicyId, + effectiveUserApiScopes, id, name, + oauth2AppClientId, + oauth2AppIntegrationId, pendingDeployment, resources, servicePrincipalClientId, @@ -316,7 +375,8 @@ public int hashCode() { servicePrincipalName, updateTime, updater, - url); + url, + userApiScopes); } @Override @@ -331,8 +391,11 @@ public String toString() { .add("defaultSourceCodePath", defaultSourceCodePath) .add("description", description) .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId) + .add("effectiveUserApiScopes", effectiveUserApiScopes) .add("id", id) .add("name", name) + .add("oauth2AppClientId", oauth2AppClientId) + .add("oauth2AppIntegrationId", oauth2AppIntegrationId) .add("pendingDeployment", pendingDeployment) .add("resources", resources) .add("servicePrincipalClientId", 
servicePrincipalClientId) @@ -341,6 +404,7 @@ public String toString() { .add("updateTime", updateTime) .add("updater", updater) .add("url", url) + .add("userApiScopes", userApiScopes) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java index 86e2737b1..aa6143b63 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetPolicy.java @@ -21,7 +21,8 @@ public class BudgetPolicy { /** * The name of the policy. - Must be unique among active policies. - Can contain only characters - * from the ISO 8859-1 (latin1) set. + * from the ISO 8859-1 (latin1) set. - Can't start with reserved keywords such as + * `databricks:default-policy`. */ @JsonProperty("policy_name") private String policyName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java index 4893be2d2..47f4d0ce0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogType.java @@ -8,6 +8,7 @@ @Generated public enum CatalogType { DELTASHARING_CATALOG, + FOREIGN_CATALOG, MANAGED_CATALOG, SYSTEM_CATALOG, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java index 3a1b3c5e3..204671a6b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java @@ -9,6 +9,7 @@ public enum Privilege { ACCESS, ALL_PRIVILEGES, APPLY_TAG, + BROWSE, CREATE, CREATE_CATALOG, CREATE_CONNECTION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java index ff6867b05..7dea44dd8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateCredentialRequest.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Next ID: 17 */ @Generated public class ValidateCredentialRequest { /** The AWS IAM role configuration */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java index 2a313db0f..9aa66fa06 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Adlsgen2Info.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** A storage location in Adls Gen2 */ @Generated public class Adlsgen2Info { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java index 505b70166..825127bce 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Attributes set during cluster creation which are related to Amazon Web Services. */ @Generated public class AwsAttributes { /** @@ -83,8 +84,6 @@ public class AwsAttributes { * administrator. * *

This feature may only be available to certain customer plans. - * - *

If this field is ommitted, we will pull in the default from the conf if it exists. */ @JsonProperty("instance_profile_arn") private String instanceProfileArn; @@ -98,9 +97,6 @@ public class AwsAttributes { * instances are requested for this cluster, only spot instances whose bid price percentage * matches this field will be considered. Note that, for safety, we enforce this field to be no * more than 10000. - * - *

The default value and documentation here should be kept consistent with - * CommonConf.defaultSpotBidPricePercent and CommonConf.maxSpotBidPricePercent. */ @JsonProperty("spot_bid_price_percent") private Long spotBidPricePercent; @@ -112,8 +108,10 @@ public class AwsAttributes { * deployment resides in the "us-east-1" region. This is an optional field at cluster creation, * and if not specified, a default zone will be used. If the zone specified is "auto", will try to * place cluster in a zone with high availability, and will retry placement in a different AZ if - * there is not enough capacity. The list of available zones as well as the default value can be - * found by using the `List Zones` method. + * there is not enough capacity. + * + *

The list of available zones as well as the default value can be found by using the `List + * Zones` method. */ @JsonProperty("zone_id") private String zoneId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java index b93a872e3..ec8f49702 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAttributes.java @@ -7,12 +7,12 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Attributes set during cluster creation which are related to Microsoft Azure. */ @Generated public class AzureAttributes { /** * Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If - * `first_on_demand` is zero (which only happens on pool clusters), this availability type will be - * used for the entire cluster. + * `first_on_demand` is zero, this availability type will be used for the entire cluster. */ @JsonProperty("availability") private AzureAvailability availability; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAvailability.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAvailability.java index 886a11a80..1f68f9678 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAvailability.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AzureAvailability.java @@ -6,8 +6,7 @@ /** * Availability type used for all subsequent nodes past the `first_on_demand` ones. Note: If - * `first_on_demand` is zero (which only happens on pool clusters), this availability type will be - * used for the entire cluster. + * `first_on_demand` is zero, this availability type will be used for the entire cluster. 
*/ @Generated public enum AzureAvailability { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java index 578be107c..2bd85e237 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java @@ -9,7 +9,7 @@ @Generated public class ChangeClusterOwner { - /** */ + /** */ @JsonProperty("cluster_id") private String clusterId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java index 0519bca38..ca72a882b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java @@ -10,7 +10,7 @@ @Generated public class CloudProviderNodeInfo { - /** */ + /** Status as reported by the cloud provider */ @JsonProperty("status") private Collection status; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java index 5632c4bdb..38f4c5061 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java @@ -9,6 +9,10 @@ import java.util.Map; import java.util.Objects; +/** + * Common set of attributes set during cluster creation. These attributes cannot be changed over the + * lifetime of a cluster. + */ @Generated public class ClusterAttributes { /** @@ -93,7 +97,7 @@ public class ClusterAttributes { @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; - /** */ + /** Custom docker image BYOC */ @JsonProperty("docker_image") private DockerImage dockerImage; @@ -107,6 +111,10 @@ public class ClusterAttributes { /** * The node type of the Spark driver. Note that this field is optional; if unset, the driver node * type will be set as the same value as `node_type_id` defined above. + * + *

This field, along with node_type_id, should not be set if virtual_cluster_size is set. If + * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, + * driver_node_type_id and node_type_id take precedence. */ @JsonProperty("driver_node_type_id") private String driverNodeTypeId; @@ -246,7 +254,7 @@ public class ClusterAttributes { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** */ + /** Cluster Attributes showing for clusters workload types. */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java index 0311bee84..3623d38c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java @@ -9,6 +9,7 @@ import java.util.Map; import java.util.Objects; +/** Describes all of the metadata about a single Spark cluster in Databricks. */ @Generated public class ClusterDetails { /** @@ -83,7 +84,7 @@ public class ClusterDetails { /** * Determines whether the cluster was created by a user through the UI, created by the Databricks - * Jobs Scheduler, or through an API request. This is the same as cluster_creator, but read only. + * Jobs Scheduler, or through an API request. */ @JsonProperty("cluster_source") private ClusterSource clusterSource; @@ -153,7 +154,7 @@ public class ClusterDetails { @JsonProperty("default_tags") private Map defaultTags; - /** */ + /** Custom docker image BYOC */ @JsonProperty("docker_image") private DockerImage dockerImage; @@ -174,6 +175,10 @@ public class ClusterDetails { /** * The node type of the Spark driver. Note that this field is optional; if unset, the driver node * type will be set as the same value as `node_type_id` defined above. + * + *

This field, along with node_type_id, should not be set if virtual_cluster_size is set. If + * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, + * driver_node_type_id and node_type_id take precedence. */ @JsonProperty("driver_node_type_id") private String driverNodeTypeId; @@ -337,10 +342,8 @@ public class ClusterDetails { private String sparkVersion; /** - * `spec` contains a snapshot of the field values that were used to create or edit this cluster. - * The contents of `spec` can be used in the body of a create cluster request. This field might - * not be populated for older clusters. Note: not included in the response of the ListClusters - * API. + * The spec contains a snapshot of the latest user specified settings that were used to + * create/edit the cluster. Note: not included in the response of the ListClusters API. */ @JsonProperty("spec") private ClusterSpec spec; @@ -391,7 +394,7 @@ public class ClusterDetails { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** */ + /** Cluster Attributes showing for clusters workload types. */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterEvent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterEvent.java index 48b4abb91..c6bf7cd5c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterEvent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterEvent.java @@ -9,15 +9,15 @@ @Generated public class ClusterEvent { - /** */ + /** */ @JsonProperty("cluster_id") private String clusterId; - /** */ + /** */ @JsonProperty("data_plane_event_details") private DataPlaneEventDetails dataPlaneEventDetails; - /** */ + /** */ @JsonProperty("details") private EventDetails details; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterLogConf.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterLogConf.java index d73ce9854..e22885f56 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterLogConf.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterLogConf.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Cluster log delivery config */ @Generated public class ClusterLogConf { /** @@ -26,7 +27,7 @@ public class ClusterLogConf { private S3StorageInfo s3; /** - * destination needs to be provided. e.g. `{ "volumes" : { "destination" : + * destination needs to be provided, e.g. `{ "volumes": { "destination": * "/Volumes/catalog/schema/volume/cluster_log" } }` */ @JsonProperty("volumes") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java index c1b8ed876..31235a846 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java @@ -9,6 +9,10 @@ import java.util.Map; import java.util.Objects; +/** + * Contains a snapshot of the latest user specified settings that were used to create/edit the + * cluster. 
+ */ @Generated public class ClusterSpec { /** @@ -107,7 +111,7 @@ public class ClusterSpec { @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; - /** */ + /** Custom docker image BYOC */ @JsonProperty("docker_image") private DockerImage dockerImage; @@ -121,6 +125,10 @@ public class ClusterSpec { /** * The node type of the Spark driver. Note that this field is optional; if unset, the driver node * type will be set as the same value as `node_type_id` defined above. + * + *

This field, along with node_type_id, should not be set if virtual_cluster_size is set. If + * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, + * driver_node_type_id and node_type_id take precedence. */ @JsonProperty("driver_node_type_id") private String driverNodeTypeId; @@ -273,7 +281,7 @@ public class ClusterSpec { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** */ + /** Cluster Attributes showing for clusters workload types. */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java index 285d93495..b27e7aad7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java @@ -165,8 +165,11 @@ public Wait create(String sparkVersion) { * Create new cluster. * *

Creates a new Spark cluster. This method will acquire new instances from the cloud provider - * if necessary. Note: Databricks may not be able to acquire some of the requested nodes, due to - * cloud provider limitations (account limits, spot price, etc.) or transient network issues. + * if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the + * cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The + * cluster will be usable once it enters a ``RUNNING`` state. Note: Databricks may not be able to + * acquire some of the requested nodes, due to cloud provider limitations (account limits, spot + * price, etc.) or transient network issues. * *

If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will * succeed. Otherwise the cluster will terminate with an informative error message. @@ -234,8 +237,8 @@ public Iterable events(String clusterId) { * List cluster activity events. * *

Retrieves a list of events about the activity of a cluster. This API is paginated. If there - * are more events to read, the response includes all the nparameters necessary to request the - * next page of events. + * are more events to read, the response includes all the parameters necessary to request the next + * page of events. */ public Iterable events(GetEvents request) { return new Paginator<>( @@ -420,12 +423,11 @@ public Wait start(String clusterId) { * Start terminated cluster. * *

Starts a terminated Spark cluster with the supplied ID. This works similar to - * `createCluster` except: - * - *

* The previous cluster id and attributes are preserved. * The cluster starts with the last - * specified cluster size. * If the previous cluster was an autoscaling cluster, the current - * cluster starts with the minimum number of nodes. * If the cluster is not currently in a - * `TERMINATED` state, nothing will happen. * Clusters launched to run a job cannot be started. + * `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster + * starts with the last specified cluster size. - If the previous cluster was an autoscaling + * cluster, the current cluster starts with the minimum number of nodes. - If the cluster is not + * currently in a ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job + * cannot be started. */ public Wait start(StartCluster request) { impl.start(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java index b85b439b9..48bdd74a0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java @@ -45,8 +45,11 @@ public interface ClustersService { * Create new cluster. * *

Creates a new Spark cluster. This method will acquire new instances from the cloud provider - * if necessary. Note: Databricks may not be able to acquire some of the requested nodes, due to - * cloud provider limitations (account limits, spot price, etc.) or transient network issues. + * if necessary. This method is asynchronous; the returned ``cluster_id`` can be used to poll the + * cluster status. When this method returns, the cluster will be in a ``PENDING`` state. The + * cluster will be usable once it enters a ``RUNNING`` state. Note: Databricks may not be able to + * acquire some of the requested nodes, due to cloud provider limitations (account limits, spot + * price, etc.) or transient network issues. * *

If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will * succeed. Otherwise the cluster will terminate with an informative error message. @@ -89,8 +92,8 @@ public interface ClustersService { * List cluster activity events. * *

Retrieves a list of events about the activity of a cluster. This API is paginated. If there - * are more events to read, the response includes all the nparameters necessary to request the - * next page of events. + * are more events to read, the response includes all the parameters necessary to request the next + * page of events. */ GetEventsResponse events(GetEvents getEvents); @@ -198,12 +201,11 @@ GetClusterPermissionLevelsResponse getPermissionLevels( * Start terminated cluster. * *

Starts a terminated Spark cluster with the supplied ID. This works similar to - * `createCluster` except: - * - *

* The previous cluster id and attributes are preserved. * The cluster starts with the last - * specified cluster size. * If the previous cluster was an autoscaling cluster, the current - * cluster starts with the minimum number of nodes. * If the cluster is not currently in a - * `TERMINATED` state, nothing will happen. * Clusters launched to run a job cannot be started. + * `createCluster` except: - The previous cluster id and attributes are preserved. - The cluster + * starts with the last specified cluster size. - If the previous cluster was an autoscaling + * cluster, the current cluster starts with the minimum number of nodes. - If the cluster is not + * currently in a ``TERMINATED`` state, nothing will happen. - Clusters launched to run a job + * cannot be started. */ void start(StartCluster startCluster); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java index 460119af2..a8c6b9270 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java @@ -114,7 +114,7 @@ public class CreateCluster { @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; - /** */ + /** Custom docker image BYOC */ @JsonProperty("docker_image") private DockerImage dockerImage; @@ -128,6 +128,10 @@ public class CreateCluster { /** * The node type of the Spark driver. Note that this field is optional; if unset, the driver node * type will be set as the same value as `node_type_id` defined above. + * + *

This field, along with node_type_id, should not be set if virtual_cluster_size is set. If + * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, + * driver_node_type_id and node_type_id take precedence. */ @JsonProperty("driver_node_type_id") private String driverNodeTypeId; @@ -280,7 +284,7 @@ public class CreateCluster { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** */ + /** Cluster Attributes showing for clusters workload types. */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetails.java index 56d087348..4ff098588 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetails.java @@ -9,19 +9,19 @@ @Generated public class DataPlaneEventDetails { - /** */ + /** */ @JsonProperty("event_type") private DataPlaneEventDetailsEventType eventType; - /** */ + /** */ @JsonProperty("executor_failures") private Long executorFailures; - /** */ + /** */ @JsonProperty("host_id") private String hostId; - /** */ + /** */ @JsonProperty("timestamp") private Long timestamp; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsEventType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsEventType.java index 3993eadda..da8d16b40 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsEventType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsEventType.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** */ @Generated public enum DataPlaneEventDetailsEventType { NODE_BLACKLISTED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java index eb8d67479..a41d4b921 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** A storage location in DBFS */ @Generated public class DbfsStorageInfo { /** dbfs destination, e.g. `dbfs:/my/path` */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EbsVolumeType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EbsVolumeType.java index f4669b927..614f37d84 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EbsVolumeType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EbsVolumeType.java @@ -4,7 +4,10 @@ import com.databricks.sdk.support.Generated; -/** The type of EBS volumes that will be launched with this cluster. */ +/** + * All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for + * details. 
+ */ @Generated public enum EbsVolumeType { GENERAL_PURPOSE_SSD, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index 0183721b9..c10ea0a7a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -111,7 +111,7 @@ public class EditCluster { @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; - /** */ + /** Custom docker image BYOC */ @JsonProperty("docker_image") private DockerImage dockerImage; @@ -125,6 +125,10 @@ public class EditCluster { /** * The node type of the Spark driver. Note that this field is optional; if unset, the driver node * type will be set as the same value as `node_type_id` defined above. + * + *

This field, along with node_type_id, should not be set if virtual_cluster_size is set. If + * both driver_node_type_id, node_type_id, and virtual_cluster_size are specified, + * driver_node_type_id and node_type_id take precedence. */ @JsonProperty("driver_node_type_id") private String driverNodeTypeId; @@ -277,7 +281,7 @@ public class EditCluster { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** */ + /** Cluster Attributes showing for clusters workload types. */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetails.java index 3e7029da1..a365e67d5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetails.java @@ -32,7 +32,7 @@ public class EventDetails { @JsonProperty("current_num_workers") private Long currentNumWorkers; - /** */ + /** */ @JsonProperty("did_not_expand_reason") private String didNotExpandReason; @@ -50,7 +50,7 @@ public class EventDetails { @JsonProperty("enable_termination_for_node_blocklisted") private Boolean enableTerminationForNodeBlocklisted; - /** */ + /** */ @JsonProperty("free_space") private Long freeSpace; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java index 3539111fe..61d5be7d7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcpAttributes.java @@ -7,16 +7,18 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Attributes set during cluster creation which are related to GCP. */ @Generated public class GcpAttributes { /** - * This field determines whether the instance pool will contain preemptible VMs, on-demand VMs, or - * preemptible VMs with a fallback to on-demand VMs if the former is unavailable. + * This field determines whether the spark executors will be scheduled to run on preemptible VMs, + * on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is + * unavailable. */ @JsonProperty("availability") private GcpAvailability availability; - /** boot disk size in GB */ + /** Boot disk size in GB */ @JsonProperty("boot_disk_size") private Long bootDiskSize; @@ -42,7 +44,7 @@ public class GcpAttributes { /** * This field determines whether the spark executors will be scheduled to run on preemptible VMs * (when set to true) versus standard compute engine VMs (when set to false; default). Note: Soon - * to be deprecated, use the availability field instead. + * to be deprecated, use the 'availability' field instead. */ @JsonProperty("use_preemptible_executors") private Boolean usePreemptibleExecutors; @@ -50,7 +52,7 @@ public class GcpAttributes { /** * Identifier for the availability zone in which the cluster resides. This can be one of the * following: - "HA" => High availability, spread nodes across availability zones for a Databricks - * deployment region [default] - "AUTO" => Databricks picks an availability zone to schedule the + * deployment region [default]. - "AUTO" => Databricks picks an availability zone to schedule the * cluster on. 
- A GCP availability zone => Pick One of the available zones for (machine type + * region) from https://cloud.google.com/compute/docs/regions-zones. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfo.java index dfea55a37..819a421ef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GcsStorageInfo.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** A storage location in Google Cloud Platform's GCS */ @Generated public class GcsStorageInfo { /** GCS destination/URI, e.g. `gs://my-bucket/some-prefix` */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsOrder.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsOrder.java index eccb9e121..27a07dc80 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsOrder.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsOrder.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** The order to list events in; either "ASC" or "DESC". Defaults to "DESC". */ @Generated public enum GetEventsOrder { ASC, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java index e1dff0630..7499839bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetEventsResponse.java @@ -10,7 +10,7 @@ @Generated public class GetEventsResponse { - /** */ + /** */ @JsonProperty("events") private Collection events; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetails.java index e8c8ffd0b..f8b84ae54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptEventDetails.java @@ -10,15 +10,21 @@ @Generated public class InitScriptEventDetails { - /** The cluster scoped init scripts associated with this cluster event */ + /** The cluster scoped init scripts associated with this cluster event. */ @JsonProperty("cluster") private Collection cluster; - /** The global init scripts associated with this cluster event */ + /** The global init scripts associated with this cluster event. */ @JsonProperty("global") private Collection global; - /** The private ip address of the node where the init scripts were run. */ + /** + * The private ip of the node we are reporting init script execution details for (we will select + * the execution details from only one node rather than reporting the execution details from every + * node to keep these event details small) + * + *

This should only be defined for the INIT_SCRIPTS_FINISHED event + */ @JsonProperty("reported_for_node") private String reportedForNode; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetails.java deleted file mode 100755 index 3e225c78a..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetails.java +++ /dev/null @@ -1,74 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class InitScriptExecutionDetails { - /** Addition details regarding errors. */ - @JsonProperty("error_message") - private String errorMessage; - - /** The duration of the script execution in seconds. */ - @JsonProperty("execution_duration_seconds") - private Long executionDurationSeconds; - - /** The current status of the script */ - @JsonProperty("status") - private InitScriptExecutionDetailsStatus status; - - public InitScriptExecutionDetails setErrorMessage(String errorMessage) { - this.errorMessage = errorMessage; - return this; - } - - public String getErrorMessage() { - return errorMessage; - } - - public InitScriptExecutionDetails setExecutionDurationSeconds(Long executionDurationSeconds) { - this.executionDurationSeconds = executionDurationSeconds; - return this; - } - - public Long getExecutionDurationSeconds() { - return executionDurationSeconds; - } - - public InitScriptExecutionDetails setStatus(InitScriptExecutionDetailsStatus status) { - this.status = status; - return this; - } - - public InitScriptExecutionDetailsStatus getStatus() { - return status; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - InitScriptExecutionDetails that = (InitScriptExecutionDetails) o; - return Objects.equals(errorMessage, that.errorMessage) - && Objects.equals(executionDurationSeconds, that.executionDurationSeconds) - && Objects.equals(status, that.status); - } - - @Override - public int hashCode() { - return Objects.hash(errorMessage, executionDurationSeconds, status); - } - - @Override - public String toString() { - return new ToStringer(InitScriptExecutionDetails.class) - .add("errorMessage", errorMessage) - .add("executionDurationSeconds", executionDurationSeconds) - .add("status", status) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetailsStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetailsInitScriptExecutionStatus.java similarity index 67% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetailsStatus.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetailsInitScriptExecutionStatus.java index 50a1227c6..c04f1d30d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetailsStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptExecutionDetailsInitScriptExecutionStatus.java @@ -4,11 +4,12 @@ import 
com.databricks.sdk.support.Generated; -/** The current status of the script */ +/** Result of attempted script execution */ @Generated -public enum InitScriptExecutionDetailsStatus { +public enum InitScriptExecutionDetailsInitScriptExecutionStatus { FAILED_EXECUTION, FAILED_FETCH, + FUSE_MOUNT_FAILED, NOT_EXECUTED, SKIPPED, SUCCEEDED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java index 07b60eded..9a696988e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfo.java @@ -7,54 +7,55 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Config for an individual init script. Next ID: 11 */ @Generated public class InitScriptInfo { /** - * destination needs to be provided. e.g. `{ "abfss" : { "destination" : - * "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } } + * destination needs to be provided, e.g. + * `abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>` */ @JsonProperty("abfss") private Adlsgen2Info abfss; /** - * destination needs to be provided. e.g. `{ "dbfs" : { "destination" : "dbfs:/home/cluster_log" } + * destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } * }` */ @JsonProperty("dbfs") private DbfsStorageInfo dbfs; /** - * destination needs to be provided. e.g. `{ "file" : { "destination" : "file:/my/local/file.sh" } + * destination needs to be provided, e.g. `{ "file": { "destination": "file:/my/local/file.sh" } * }` */ @JsonProperty("file") private LocalFileInfo file; /** - * destination needs to be provided. e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }` + * destination needs to be provided, e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }` */ @JsonProperty("gcs") private GcsStorageInfo gcs; /** - * destination and either the region or endpoint need to be provided. e.g. `{ "s3": { - * "destination" : "s3://cluster_log_bucket/prefix", "region" : "us-west-2" } }` Cluster iam role - * is used to access s3, please make sure the cluster iam role in `instance_profile_arn` has + * destination and either the region or endpoint need to be provided. e.g. `{ \"s3\": { + * \"destination\": \"s3://cluster_log_bucket/prefix\", \"region\": \"us-west-2\" } }` Cluster iam + * role is used to access s3, please make sure the cluster iam role in `instance_profile_arn` has * permission to write data to the s3 destination. */ @JsonProperty("s3") private S3StorageInfo s3; /** - * destination needs to be provided. e.g. `{ "volumes" : { "destination" : "/Volumes/my-init.sh" } - * }` + * destination needs to be provided. e.g. `{ \"volumes\" : { \"destination\" : + * \"/Volumes/my-init.sh\" } }` */ @JsonProperty("volumes") private VolumesStorageInfo volumes; /** - * destination needs to be provided. e.g. `{ "workspace" : { "destination" : - * "/Users/user1@databricks.com/my-init.sh" } }` + * destination needs to be provided, e.g.
`{ "workspace": { "destination": + * "/cluster-init-scripts/setup-datadog.sh" } }` */ @JsonProperty("workspace") private WorkspaceStorageInfo workspace; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java index aebdf15a7..44fd3c1f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InitScriptInfoAndExecutionDetails.java @@ -9,31 +9,162 @@ @Generated public class InitScriptInfoAndExecutionDetails { - /** Details about the script */ - @JsonProperty("execution_details") - private InitScriptExecutionDetails executionDetails; + /** + * destination needs to be provided, e.g. + * `abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>` + */ + @JsonProperty("abfss") + private Adlsgen2Info abfss; - /** The script */ - @JsonProperty("script") - private InitScriptInfo script; + /** + * destination needs to be provided. e.g. `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } + * }` + */ + @JsonProperty("dbfs") + private DbfsStorageInfo dbfs; - public InitScriptInfoAndExecutionDetails setExecutionDetails( - InitScriptExecutionDetails executionDetails) { - this.executionDetails = executionDetails; + /** + * Additional details regarding errors (such as a file not found message if the status is + * FAILED_FETCH). This field should only be used to provide *additional* information to the status + * field, not duplicate it. + */ + @JsonProperty("error_message") + private String errorMessage; + + /** The duration of the script execution in seconds. */ + @JsonProperty("execution_duration_seconds") + private Long executionDurationSeconds; + + /** + * destination needs to be provided, e.g. `{ "file": { "destination": "file:/my/local/file.sh" } + * }` + */ + @JsonProperty("file") + private LocalFileInfo file; + + /** + * destination needs to be provided, e.g. `{ "gcs": { "destination": "gs://my-bucket/file.sh" } }` + */ + @JsonProperty("gcs") + private GcsStorageInfo gcs; + + /** + * destination and either the region or endpoint need to be provided. e.g. `{ \"s3\": { + * \"destination\": \"s3://cluster_log_bucket/prefix\", \"region\": \"us-west-2\" } }` Cluster iam + * role is used to access s3, please make sure the cluster iam role in `instance_profile_arn` has + * permission to write data to the s3 destination. + */ + @JsonProperty("s3") + private S3StorageInfo s3; + + /** The current status of the script */ + @JsonProperty("status") + private InitScriptExecutionDetailsInitScriptExecutionStatus status; + + /** + * destination needs to be provided. e.g. `{ \"volumes\" : { \"destination\" : + * \"/Volumes/my-init.sh\" } }` + */ + @JsonProperty("volumes") + private VolumesStorageInfo volumes; + + /** + * destination needs to be provided, e.g.
`{ "workspace": { "destination": + * "/cluster-init-scripts/setup-datadog.sh" } }` + */ + @JsonProperty("workspace") + private WorkspaceStorageInfo workspace; + + public InitScriptInfoAndExecutionDetails setAbfss(Adlsgen2Info abfss) { + this.abfss = abfss; + return this; + } + + public Adlsgen2Info getAbfss() { + return abfss; + } + + public InitScriptInfoAndExecutionDetails setDbfs(DbfsStorageInfo dbfs) { + this.dbfs = dbfs; + return this; + } + + public DbfsStorageInfo getDbfs() { + return dbfs; + } + + public InitScriptInfoAndExecutionDetails setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getErrorMessage() { + return errorMessage; + } + + public InitScriptInfoAndExecutionDetails setExecutionDurationSeconds( + Long executionDurationSeconds) { + this.executionDurationSeconds = executionDurationSeconds; + return this; + } + + public Long getExecutionDurationSeconds() { + return executionDurationSeconds; + } + + public InitScriptInfoAndExecutionDetails setFile(LocalFileInfo file) { + this.file = file; + return this; + } + + public LocalFileInfo getFile() { + return file; + } + + public InitScriptInfoAndExecutionDetails setGcs(GcsStorageInfo gcs) { + this.gcs = gcs; + return this; + } + + public GcsStorageInfo getGcs() { + return gcs; + } + + public InitScriptInfoAndExecutionDetails setS3(S3StorageInfo s3) { + this.s3 = s3; + return this; + } + + public S3StorageInfo getS3() { + return s3; + } + + public InitScriptInfoAndExecutionDetails setStatus( + InitScriptExecutionDetailsInitScriptExecutionStatus status) { + this.status = status; + return this; + } + + public InitScriptExecutionDetailsInitScriptExecutionStatus getStatus() { + return status; + } + + public InitScriptInfoAndExecutionDetails setVolumes(VolumesStorageInfo volumes) { + this.volumes = volumes; return this; } - public InitScriptExecutionDetails getExecutionDetails() { - return executionDetails; + public VolumesStorageInfo getVolumes() { + return volumes; } - public InitScriptInfoAndExecutionDetails setScript(InitScriptInfo script) { - this.script = script; + public InitScriptInfoAndExecutionDetails setWorkspace(WorkspaceStorageInfo workspace) { + this.workspace = workspace; return this; } - public InitScriptInfo getScript() { - return script; + public WorkspaceStorageInfo getWorkspace() { + return workspace; } @Override @@ -41,20 +172,46 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; InitScriptInfoAndExecutionDetails that = (InitScriptInfoAndExecutionDetails) o; - return Objects.equals(executionDetails, that.executionDetails) - && Objects.equals(script, that.script); + return Objects.equals(abfss, that.abfss) + && Objects.equals(dbfs, that.dbfs) + && Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(executionDurationSeconds, that.executionDurationSeconds) + && Objects.equals(file, that.file) + && Objects.equals(gcs, that.gcs) + && Objects.equals(s3, that.s3) + && Objects.equals(status, that.status) + && Objects.equals(volumes, that.volumes) + && Objects.equals(workspace, that.workspace); } @Override public int hashCode() { - return Objects.hash(executionDetails, script); + return Objects.hash( + abfss, + dbfs, + errorMessage, + executionDurationSeconds, + file, + gcs, + s3, + status, + volumes, + workspace); } @Override public String toString() { return new ToStringer(InitScriptInfoAndExecutionDetails.class) - .add("executionDetails", executionDetails) - .add("script", 
script) + .add("abfss", abfss) + .add("dbfs", dbfs) + .add("errorMessage", errorMessage) + .add("executionDurationSeconds", executionDurationSeconds) + .add("file", file) + .add("gcs", gcs) + .add("s3", s3) + .add("status", status) + .add("volumes", volumes) + .add("workspace", workspace) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponse.java index c2d6ce5ea..aa7b0a6dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponse.java @@ -10,7 +10,7 @@ @Generated public class ListAvailableZonesResponse { - /** The availability zone if no `zone_id` is provided in the cluster creation request. */ + /** The availability zone if no ``zone_id`` is provided in the cluster creation request. */ @JsonProperty("default_zone") private String defaultZone; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponse.java index d06536644..bbbbe0fb1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersResponse.java @@ -10,7 +10,7 @@ @Generated public class ListClustersResponse { - /** */ + /** */ @JsonProperty("clusters") private Collection clusters; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByDirection.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByDirection.java index ebf0a5be8..a36179e1c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByDirection.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByDirection.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** The direction to sort by. */ @Generated public enum ListClustersSortByDirection { ASC, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByField.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByField.java index bace2d30a..73c4370a3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByField.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByField.java @@ -4,10 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest - * precedence: cluster state, pinned or unpinned, then cluster name. 
- */ @Generated public enum ListClustersSortByField { CLUSTER_NAME, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListSortOrder.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListSortOrder.java index 8dda5ed39..672c9ec25 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListSortOrder.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListSortOrder.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** A generic ordering enum for list-based queries. */ @Generated public enum ListSortOrder { ASC, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java index 4bd96d93c..af2e30072 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java @@ -9,11 +9,11 @@ @Generated public class LogAnalyticsInfo { - /** */ + /** */ @JsonProperty("log_analytics_primary_key") private String logAnalyticsPrimaryKey; - /** */ + /** */ @JsonProperty("log_analytics_workspace_id") private String logAnalyticsWorkspaceId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java index 4a7f830c5..9bd528bc4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** The log delivery status */ @Generated public class LogSyncStatus { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java index 3e3ecd792..184999dbd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java @@ -7,25 +7,30 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * This structure embodies the machine type that hosts spark containers. Note: this should be an + * internal data structure for now. It is defined in proto in case we want to send it over the wire + * in the future (which is likely). + */ @Generated public class NodeInstanceType { - /** */ + /** Unique identifier across instance types */ @JsonProperty("instance_type_id") private String instanceTypeId; - /** */ + /** Size of the individual local disks attached to this instance (i.e. per local disk). */ @JsonProperty("local_disk_size_gb") private Long localDiskSizeGb; - /** */ + /** Number of local disks that are present on this instance. */ @JsonProperty("local_disks") private Long localDisks; - /** */ + /** Size of the individual local NVMe disks attached to this instance (i.e. per local disk). */ @JsonProperty("local_nvme_disk_size_gb") private Long localNvmeDiskSizeGb; - /** */ + /** Number of local NVMe disks that are present on this instance.
*/ @JsonProperty("local_nvme_disks") private Long localNvmeDisks; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java index 23238462d..77a487607 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java @@ -7,9 +7,16 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** + * A description of a Spark node type including both the dimensions of the node and the instance + * type on which it will be hosted. + */ @Generated public class NodeType { - /** */ + /** + * A descriptive category for this node type. Examples include "Memory Optimized" and "Compute + * Optimized". + */ @JsonProperty("category") private String category; @@ -17,7 +24,10 @@ public class NodeType { @JsonProperty("description") private String description; - /** */ + /** + * An optional hint at the display order of node types in the UI. Within a node type category, + * lowest numbers come first. + */ @JsonProperty("display_order") private Long displayOrder; @@ -36,15 +46,15 @@ public class NodeType { @JsonProperty("is_encrypted_in_transit") private Boolean isEncryptedInTransit; - /** */ + /** Whether this is an Arm-based instance. */ @JsonProperty("is_graviton") private Boolean isGraviton; - /** */ + /** Whether this node is hidden from presentation in the UI. */ @JsonProperty("is_hidden") private Boolean isHidden; - /** */ + /** Whether this node comes with IO cache enabled by default. */ @JsonProperty("is_io_cache_enabled") private Boolean isIoCacheEnabled; @@ -52,11 +62,11 @@ public class NodeType { @JsonProperty("memory_mb") private Long memoryMb; - /** */ + /** A collection of node type info reported by the cloud provider */ @JsonProperty("node_info") private CloudProviderNodeInfo nodeInfo; - /** */ + /** The NodeInstanceType object corresponding to instance_type_id */ @JsonProperty("node_instance_type") private NodeInstanceType nodeInstanceType; @@ -72,7 +82,7 @@ public class NodeType { @JsonProperty("num_cores") private Double numCores; - /** */ + /** Number of GPUs available for this node type. */ @JsonProperty("num_gpus") private Long numGpus; @@ -84,25 +94,21 @@ public class NodeType { @JsonProperty("photon_worker_capable") private Boolean photonWorkerCapable; - /** */ + /** Whether this node type supports cluster tags. */ @JsonProperty("support_cluster_tags") private Boolean supportClusterTags; - /** */ + /** + * Whether this node type supports EBS volumes. EBS volumes are disabled for node types where we + * could place multiple corresponding containers on the same hosting instance. + */ @JsonProperty("support_ebs_volumes") private Boolean supportEbsVolumes; - /** */ + /** Whether this node type supports port forwarding. */ @JsonProperty("support_port_forwarding") private Boolean supportPortForwarding; - /** - * Indicates if this node type can be used for an instance pool or cluster with elastic disk - * enabled. This is true for most node types.
- */ - @JsonProperty("supports_elastic_disk") - private Boolean supportsElasticDisk; - public NodeType setCategory(String category) { this.category = category; return this; @@ -283,15 +289,6 @@ public Boolean getSupportPortForwarding() { return supportPortForwarding; } - public NodeType setSupportsElasticDisk(Boolean supportsElasticDisk) { - this.supportsElasticDisk = supportsElasticDisk; - return this; - } - - public Boolean getSupportsElasticDisk() { - return supportsElasticDisk; - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -316,8 +313,7 @@ public boolean equals(Object o) { && Objects.equals(photonWorkerCapable, that.photonWorkerCapable) && Objects.equals(supportClusterTags, that.supportClusterTags) && Objects.equals(supportEbsVolumes, that.supportEbsVolumes) - && Objects.equals(supportPortForwarding, that.supportPortForwarding) - && Objects.equals(supportsElasticDisk, that.supportsElasticDisk); + && Objects.equals(supportPortForwarding, that.supportPortForwarding); } @Override @@ -342,8 +338,7 @@ public int hashCode() { photonWorkerCapable, supportClusterTags, supportEbsVolumes, - supportPortForwarding, - supportsElasticDisk); + supportPortForwarding); } @Override @@ -369,7 +364,6 @@ public String toString() { .add("supportClusterTags", supportClusterTags) .add("supportEbsVolumes", supportEbsVolumes) .add("supportPortForwarding", supportPortForwarding) - .add("supportsElasticDisk", supportsElasticDisk) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java index adc4478b0..acdbf8013 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java @@ -9,7 +9,7 @@ @Generated public class PinCluster { - /** */ + /** */ @JsonProperty("cluster_id") private String clusterId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java index b56dc2c32..ee78f3391 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java @@ -13,7 +13,7 @@ public class RestartCluster { @JsonProperty("cluster_id") private String clusterId; - /** */ + /** */ @JsonProperty("restart_user") private String restartUser; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java index 31d195722..0939b3bae 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java @@ -4,15 +4,6 @@ import com.databricks.sdk.support.Generated; -/** - * Determines the cluster's runtime engine, either standard or Photon. - * - *

This field is not compatible with legacy `spark_version` values that contain `-photon-`. - * Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - * - *

If left unspecified, the runtime engine defaults to standard unless the spark_version contains - * -photon-, in which case Photon will be used. - */ @Generated public enum RuntimeEngine { NULL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfo.java index be097b122..de453f1ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/S3StorageInfo.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** A storage location in Amazon S3 */ @Generated public class S3StorageInfo { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.java index 281b86bc3..4f24cf973 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNode.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Describes a specific Spark driver or executor. */ @Generated public class SparkNode { /** The private IP address of the host instance. */ @@ -36,19 +37,11 @@ public class SparkNode { * Public DNS address of this node. This address can be used to access the Spark JDBC server on * the driver node. To communicate with the JDBC server, traffic must be manually authorized by * adding security group rules to the "worker-unmanaged" security group via the AWS console. - * - *

Actually it's the public DNS address of the host instance. */ @JsonProperty("public_dns") private String publicDns; - /** - * The timestamp (in millisecond) when the Spark node is launched. - * - *

The start_timestamp is set right before the container is being launched. The timestamp when - * the container is placed on the ResourceManager, before its launch and setup by the NodeDaemon. - * This timestamp is the same as the creation timestamp in the database. - */ + /** The timestamp (in milliseconds) when the Spark node is launched. */ @JsonProperty("start_timestamp") private Long startTimestamp; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java index a3128c60e..b3f24f2de 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/SparkNodeAwsAttributes.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Attributes specific to AWS for a Spark node. */ @Generated public class SparkNodeAwsAttributes { /** Whether this node is on an Amazon spot instance. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/State.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/State.java index fa60c3812..a7714ef95 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/State.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/State.java @@ -4,7 +4,14 @@ import com.databricks.sdk.support.Generated; -/** Current state of the cluster. */ +/** + * The state of a Cluster. The current allowable state transitions are as follows: + * + *

- `PENDING` -> `RUNNING` - `PENDING` -> `TERMINATING` - `RUNNING` -> `RESIZING` - `RUNNING` -> + * `RESTARTING` - `RUNNING` -> `TERMINATING` - `RESTARTING` -> `RUNNING` - `RESTARTING` -> + * `TERMINATING` - `RESIZING` -> `RUNNING` - `RESIZING` -> `TERMINATING` - `TERMINATING` -> + * `TERMINATED` + */ @Generated public enum State { ERROR, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java index 206eed33d..bdd74cd8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/TerminationReasonCode.java @@ -4,21 +4,35 @@ import com.databricks.sdk.support.Generated; -/** status code indicating why the cluster was terminated */ +/** The status code indicating why the cluster was terminated */ @Generated public enum TerminationReasonCode { ABUSE_DETECTED, + ACCESS_TOKEN_FAILURE, + ALLOCATION_TIMEOUT, + ALLOCATION_TIMEOUT_NODE_DAEMON_NOT_READY, + ALLOCATION_TIMEOUT_NO_HEALTHY_CLUSTERS, + ALLOCATION_TIMEOUT_NO_MATCHED_CLUSTERS, + ALLOCATION_TIMEOUT_NO_READY_CLUSTERS, + ALLOCATION_TIMEOUT_NO_UNALLOCATED_CLUSTERS, + ALLOCATION_TIMEOUT_NO_WARMED_UP_CLUSTERS, ATTACH_PROJECT_FAILURE, AWS_AUTHORIZATION_FAILURE, + AWS_INACCESSIBLE_KMS_KEY_FAILURE, + AWS_INSTANCE_PROFILE_UPDATE_FAILURE, AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE, AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE, + AWS_INVALID_KEY_PAIR, + AWS_INVALID_KMS_KEY_STATE, AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE, AWS_REQUEST_LIMIT_EXCEEDED, + AWS_RESOURCE_QUOTA_EXCEEDED, AWS_UNSUPPORTED_FAILURE, AZURE_BYOK_KEY_PERMISSION_FAILURE, AZURE_EPHEMERAL_DISK_FAILURE, AZURE_INVALID_DEPLOYMENT_TEMPLATE, AZURE_OPERATION_NOT_ALLOWED_EXCEPTION, + AZURE_PACKED_DEPLOYMENT_PARTIAL_FAILURE, AZURE_QUOTA_EXCEEDED_EXCEPTION, AZURE_RESOURCE_MANAGER_THROTTLING, AZURE_RESOURCE_PROVIDER_THROTTLING, @@ -27,63 +41,128 @@ public enum TerminationReasonCode { AZURE_VNET_CONFIGURATION_FAILURE, BOOTSTRAP_TIMEOUT, BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION, + BOOTSTRAP_TIMEOUT_DUE_TO_MISCONFIG, + BUDGET_POLICY_LIMIT_ENFORCEMENT_ACTIVATED, + BUDGET_POLICY_RESOLUTION_FAILURE, + CLOUD_ACCOUNT_SETUP_FAILURE, + CLOUD_OPERATION_CANCELLED, CLOUD_PROVIDER_DISK_SETUP_FAILURE, + CLOUD_PROVIDER_INSTANCE_NOT_LAUNCHED, CLOUD_PROVIDER_LAUNCH_FAILURE, + CLOUD_PROVIDER_LAUNCH_FAILURE_DUE_TO_MISCONFIG, CLOUD_PROVIDER_RESOURCE_STOCKOUT, + CLOUD_PROVIDER_RESOURCE_STOCKOUT_DUE_TO_MISCONFIG, CLOUD_PROVIDER_SHUTDOWN, + CLUSTER_OPERATION_THROTTLED, + CLUSTER_OPERATION_TIMEOUT, COMMUNICATION_LOST, CONTAINER_LAUNCH_FAILURE, CONTROL_PLANE_REQUEST_FAILURE, + CONTROL_PLANE_REQUEST_FAILURE_DUE_TO_MISCONFIG, DATABASE_CONNECTION_FAILURE, + DATA_ACCESS_CONFIG_CHANGED, DBFS_COMPONENT_UNHEALTHY, + DISASTER_RECOVERY_REPLICATION, DOCKER_IMAGE_PULL_FAILURE, + DRIVER_EVICTION, + DRIVER_LAUNCH_TIMEOUT, + DRIVER_NODE_UNREACHABLE, + DRIVER_OUT_OF_DISK, + DRIVER_OUT_OF_MEMORY, + DRIVER_POD_CREATION_FAILURE, + DRIVER_UNEXPECTED_FAILURE, DRIVER_UNREACHABLE, DRIVER_UNRESPONSIVE, + DYNAMIC_SPARK_CONF_SIZE_EXCEEDED, + EOS_SPARK_IMAGE, EXECUTION_COMPONENT_UNHEALTHY, + EXECUTOR_POD_UNSCHEDULED, + GCP_API_RATE_QUOTA_EXCEEDED, + GCP_FORBIDDEN, + GCP_IAM_TIMEOUT, + GCP_INACCESSIBLE_KMS_KEY_FAILURE, + GCP_INSUFFICIENT_CAPACITY, + GCP_IP_SPACE_EXHAUSTED, + GCP_KMS_KEY_PERMISSION_DENIED, + GCP_NOT_FOUND, GCP_QUOTA_EXCEEDED, + 
GCP_RESOURCE_QUOTA_EXCEEDED, + GCP_SERVICE_ACCOUNT_ACCESS_DENIED, GCP_SERVICE_ACCOUNT_DELETED, + GCP_SERVICE_ACCOUNT_NOT_FOUND, + GCP_SUBNET_NOT_READY, + GCP_TRUSTED_IMAGE_PROJECTS_VIOLATED, + GKE_BASED_CLUSTER_TERMINATION, GLOBAL_INIT_SCRIPT_FAILURE, HIVE_METASTORE_PROVISIONING_FAILURE, IMAGE_PULL_PERMISSION_DENIED, INACTIVITY, + INIT_CONTAINER_NOT_FINISHED, INIT_SCRIPT_FAILURE, INSTANCE_POOL_CLUSTER_FAILURE, + INSTANCE_POOL_MAX_CAPACITY_REACHED, + INSTANCE_POOL_NOT_FOUND, INSTANCE_UNREACHABLE, + INSTANCE_UNREACHABLE_DUE_TO_MISCONFIG, + INTERNAL_CAPACITY_FAILURE, INTERNAL_ERROR, INVALID_ARGUMENT, + INVALID_AWS_PARAMETER, + INVALID_INSTANCE_PLACEMENT_PROTOCOL, INVALID_SPARK_IMAGE, + INVALID_WORKER_IMAGE_FAILURE, + IN_PENALTY_BOX, IP_EXHAUSTION_FAILURE, JOB_FINISHED, K8S_AUTOSCALING_FAILURE, K8S_DBR_CLUSTER_LAUNCH_TIMEOUT, + LAZY_ALLOCATION_TIMEOUT, + MAINTENANCE_MODE, METASTORE_COMPONENT_UNHEALTHY, NEPHOS_RESOURCE_MANAGEMENT, + NETVISOR_SETUP_TIMEOUT, NETWORK_CONFIGURATION_FAILURE, NFS_MOUNT_FAILURE, + NO_MATCHED_K8S, + NO_MATCHED_K8S_TESTING_TAG, NPIP_TUNNEL_SETUP_FAILURE, NPIP_TUNNEL_TOKEN_FAILURE, + POD_ASSIGNMENT_FAILURE, + POD_SCHEDULING_FAILURE, REQUEST_REJECTED, REQUEST_THROTTLED, + RESOURCE_USAGE_BLOCKED, + SECRET_CREATION_FAILURE, SECRET_RESOLUTION_ERROR, SECURITY_DAEMON_REGISTRATION_EXCEPTION, SELF_BOOTSTRAP_FAILURE, + SERVERLESS_LONG_RUNNING_TERMINATED, SKIPPED_SLOW_NODES, SLOW_IMAGE_DOWNLOAD, SPARK_ERROR, SPARK_IMAGE_DOWNLOAD_FAILURE, + SPARK_IMAGE_DOWNLOAD_THROTTLED, + SPARK_IMAGE_NOT_FOUND, SPARK_STARTUP_FAILURE, SPOT_INSTANCE_TERMINATION, + SSH_BOOTSTRAP_FAILURE, STORAGE_DOWNLOAD_FAILURE, + STORAGE_DOWNLOAD_FAILURE_DUE_TO_MISCONFIG, + STORAGE_DOWNLOAD_FAILURE_SLOW, + STORAGE_DOWNLOAD_FAILURE_THROTTLED, STS_CLIENT_SETUP_FAILURE, SUBNET_EXHAUSTED_FAILURE, TEMPORARILY_UNAVAILABLE, TRIAL_EXPIRED, UNEXPECTED_LAUNCH_FAILURE, + UNEXPECTED_POD_RECREATION, UNKNOWN, UNSUPPORTED_INSTANCE_TYPE, UPDATE_INSTANCE_PROFILE_FAILURE, + USER_INITIATED_VM_TERMINATION, USER_REQUEST, WORKER_SETUP_FAILURE, WORKSPACE_CANCELLED_ERROR, WORKSPACE_CONFIGURATION_ERROR, + WORKSPACE_UPDATE, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java index 2a7a5e8a6..9907dd125 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java @@ -9,7 +9,7 @@ @Generated public class UnpinCluster { - /** */ + /** */ @JsonProperty("cluster_id") private String clusterId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java index 0a50e9439..9d4ea8132 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java @@ -18,10 +18,18 @@ public class UpdateCluster { private String clusterId; /** - * Specifies which fields of the cluster will be updated. This is required in the POST request. - * The update mask should be supplied as a single string. To specify multiple fields, separate - * them with commas (no spaces). To delete a field from a cluster configuration, add it to the - * `update_mask` string but omit it from the `cluster` object. 
+ * Used to specify which cluster attributes and size fields to update. See + * https://google.aip.dev/161 for more details. + * + *

The field mask must be a single string, with multiple fields separated by commas (no + * spaces). The field path is relative to the resource object, using a dot (`.`) to navigate + * sub-fields (e.g., `author.given_name`). Specification of elements in sequence or map fields is + * not allowed, as only the entire collection field can be specified. Field names must exactly + * match the resource field names. + * + *

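// Illustrative sketch, not generated code: supplying the field mask described above via
// the Java SDK. The workspace client, cluster ID, and chosen field are assumptions for
// the example, not values from this change.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.UpdateCluster;
import com.databricks.sdk.service.compute.UpdateClusterResource;

class UpdateClusterFieldMaskExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient(); // auth resolved from the environment
    // Update only `num_workers`; a field named in update_mask but omitted from the
    // `cluster` object is cleared on the server side.
    w.clusters()
        .update(
            new UpdateCluster()
                .setClusterId("1234-567890-abcde123") // hypothetical cluster ID
                .setUpdateMask("num_workers")
                .setCluster(new UpdateClusterResource().setNumWorkers(8L)));
  }
}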
A field mask of `*` indicates full replacement. It’s recommended to always explicitly list + * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if + * the API changes in the future. */ @JsonProperty("update_mask") private String updateMask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java index 6e9feab08..273736519 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java @@ -100,7 +100,7 @@ public class UpdateClusterResource { @JsonProperty("data_security_mode") private DataSecurityMode dataSecurityMode; - /** */ + /** Custom docker image BYOC */ @JsonProperty("docker_image") private DockerImage dockerImage; @@ -114,6 +114,10 @@ public class UpdateClusterResource { /** * The node type of the Spark driver. Note that this field is optional; if unset, the driver node * type will be set as the same value as `node_type_id` defined above. + * + *

This field, along with node_type_id, should not be set if virtual_cluster_size is set. If + * driver_node_type_id, node_type_id, and virtual_cluster_size are all specified, + * driver_node_type_id and node_type_id take precedence. */ @JsonProperty("driver_node_type_id") private String driverNodeTypeId; @@ -266,7 +270,7 @@ public class UpdateClusterResource { @JsonProperty("use_ml_runtime") private Boolean useMlRuntime; - /** */ + /** Cluster attributes showing the workload types for clusters. */ @JsonProperty("workload_type") private WorkloadType workloadType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java index a65a5b555..33bc00832 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java @@ -7,9 +7,13 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** A storage location backed by UC Volumes. */ @Generated public class VolumesStorageInfo { - /** Unity Catalog volumes file destination, e.g. `/Volumes/catalog/schema/volume/dir/file` */ + /** + * UC Volumes destination, e.g. `/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh` or + * `dbfs:/Volumes/catalog/schema/vol1/init-scripts/setup-datadog.sh` + */ @JsonProperty("destination") private String destination; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java index 768528af8..6faf119c7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Cluster attributes showing the workload types for clusters. */ @Generated public class WorkloadType { /** Defines what types of clients can use the cluster, e.g. Notebooks, Jobs */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java index 13d7ce1f8..0537dbb67 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java @@ -7,9 +7,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** A storage location in Workspace Filesystem (WSFS) */ @Generated public class WorkspaceStorageInfo { - /** workspace files destination, e.g. `/Users/user1@databricks.com/my-init.sh` */ + /** wsfs destination, e.g.
`workspace:/cluster-init-scripts/setup-datadog.sh` */ @JsonProperty("destination") private String destination; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index 253a6b9d8..81daf0c1b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -161,6 +161,27 @@ public GenieGetMessageQueryResultResponse executeMessageQuery( return impl.executeMessageQuery(request); } + public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult( + String spaceId, String conversationId, String messageId, String attachmentId) { + return generateDownloadFullQueryResult( + new GenieGenerateDownloadFullQueryResultRequest() + .setSpaceId(spaceId) + .setConversationId(conversationId) + .setMessageId(messageId) + .setAttachmentId(attachmentId)); + } + + /** + * Generate full query result download. + * + *

Initiate full SQL query result download and obtain a transient ID for tracking the download + * progress. This call initiates a new SQL execution to generate the query result. + */ + public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult( + GenieGenerateDownloadFullQueryResultRequest request) { + return impl.generateDownloadFullQueryResult(request); + } + public GenieMessage getMessage(String spaceId, String conversationId, String messageId) { return getMessage( new GenieGetConversationMessageRequest() diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java new file mode 100755 index 000000000..b1c8d9a74 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +/** Generate full query result download */ +@Generated +public class GenieGenerateDownloadFullQueryResultRequest { + /** Attachment ID */ + @JsonIgnore private String attachmentId; + + /** Conversation ID */ + @JsonIgnore private String conversationId; + + /** Message ID */ + @JsonIgnore private String messageId; + + /** Space ID */ + @JsonIgnore private String spaceId; + + public GenieGenerateDownloadFullQueryResultRequest setAttachmentId(String attachmentId) { + this.attachmentId = attachmentId; + return this; + } + + public String getAttachmentId() { + return attachmentId; + } + + public GenieGenerateDownloadFullQueryResultRequest setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieGenerateDownloadFullQueryResultRequest setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieGenerateDownloadFullQueryResultRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGenerateDownloadFullQueryResultRequest that = + (GenieGenerateDownloadFullQueryResultRequest) o; + return Objects.equals(attachmentId, that.attachmentId) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(attachmentId, conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieGenerateDownloadFullQueryResultRequest.class) + .add("attachmentId", attachmentId) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java new file mode 100755 index 000000000..3b8991cc1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java @@ -0,0 +1,78 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieGenerateDownloadFullQueryResultResponse { + /** Error message if Genie failed to download the result */ + @JsonProperty("error") + private String error; + + /** Download result status */ + @JsonProperty("status") + private MessageStatus status; + + /** + * Transient Statement ID. Use this ID to track the download request in subsequent polling calls + */ + @JsonProperty("transient_statement_id") + private String transientStatementId; + + public GenieGenerateDownloadFullQueryResultResponse setError(String error) { + this.error = error; + return this; + } + + public String getError() { + return error; + } + + public GenieGenerateDownloadFullQueryResultResponse setStatus(MessageStatus status) { + this.status = status; + return this; + } + + public MessageStatus getStatus() { + return status; + } + + public GenieGenerateDownloadFullQueryResultResponse setTransientStatementId( + String transientStatementId) { + this.transientStatementId = transientStatementId; + return this; + } + + public String getTransientStatementId() { + return transientStatementId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieGenerateDownloadFullQueryResultResponse that = + (GenieGenerateDownloadFullQueryResultResponse) o; + return Objects.equals(error, that.error) + && Objects.equals(status, that.status) + && Objects.equals(transientStatementId, that.transientStatementId); + } + + @Override + public int hashCode() { + return Objects.hash(error, status, transientStatementId); + } + + @Override + public String toString() { + return new ToStringer(GenieGenerateDownloadFullQueryResultResponse.class) + .add("error", error) + .add("status", status) + .add("transientStatementId", transientStatementId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index af444a4ab..c536eda5e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -70,6 +70,26 @@ public GenieGetMessageQueryResultResponse executeMessageQuery( } } + @Override + public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult( + GenieGenerateDownloadFullQueryResultRequest request) { + String path = + String.format( + "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/generate-download", + request.getSpaceId(), + request.getConversationId(), + request.getMessageId(), + request.getAttachmentId()); + try { + Request req = new Request("POST", path); + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + return apiClient.execute(req, 
GenieGenerateDownloadFullQueryResultResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieMessage getMessage(GenieGetConversationMessageRequest request) { String path = diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index 540e9f01e..b9f3c75cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -41,6 +41,15 @@ GenieGetMessageQueryResultResponse executeMessageAttachmentQuery( GenieGetMessageQueryResultResponse executeMessageQuery( GenieExecuteMessageQueryRequest genieExecuteMessageQueryRequest); + /** + * Generate full query result download. + * + *

Initiate full SQL query result download and obtain a transient ID for tracking the download + * progress. This call initiates a new SQL execution to generate the query result. + */ + GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult( + GenieGenerateDownloadFullQueryResultRequest genieGenerateDownloadFullQueryResultRequest); + /** * Get conversation message. * diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java index c1ba58d9d..35cd4d6b1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java @@ -20,6 +20,7 @@ public enum MessageErrorType { FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION, FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION, FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION, + GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION, GENERIC_CHAT_COMPLETION_EXCEPTION, GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION, GENERIC_SQL_EXEC_API_CALL_EXCEPTION, @@ -34,6 +35,7 @@ public enum MessageErrorType { MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION, MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION, MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION, + MISSING_SQL_QUERY_EXCEPTION, NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE, NO_QUERY_TO_VISUALIZE_EXCEPTION, NO_TABLES_TO_QUERY_EXCEPTION, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java index a429fe4f5..ec429f93b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsImpl.java @@ -85,7 +85,6 @@ public void patch(PartialUpdate request) { try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -102,7 +101,6 @@ public void update(Group request) { try { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java index fc614cefe..b576d22d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountServicePrincipalsImpl.java @@ -87,7 +87,6 @@ public void patch(PartialUpdate request) { try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -104,7 +103,6 @@ public void update(ServicePrincipal request) { try { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - 
req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java index a8e6fa307..47fd2d266 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountUsersImpl.java @@ -85,7 +85,6 @@ public void patch(PartialUpdate request) { try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -102,7 +101,6 @@ public void update(User request) { try { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequest.java index 9cec15427..bc96255a3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountGroupRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Delete a group */ +/** Delete a group. */ @Generated public class DeleteAccountGroupRequest { /** Unique ID for a group in the Databricks account. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java index c7997cffe..8970debe2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountServicePrincipalRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Delete a service principal */ +/** Delete a service principal. */ @Generated public class DeleteAccountServicePrincipalRequest { /** Unique ID for a service principal in the Databricks account. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java index 13768c70c..3b41e0c7c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteAccountUserRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Delete a user */ +/** Delete a user. */ @Generated public class DeleteAccountUserRequest { /** Unique ID for a user in the Databricks account. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java index 29134794f..f7ca0d7ee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteGroupRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Delete a group */ +/** Delete a group. */ @Generated public class DeleteGroupRequest { /** Unique ID for a group in the Databricks workspace. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java index 0e2004777..e24962cbc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteServicePrincipalRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Delete a service principal */ +/** Delete a service principal. */ @Generated public class DeleteServicePrincipalRequest { /** Unique ID for a service principal in the Databricks workspace. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java index ddd001d93..05f3dcad4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/DeleteUserRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Delete a user */ +/** Delete a user. */ @Generated public class DeleteUserRequest { /** Unique ID for a user in the Databricks workspace. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java index 543636751..03afecc6c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountGroupRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get group details */ +/** Get group details. */ @Generated public class GetAccountGroupRequest { /** Unique ID for a group in the Databricks account. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java index 0734612e6..bc6682251 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountServicePrincipalRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get service principal details */ +/** Get service principal details. */ @Generated public class GetAccountServicePrincipalRequest { /** Unique ID for a service principal in the Databricks account. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java index f0dfb16fb..d36f2bc97 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetAccountUserRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get user details */ +/** Get user details. */ @Generated public class GetAccountUserRequest { /** Comma-separated list of attributes to return in response. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequest.java index 39d30e85a..45820a35f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetGroupRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get group details */ +/** Get group details. */ @Generated public class GetGroupRequest { /** Unique ID for a group in the Databricks workspace. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequest.java index c028e74d3..b6f108016 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetServicePrincipalRequest.java @@ -7,7 +7,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get service principal details */ +/** Get service principal details. */ @Generated public class GetServicePrincipalRequest { /** Unique ID for a service principal in the Databricks workspace. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequest.java index 1fad13b9a..641367c1f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetUserRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** Get user details */ +/** Get user details. */ @Generated public class GetUserRequest { /** Comma-separated list of attributes to return in response. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java index e4e091671..282689a35 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GroupsImpl.java @@ -74,7 +74,6 @@ public void patch(PartialUpdate request) { try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -88,7 +87,6 @@ public void update(Group request) { try { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequest.java index 5f95db956..0561e6669 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountGroupsRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** List group details */ +/** List group details. */ @Generated public class ListAccountGroupsRequest { /** Comma-separated list of attributes to return in response. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequest.java index b197effee..23f3255c5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountServicePrincipalsRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** List service principals */ +/** List service principals. */ @Generated public class ListAccountServicePrincipalsRequest { /** Comma-separated list of attributes to return in response. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequest.java index 0daab2c20..4615abfbd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListAccountUsersRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** List users */ +/** List users. */ @Generated public class ListAccountUsersRequest { /** Comma-separated list of attributes to return in response. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequest.java index 01391ea1d..85f9e831d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListGroupsRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** List group details */ +/** List group details. */ @Generated public class ListGroupsRequest { /** Comma-separated list of attributes to return in response. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequest.java index b247edb29..35003ac3c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListServicePrincipalsRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** List service principals */ +/** List service principals. */ @Generated public class ListServicePrincipalsRequest { /** Comma-separated list of attributes to return in response. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequest.java index eb64f35f3..373ad0552 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ListUsersRequest.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.Objects; -/** List users */ +/** List users. */ @Generated public class ListUsersRequest { /** Comma-separated list of attributes to return in response. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdate.java index b8993a2c8..8d3ad7796 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdate.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PartialUpdate.java @@ -11,7 +11,7 @@ @Generated public class PartialUpdate { - /** Unique ID for a user in the Databricks workspace. */ + /** Unique ID in the Databricks workspace. 
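A note on the Accept-header removals above: the generated patch and update methods deserialize into the empty PatchResponse/UpdateResponse wrappers, so the Accept header was carrying no information and its removal is invisible to SDK callers. A minimal sketch of the unchanged calling pattern, assuming a configured WorkspaceClient; the user ID, path, and value are hypothetical:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.iam.PartialUpdate;
    import com.databricks.sdk.service.iam.Patch;
    import com.databricks.sdk.service.iam.PatchOp;
    import java.util.Arrays;

    public class DeactivateUserExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient(); // credentials resolved from the environment
        // SCIM PATCH returns an empty body, which is why no Accept header is set.
        w.users().patch(new PartialUpdate()
            .setId("123456") // hypothetical user ID
            .setOperations(Arrays.asList(new Patch()
                .setOp(PatchOp.REPLACE)
                .setPath("active")
                .setValue(false))));
      }
    }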
*/ @JsonIgnore private String id; /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java index 516abd5d2..0c6daef30 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/ServicePrincipalsImpl.java @@ -74,7 +74,6 @@ public void patch(PartialUpdate request) { try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -88,7 +87,6 @@ public void update(ServicePrincipal request) { try { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java index 5c8595b5c..b107ee340 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/User.java @@ -47,10 +47,7 @@ public class User { @JsonProperty("groups") private Collection groups; - /** - * Databricks user ID. This is automatically set by Databricks. Any value provided by the client - * will be ignored. - */ + /** Databricks user ID. */ @JsonProperty("id") private String id; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java index 309d6bd5a..eb980dd36 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersImpl.java @@ -98,7 +98,6 @@ public void patch(PartialUpdate request) { try { Request req = new Request("PATCH", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, PatchResponse.class); } catch (IOException e) { @@ -126,7 +125,6 @@ public void update(User request) { try { Request req = new Request("PUT", path, apiClient.serialize(request)); ApiClient.setQuery(req, request); - req.withHeader("Accept", "application/json"); req.withHeader("Content-Type", "application/json"); apiClient.execute(req, UpdateResponse.class); } catch (IOException e) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java index 88f9fcd78..ddf83eae9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PerformanceTarget.java @@ -11,6 +11,7 @@ */ @Generated public enum PerformanceTarget { + BALANCED, COST_OPTIMIZED, PERFORMANCE_OPTIMIZED, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentType.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentType.java index 55c6496f1..34367d9db 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentType.java @@ -7,5 +7,6 @@ @Generated public enum FileParentType { LISTING, + LISTING_RESOURCE, PROVIDER, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java index f73465e34..bf2edad5d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java @@ -17,7 +17,7 @@ public class Listing { @JsonProperty("id") private String id; - /** Next Number: 26 */ + /** */ @JsonProperty("summary") private ListingSummary summary; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java index 0eb2f7d89..60e960e72 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java @@ -8,7 +8,6 @@ import java.util.Collection; import java.util.Objects; -/** Next Number: 26 */ @Generated public class ListingSummary { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/MarketplaceFileType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/MarketplaceFileType.java index cb7135dc2..bf12bbacf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/MarketplaceFileType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/MarketplaceFileType.java @@ -6,6 +6,7 @@ @Generated public enum MarketplaceFileType { + APP, EMBEDDED_NOTEBOOK, PROVIDER_ICON, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java index d5b1322e3..e1ca8823b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CreateForecastingExperimentRequest.java @@ -17,24 +17,6 @@ public class CreateForecastingExperimentRequest { @JsonProperty("custom_weights_column") private String customWeightsColumn; - /** - * The quantity of the input data granularity. Together with data_granularity_unit field, this - * defines the time interval between consecutive rows in the time series data. For now, only 1 - * second, 1/5/10/15/30 minutes, 1 hour, 1 day, 1 week, 1 month, 1 quarter, 1 year are supported. - */ - @JsonProperty("data_granularity_quantity") - private Long dataGranularityQuantity; - - /** - * The time unit of the input data granularity. Together with data_granularity_quantity field, - * this defines the time interval between consecutive rows in the time series data. 
Possible - * values: * 'W' (weeks) * 'D' / 'days' / 'day' * 'hours' / 'hour' / 'hr' / 'h' * 'm' / 'minute' / - * 'min' / 'minutes' / 'T' * 'S' / 'seconds' / 'sec' / 'second' * 'M' / 'month' / 'months' * 'Q' / - * 'quarter' / 'quarters' * 'Y' / 'year' / 'years' - */ - @JsonProperty("data_granularity_unit") - private String dataGranularityUnit; - /** * The path to the created experiment. This is the path where the experiment will be stored in the * workspace. @@ -42,10 +24,18 @@ public class CreateForecastingExperimentRequest { @JsonProperty("experiment_path") private String experimentPath; + /** + * The granularity of the forecast. This defines the time interval between consecutive rows in the + * time series data. Possible values: '1 second', '1 minute', '5 minutes', '10 minutes', '15 + * minutes', '30 minutes', 'Hourly', 'Daily', 'Weekly', 'Monthly', 'Quarterly', 'Yearly'. + */ + @JsonProperty("forecast_granularity") + private String forecastGranularity; + /** * The number of time steps into the future for which predictions should be made. This value - * represents a multiple of data_granularity_unit and data_granularity_quantity determining how - * far ahead the model will forecast. + * represents a multiple of forecast_granularity determining how far ahead the model will + * forecast. */ @JsonProperty("forecast_horizon") private Long forecastHorizon; @@ -130,32 +120,22 @@ public String getCustomWeightsColumn() { return customWeightsColumn; } - public CreateForecastingExperimentRequest setDataGranularityQuantity( - Long dataGranularityQuantity) { - this.dataGranularityQuantity = dataGranularityQuantity; - return this; - } - - public Long getDataGranularityQuantity() { - return dataGranularityQuantity; - } - - public CreateForecastingExperimentRequest setDataGranularityUnit(String dataGranularityUnit) { - this.dataGranularityUnit = dataGranularityUnit; + public CreateForecastingExperimentRequest setExperimentPath(String experimentPath) { + this.experimentPath = experimentPath; return this; } - public String getDataGranularityUnit() { - return dataGranularityUnit; + public String getExperimentPath() { + return experimentPath; } - public CreateForecastingExperimentRequest setExperimentPath(String experimentPath) { - this.experimentPath = experimentPath; + public CreateForecastingExperimentRequest setForecastGranularity(String forecastGranularity) { + this.forecastGranularity = forecastGranularity; return this; } - public String getExperimentPath() { - return experimentPath; + public String getForecastGranularity() { + return forecastGranularity; } public CreateForecastingExperimentRequest setForecastHorizon(Long forecastHorizon) { @@ -274,9 +254,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CreateForecastingExperimentRequest that = (CreateForecastingExperimentRequest) o; return Objects.equals(customWeightsColumn, that.customWeightsColumn) - && Objects.equals(dataGranularityQuantity, that.dataGranularityQuantity) - && Objects.equals(dataGranularityUnit, that.dataGranularityUnit) && Objects.equals(experimentPath, that.experimentPath) + && Objects.equals(forecastGranularity, that.forecastGranularity) && Objects.equals(forecastHorizon, that.forecastHorizon) && Objects.equals(holidayRegions, that.holidayRegions) && Objects.equals(maxRuntime, that.maxRuntime) @@ -295,9 +274,8 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( customWeightsColumn, - dataGranularityQuantity, - dataGranularityUnit, experimentPath, + 
forecastGranularity, forecastHorizon, holidayRegions, maxRuntime, @@ -316,9 +294,8 @@ public int hashCode() { public String toString() { return new ToStringer(CreateForecastingExperimentRequest.class) .add("customWeightsColumn", customWeightsColumn) - .add("dataGranularityQuantity", dataGranularityQuantity) - .add("dataGranularityUnit", dataGranularityUnit) .add("experimentPath", experimentPath) + .add("forecastGranularity", forecastGranularity) .add("forecastHorizon", forecastHorizon) .add("holidayRegions", holidayRegions) .add("maxRuntime", maxRuntime) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java index e080315b6..ced569e1f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java @@ -82,14 +82,14 @@ public Wait createEx String trainDataPath, String targetColumn, String timeColumn, - String dataGranularityUnit, + String forecastGranularity, long forecastHorizon) { return createExperiment( new CreateForecastingExperimentRequest() .setTrainDataPath(trainDataPath) .setTargetColumn(targetColumn) .setTimeColumn(timeColumn) - .setDataGranularityUnit(dataGranularityUnit) + .setForecastGranularity(forecastGranularity) .setForecastHorizon(forecastHorizon)); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java index 390d2d234..c6c8453cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/OidcFederationPolicy.java @@ -35,6 +35,16 @@ public class OidcFederationPolicy { @JsonProperty("jwks_json") private String jwksJson; + /** + * URL of the public keys used to validate the signature of federated tokens, in JWKS format. Most + * use cases should not need to specify this field. If jwks_uri and jwks_json are both unspecified + * (recommended), Databricks automatically fetches the public keys from your issuer’s well known + * endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for + * discovering public keys. + */ + @JsonProperty("jwks_uri") + private String jwksUri; + /** * The required token subject, as specified in the subject claim of federated tokens. Must be * specified for service principal federation policies. 
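An aside on the ForecastingAPI change above: the data_granularity_quantity/data_granularity_unit pair collapses into a single forecast_granularity string, and the convenience overload of createExperiment changes with it. A minimal sketch, assuming WorkspaceClient exposes this service as forecasting(); the table and column names are hypothetical:

    import com.databricks.sdk.WorkspaceClient;

    WorkspaceClient w = new WorkspaceClient();
    // The fourth argument is now forecast_granularity, e.g. '1 minute', 'Hourly', 'Daily'.
    w.forecasting().createExperiment(
        "main.forecasting.sales_train", // train_data_path
        "units_sold",                   // target_column
        "sale_date",                    // time_column
        "Daily",                        // forecast_granularity
        14);                            // forecast_horizon, a multiple of forecast_granularity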
Must not be specified for account @@ -76,6 +86,15 @@ public String getJwksJson() { return jwksJson; } + public OidcFederationPolicy setJwksUri(String jwksUri) { + this.jwksUri = jwksUri; + return this; + } + + public String getJwksUri() { + return jwksUri; + } + public OidcFederationPolicy setSubject(String subject) { this.subject = subject; return this; @@ -102,13 +121,14 @@ public boolean equals(Object o) { return Objects.equals(audiences, that.audiences) && Objects.equals(issuer, that.issuer) && Objects.equals(jwksJson, that.jwksJson) + && Objects.equals(jwksUri, that.jwksUri) && Objects.equals(subject, that.subject) && Objects.equals(subjectClaim, that.subjectClaim); } @Override public int hashCode() { - return Objects.hash(audiences, issuer, jwksJson, subject, subjectClaim); + return Objects.hash(audiences, issuer, jwksJson, jwksUri, subject, subjectClaim); } @Override @@ -117,6 +137,7 @@ public String toString() { .add("audiences", audiences) .add("issuer", issuer) .add("jwksJson", jwksJson) + .add("jwksUri", jwksUri) .add("subject", subject) .add("subjectClaim", subjectClaim) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index 5a6676c85..6188069dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -60,6 +60,10 @@ public class CreatePipeline { @JsonProperty("edition") private String edition; + /** Event log configuration for this pipeline */ + @JsonProperty("event_log") + private EventLogSpec eventLog; + /** Filters on which Pipeline packages to include in the deployed graph. */ @JsonProperty("filters") private Filters filters; @@ -74,7 +78,7 @@ public class CreatePipeline { /** * The configuration for a managed ingestion pipeline. These settings cannot be used with the - * 'libraries', 'target' or 'catalog' settings. + * 'libraries', 'schema', 'target', or 'catalog' settings. */ @JsonProperty("ingestion_definition") private IngestionPipelineDefinition ingestionDefinition; @@ -110,10 +114,7 @@ public class CreatePipeline { @JsonProperty("run_as") private RunAs runAs; - /** - * The default schema (database) where tables are read from or published to. The presence of this - * field implies that the pipeline is in direct publishing mode. - */ + /** The default schema (database) where tables are read from or published to. */ @JsonProperty("schema") private String schema; @@ -126,9 +127,9 @@ public class CreatePipeline { private String storage; /** - * Target schema (database) to add tables in this pipeline to. If not specified, no data is - * published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify - * `catalog`. + * Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` + * must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is + * deprecated for pipeline creation in favor of the `schema` field. 
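On the jwks_uri field added above: it complements jwks_json, and leaving both unset (recommended) lets Databricks discover keys from the issuer's well-known endpoint. A minimal sketch of a service principal federation policy, assuming the usual generated setters; the issuer, audience, and subject values are hypothetical:

    import com.databricks.sdk.service.oauth2.OidcFederationPolicy;
    import java.util.Arrays;

    OidcFederationPolicy policy = new OidcFederationPolicy()
        .setIssuer("https://idp.example.com")
        .setAudiences(Arrays.asList("databricks"))
        .setSubject("ci-deployer") // required for service principal federation policies
        // Optional pin; omit both jwks_uri and jwks_json to rely on the issuer's
        // well-known endpoint instead (recommended).
        .setJwksUri("https://idp.example.com/.well-known/jwks.json");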
*/ @JsonProperty("target") private String target; @@ -236,6 +237,15 @@ public String getEdition() { return edition; } + public CreatePipeline setEventLog(EventLogSpec eventLog) { + this.eventLog = eventLog; + return this; + } + + public EventLogSpec getEventLog() { + return eventLog; + } + public CreatePipeline setFilters(Filters filters) { this.filters = filters; return this; @@ -387,6 +397,7 @@ public boolean equals(Object o) { && Objects.equals(development, that.development) && Objects.equals(dryRun, that.dryRun) && Objects.equals(edition, that.edition) + && Objects.equals(eventLog, that.eventLog) && Objects.equals(filters, that.filters) && Objects.equals(gatewayDefinition, that.gatewayDefinition) && Objects.equals(id, that.id) @@ -418,6 +429,7 @@ public int hashCode() { development, dryRun, edition, + eventLog, filters, gatewayDefinition, id, @@ -449,6 +461,7 @@ public String toString() { .add("development", development) .add("dryRun", dryRun) .add("edition", edition) + .add("eventLog", eventLog) .add("filters", filters) .add("gatewayDefinition", gatewayDefinition) .add("id", id) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 6604d6864..4abb17864 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -58,6 +58,10 @@ public class EditPipeline { @JsonProperty("edition") private String edition; + /** Event log configuration for this pipeline */ + @JsonProperty("event_log") + private EventLogSpec eventLog; + /** * If present, the last-modified time of the pipeline settings before the edit. If the settings * were modified after that time, then the request will fail with a conflict. @@ -79,7 +83,7 @@ public class EditPipeline { /** * The configuration for a managed ingestion pipeline. These settings cannot be used with the - * 'libraries', 'target' or 'catalog' settings. + * 'libraries', 'schema', 'target', or 'catalog' settings. */ @JsonProperty("ingestion_definition") private IngestionPipelineDefinition ingestionDefinition; @@ -119,10 +123,7 @@ public class EditPipeline { @JsonProperty("run_as") private RunAs runAs; - /** - * The default schema (database) where tables are read from or published to. The presence of this - * field implies that the pipeline is in direct publishing mode. - */ + /** The default schema (database) where tables are read from or published to. */ @JsonProperty("schema") private String schema; @@ -135,9 +136,9 @@ public class EditPipeline { private String storage; /** - * Target schema (database) to add tables in this pipeline to. If not specified, no data is - * published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify - * `catalog`. + * Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` + * must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is + * deprecated for pipeline creation in favor of the `schema` field. 
*/ @JsonProperty("target") private String target; @@ -236,6 +237,15 @@ public String getEdition() { return edition; } + public EditPipeline setEventLog(EventLogSpec eventLog) { + this.eventLog = eventLog; + return this; + } + + public EventLogSpec getEventLog() { + return eventLog; + } + public EditPipeline setExpectedLastModified(Long expectedLastModified) { this.expectedLastModified = expectedLastModified; return this; @@ -404,6 +414,7 @@ public boolean equals(Object o) { && Objects.equals(deployment, that.deployment) && Objects.equals(development, that.development) && Objects.equals(edition, that.edition) + && Objects.equals(eventLog, that.eventLog) && Objects.equals(expectedLastModified, that.expectedLastModified) && Objects.equals(filters, that.filters) && Objects.equals(gatewayDefinition, that.gatewayDefinition) @@ -436,6 +447,7 @@ public int hashCode() { deployment, development, edition, + eventLog, expectedLastModified, filters, gatewayDefinition, @@ -468,6 +480,7 @@ public String toString() { .add("deployment", deployment) .add("development", development) .add("edition", edition) + .add("eventLog", eventLog) .add("expectedLastModified", expectedLastModified) .add("filters", filters) .add("gatewayDefinition", gatewayDefinition) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java new file mode 100755 index 000000000..9bf0d882b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Configurable event log parameters. */ +@Generated +public class EventLogSpec { + /** The UC catalog the event log is published under. */ + @JsonProperty("catalog") + private String catalog; + + /** The name the event log is published to in UC. */ + @JsonProperty("name") + private String name; + + /** The UC schema the event log is published under. 
*/ + @JsonProperty("schema") + private String schema; + + public EventLogSpec setCatalog(String catalog) { + this.catalog = catalog; + return this; + } + + public String getCatalog() { + return catalog; + } + + public EventLogSpec setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public EventLogSpec setSchema(String schema) { + this.schema = schema; + return this; + } + + public String getSchema() { + return schema; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EventLogSpec that = (EventLogSpec) o; + return Objects.equals(catalog, that.catalog) + && Objects.equals(name, that.name) + && Objects.equals(schema, that.schema); + } + + @Override + public int hashCode() { + return Objects.hash(catalog, name, schema); + } + + @Override + public String toString() { + return new ToStringer(EventLogSpec.class) + .add("catalog", catalog) + .add("name", name) + .add("schema", schema) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java index 26aecddf5..a20b36e95 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java @@ -52,6 +52,10 @@ public class PipelineSpec { @JsonProperty("edition") private String edition; + /** Event log configuration for this pipeline */ + @JsonProperty("event_log") + private EventLogSpec eventLog; + /** Filters on which Pipeline packages to include in the deployed graph. */ @JsonProperty("filters") private Filters filters; @@ -66,7 +70,7 @@ public class PipelineSpec { /** * The configuration for a managed ingestion pipeline. These settings cannot be used with the - * 'libraries', 'target' or 'catalog' settings. + * 'libraries', 'schema', 'target', or 'catalog' settings. */ @JsonProperty("ingestion_definition") private IngestionPipelineDefinition ingestionDefinition; @@ -91,10 +95,7 @@ public class PipelineSpec { @JsonProperty("restart_window") private RestartWindow restartWindow; - /** - * The default schema (database) where tables are read from or published to. The presence of this - * field implies that the pipeline is in direct publishing mode. - */ + /** The default schema (database) where tables are read from or published to. */ @JsonProperty("schema") private String schema; @@ -107,9 +108,9 @@ public class PipelineSpec { private String storage; /** - * Target schema (database) to add tables in this pipeline to. If not specified, no data is - * published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify - * `catalog`. + * Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` + * must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is + * deprecated for pipeline creation in favor of the `schema` field. 
*/ @JsonProperty("target") private String target; @@ -199,6 +200,15 @@ public String getEdition() { return edition; } + public PipelineSpec setEventLog(EventLogSpec eventLog) { + this.eventLog = eventLog; + return this; + } + + public EventLogSpec getEventLog() { + return eventLog; + } + public PipelineSpec setFilters(Filters filters) { this.filters = filters; return this; @@ -339,6 +349,7 @@ public boolean equals(Object o) { && Objects.equals(deployment, that.deployment) && Objects.equals(development, that.development) && Objects.equals(edition, that.edition) + && Objects.equals(eventLog, that.eventLog) && Objects.equals(filters, that.filters) && Objects.equals(gatewayDefinition, that.gatewayDefinition) && Objects.equals(id, that.id) @@ -367,6 +378,7 @@ public int hashCode() { deployment, development, edition, + eventLog, filters, gatewayDefinition, id, @@ -395,6 +407,7 @@ public String toString() { .add("deployment", deployment) .add("development", development) .add("edition", edition) + .add("eventLog", eventLog) .add("filters", filters) .add("gatewayDefinition", gatewayDefinition) .add("id", id) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java index 9267fb173..6c957db0a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java @@ -10,6 +10,13 @@ @Generated public class AiGatewayConfig { + /** + * Configuration for traffic fallback which auto fallbacks to other served entities if the request + * to a served entity fails with certain error codes, to increase availability. + */ + @JsonProperty("fallback_config") + private FallbackConfig fallbackConfig; + /** * Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and * responses. 
@@ -35,6 +42,15 @@ public class AiGatewayConfig { @JsonProperty("usage_tracking_config") private AiGatewayUsageTrackingConfig usageTrackingConfig; + public AiGatewayConfig setFallbackConfig(FallbackConfig fallbackConfig) { + this.fallbackConfig = fallbackConfig; + return this; + } + + public FallbackConfig getFallbackConfig() { + return fallbackConfig; + } + public AiGatewayConfig setGuardrails(AiGatewayGuardrails guardrails) { this.guardrails = guardrails; return this; @@ -77,7 +93,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AiGatewayConfig that = (AiGatewayConfig) o; - return Objects.equals(guardrails, that.guardrails) + return Objects.equals(fallbackConfig, that.fallbackConfig) + && Objects.equals(guardrails, that.guardrails) && Objects.equals(inferenceTableConfig, that.inferenceTableConfig) && Objects.equals(rateLimits, that.rateLimits) && Objects.equals(usageTrackingConfig, that.usageTrackingConfig); @@ -85,12 +102,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(guardrails, inferenceTableConfig, rateLimits, usageTrackingConfig); + return Objects.hash( + fallbackConfig, guardrails, inferenceTableConfig, rateLimits, usageTrackingConfig); } @Override public String toString() { return new ToStringer(AiGatewayConfig.class) + .add("fallbackConfig", fallbackConfig) .add("guardrails", guardrails) .add("inferenceTableConfig", inferenceTableConfig) .add("rateLimits", rateLimits) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuth.java new file mode 100755 index 000000000..0a87654f7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ApiKeyAuth.java @@ -0,0 +1,80 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ApiKeyAuth { + /** The name of the API key parameter used for authentication. */ + @JsonProperty("key") + private String key; + + /** + * The Databricks secret key reference for an API Key. If you prefer to paste your token directly, + * see `value_plaintext`. + */ + @JsonProperty("value") + private String value; + + /** + * The API Key provided as a plaintext string. If you prefer to reference your token using + * Databricks Secrets, see `value`. 
+ */ + @JsonProperty("value_plaintext") + private String valuePlaintext; + + public ApiKeyAuth setKey(String key) { + this.key = key; + return this; + } + + public String getKey() { + return key; + } + + public ApiKeyAuth setValue(String value) { + this.value = value; + return this; + } + + public String getValue() { + return value; + } + + public ApiKeyAuth setValuePlaintext(String valuePlaintext) { + this.valuePlaintext = valuePlaintext; + return this; + } + + public String getValuePlaintext() { + return valuePlaintext; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ApiKeyAuth that = (ApiKeyAuth) o; + return Objects.equals(key, that.key) + && Objects.equals(value, that.value) + && Objects.equals(valuePlaintext, that.valuePlaintext); + } + + @Override + public int hashCode() { + return Objects.hash(key, value, valuePlaintext); + } + + @Override + public String toString() { + return new ToStringer(ApiKeyAuth.class) + .add("key", key) + .add("value", value) + .add("valuePlaintext", valuePlaintext) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuth.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuth.java new file mode 100755 index 000000000..8f1db2a3b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BearerTokenAuth.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class BearerTokenAuth { + /** + * The Databricks secret key reference for a token. If you prefer to paste your token directly, + * see `token_plaintext`. + */ + @JsonProperty("token") + private String token; + + /** + * The token provided as a plaintext string. If you prefer to reference your token using + * Databricks Secrets, see `token`. 
+ */ + @JsonProperty("token_plaintext") + private String tokenPlaintext; + + public BearerTokenAuth setToken(String token) { + this.token = token; + return this; + } + + public String getToken() { + return token; + } + + public BearerTokenAuth setTokenPlaintext(String tokenPlaintext) { + this.tokenPlaintext = tokenPlaintext; + return this; + } + + public String getTokenPlaintext() { + return tokenPlaintext; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BearerTokenAuth that = (BearerTokenAuth) o; + return Objects.equals(token, that.token) && Objects.equals(tokenPlaintext, that.tokenPlaintext); + } + + @Override + public int hashCode() { + return Objects.hash(token, tokenPlaintext); + } + + @Override + public String toString() { + return new ToStringer(BearerTokenAuth.class) + .add("token", token) + .add("tokenPlaintext", tokenPlaintext) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java new file mode 100755 index 000000000..715bf402a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/CustomProviderConfig.java @@ -0,0 +1,81 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Configs needed to create a custom provider model route. */ +@Generated +public class CustomProviderConfig { + /** + * This is a field to provide API key authentication for the custom provider API. You can only + * specify one authentication method. + */ + @JsonProperty("api_key_auth") + private ApiKeyAuth apiKeyAuth; + + /** + * This is a field to provide bearer token authentication for the custom provider API. You can + * only specify one authentication method. + */ + @JsonProperty("bearer_token_auth") + private BearerTokenAuth bearerTokenAuth; + + /** This is a field to provide the URL of the custom provider API. 
*/ + @JsonProperty("custom_provider_url") + private String customProviderUrl; + + public CustomProviderConfig setApiKeyAuth(ApiKeyAuth apiKeyAuth) { + this.apiKeyAuth = apiKeyAuth; + return this; + } + + public ApiKeyAuth getApiKeyAuth() { + return apiKeyAuth; + } + + public CustomProviderConfig setBearerTokenAuth(BearerTokenAuth bearerTokenAuth) { + this.bearerTokenAuth = bearerTokenAuth; + return this; + } + + public BearerTokenAuth getBearerTokenAuth() { + return bearerTokenAuth; + } + + public CustomProviderConfig setCustomProviderUrl(String customProviderUrl) { + this.customProviderUrl = customProviderUrl; + return this; + } + + public String getCustomProviderUrl() { + return customProviderUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomProviderConfig that = (CustomProviderConfig) o; + return Objects.equals(apiKeyAuth, that.apiKeyAuth) + && Objects.equals(bearerTokenAuth, that.bearerTokenAuth) + && Objects.equals(customProviderUrl, that.customProviderUrl); + } + + @Override + public int hashCode() { + return Objects.hash(apiKeyAuth, bearerTokenAuth, customProviderUrl); + } + + @Override + public String toString() { + return new ToStringer(CustomProviderConfig.class) + .add("apiKeyAuth", apiKeyAuth) + .add("bearerTokenAuth", bearerTokenAuth) + .add("customProviderUrl", customProviderUrl) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java index d8e6c7f7d..4c013e026 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java @@ -25,6 +25,10 @@ public class ExternalModel { @JsonProperty("cohere_config") private CohereConfig cohereConfig; + /** Custom Provider Config. Only required if the provider is 'custom'. */ + @JsonProperty("custom_provider_config") + private CustomProviderConfig customProviderConfig; + /** * Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'. 
*/ @@ -95,6 +99,15 @@ public CohereConfig getCohereConfig() { return cohereConfig; } + public ExternalModel setCustomProviderConfig(CustomProviderConfig customProviderConfig) { + this.customProviderConfig = customProviderConfig; + return this; + } + + public CustomProviderConfig getCustomProviderConfig() { + return customProviderConfig; + } + public ExternalModel setDatabricksModelServingConfig( DatabricksModelServingConfig databricksModelServingConfig) { this.databricksModelServingConfig = databricksModelServingConfig; @@ -169,6 +182,7 @@ public boolean equals(Object o) { && Objects.equals(amazonBedrockConfig, that.amazonBedrockConfig) && Objects.equals(anthropicConfig, that.anthropicConfig) && Objects.equals(cohereConfig, that.cohereConfig) + && Objects.equals(customProviderConfig, that.customProviderConfig) && Objects.equals(databricksModelServingConfig, that.databricksModelServingConfig) && Objects.equals(googleCloudVertexAiConfig, that.googleCloudVertexAiConfig) && Objects.equals(name, that.name) @@ -185,6 +199,7 @@ public int hashCode() { amazonBedrockConfig, anthropicConfig, cohereConfig, + customProviderConfig, databricksModelServingConfig, googleCloudVertexAiConfig, name, @@ -201,6 +216,7 @@ public String toString() { .add("amazonBedrockConfig", amazonBedrockConfig) .add("anthropicConfig", anthropicConfig) .add("cohereConfig", cohereConfig) + .add("customProviderConfig", customProviderConfig) .add("databricksModelServingConfig", databricksModelServingConfig) .add("googleCloudVertexAiConfig", googleCloudVertexAiConfig) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java index 0d3553cb6..6c68b40a8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java @@ -19,6 +19,9 @@ public enum ExternalModelProvider { @JsonProperty("cohere") COHERE, + @JsonProperty("custom") + CUSTOM, + @JsonProperty("databricks-model-serving") DATABRICKS_MODEL_SERVING, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfig.java new file mode 100755 index 000000000..2d521100a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/FallbackConfig.java @@ -0,0 +1,48 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.serving; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class FallbackConfig { + /** + * Whether to enable traffic fallback. When a served entity in the serving endpoint returns + * specific error codes (e.g. 500), the request will automatically be round-robin attempted with + * other served entities in the same endpoint, following the order of served entity list, until a + * successful response is returned. If all attempts fail, return the last response with the error + * code. 
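The fallback semantics described just above are wired through the AI gateway put call as well; PutAiGatewayRequest below carries the same fallback_config field. A minimal sketch of enabling fallback on an existing endpoint, assuming a configured WorkspaceClient; the endpoint name is hypothetical:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.serving.FallbackConfig;
    import com.databricks.sdk.service.serving.PutAiGatewayRequest;

    WorkspaceClient w = new WorkspaceClient();
    w.servingEndpoints().putAiGateway(new PutAiGatewayRequest()
        .setName("prod-chat-endpoint")
        .setFallbackConfig(new FallbackConfig().setEnabled(true)));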
+ */ + @JsonProperty("enabled") + private Boolean enabled; + + public FallbackConfig setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FallbackConfig that = (FallbackConfig) o; + return Objects.equals(enabled, that.enabled); + } + + @Override + public int hashCode() { + return Objects.hash(enabled); + } + + @Override + public String toString() { + return new ToStringer(FallbackConfig.class).add("enabled", enabled).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java index 84d652b28..f0f3be4d0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java @@ -11,6 +11,13 @@ @Generated public class PutAiGatewayRequest { + /** + * Configuration for traffic fallback which auto fallbacks to other served entities if the request + * to a served entity fails with certain error codes, to increase availability. + */ + @JsonProperty("fallback_config") + private FallbackConfig fallbackConfig; + /** * Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and * responses. @@ -39,6 +46,15 @@ public class PutAiGatewayRequest { @JsonProperty("usage_tracking_config") private AiGatewayUsageTrackingConfig usageTrackingConfig; + public PutAiGatewayRequest setFallbackConfig(FallbackConfig fallbackConfig) { + this.fallbackConfig = fallbackConfig; + return this; + } + + public FallbackConfig getFallbackConfig() { + return fallbackConfig; + } + public PutAiGatewayRequest setGuardrails(AiGatewayGuardrails guardrails) { this.guardrails = guardrails; return this; @@ -91,7 +107,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PutAiGatewayRequest that = (PutAiGatewayRequest) o; - return Objects.equals(guardrails, that.guardrails) + return Objects.equals(fallbackConfig, that.fallbackConfig) + && Objects.equals(guardrails, that.guardrails) && Objects.equals(inferenceTableConfig, that.inferenceTableConfig) && Objects.equals(name, that.name) && Objects.equals(rateLimits, that.rateLimits) @@ -100,12 +117,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(guardrails, inferenceTableConfig, name, rateLimits, usageTrackingConfig); + return Objects.hash( + fallbackConfig, guardrails, inferenceTableConfig, name, rateLimits, usageTrackingConfig); } @Override public String toString() { return new ToStringer(PutAiGatewayRequest.class) + .add("fallbackConfig", fallbackConfig) .add("guardrails", guardrails) .add("inferenceTableConfig", inferenceTableConfig) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java index 021c964c7..9b1a9c166 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java @@ -10,6 +10,13 @@ @Generated public class 
PutAiGatewayResponse { + /** + * Configuration for traffic fallback which auto fallbacks to other served entities if the request + * to a served entity fails with certain error codes, to increase availability. + */ + @JsonProperty("fallback_config") + private FallbackConfig fallbackConfig; + /** * Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and * responses. @@ -35,6 +42,15 @@ public class PutAiGatewayResponse { @JsonProperty("usage_tracking_config") private AiGatewayUsageTrackingConfig usageTrackingConfig; + public PutAiGatewayResponse setFallbackConfig(FallbackConfig fallbackConfig) { + this.fallbackConfig = fallbackConfig; + return this; + } + + public FallbackConfig getFallbackConfig() { + return fallbackConfig; + } + public PutAiGatewayResponse setGuardrails(AiGatewayGuardrails guardrails) { this.guardrails = guardrails; return this; @@ -78,7 +94,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PutAiGatewayResponse that = (PutAiGatewayResponse) o; - return Objects.equals(guardrails, that.guardrails) + return Objects.equals(fallbackConfig, that.fallbackConfig) + && Objects.equals(guardrails, that.guardrails) && Objects.equals(inferenceTableConfig, that.inferenceTableConfig) && Objects.equals(rateLimits, that.rateLimits) && Objects.equals(usageTrackingConfig, that.usageTrackingConfig); @@ -86,12 +103,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(guardrails, inferenceTableConfig, rateLimits, usageTrackingConfig); + return Objects.hash( + fallbackConfig, guardrails, inferenceTableConfig, rateLimits, usageTrackingConfig); } @Override public String toString() { return new ToStringer(PutAiGatewayResponse.class) + .add("fallbackConfig", fallbackConfig) .add("guardrails", guardrails) .add("inferenceTableConfig", inferenceTableConfig) .add("rateLimits", rateLimits) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadType.java index 64e5315e9..04be5a3d2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServedModelInputWorkloadType.java @@ -4,6 +4,7 @@ import com.databricks.sdk.support.Generated; +/** Please keep this in sync with workload types in InferenceEndpointEntities.scala */ @Generated public enum ServedModelInputWorkloadType { CPU, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java index 7c67cd562..63d031e96 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingModelWorkloadType.java @@ -4,6 +4,7 @@ import com.databricks.sdk.support.Generated; +/** Please keep this in sync with workload types in InferenceEndpointEntities.scala */ @Generated public enum ServingModelWorkloadType { CPU, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java index cbd1e3bcd..03b9635ab 100755 ---
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java @@ -9,5 +9,7 @@ public enum TokenType { ARCLIGHT_AZURE_EXCHANGE_TOKEN, ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY, + ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN, + ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY, AZURE_ACTIVE_DIRECTORY_TOKEN, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java similarity index 82% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java index e68e2a31b..6f59535c9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java @@ -9,7 +9,7 @@ import java.util.Objects; @Generated -public class Function { +public class DeltaSharingFunction { /** The aliass of registered model. */ @JsonProperty("aliases") private Collection aliases; @@ -74,7 +74,7 @@ public class Function { @JsonProperty("tags") private Collection tags; - public Function setAliases(Collection aliases) { + public DeltaSharingFunction setAliases(Collection aliases) { this.aliases = aliases; return this; } @@ -83,7 +83,7 @@ public Collection getAliases() { return aliases; } - public Function setComment(String comment) { + public DeltaSharingFunction setComment(String comment) { this.comment = comment; return this; } @@ -92,7 +92,7 @@ public String getComment() { return comment; } - public Function setDataType(ColumnTypeName dataType) { + public DeltaSharingFunction setDataType(ColumnTypeName dataType) { this.dataType = dataType; return this; } @@ -101,7 +101,7 @@ public ColumnTypeName getDataType() { return dataType; } - public Function setDependencyList(DeltaSharingDependencyList dependencyList) { + public DeltaSharingFunction setDependencyList(DeltaSharingDependencyList dependencyList) { this.dependencyList = dependencyList; return this; } @@ -110,7 +110,7 @@ public DeltaSharingDependencyList getDependencyList() { return dependencyList; } - public Function setFullDataType(String fullDataType) { + public DeltaSharingFunction setFullDataType(String fullDataType) { this.fullDataType = fullDataType; return this; } @@ -119,7 +119,7 @@ public String getFullDataType() { return fullDataType; } - public Function setId(String id) { + public DeltaSharingFunction setId(String id) { this.id = id; return this; } @@ -128,7 +128,7 @@ public String getId() { return id; } - public Function setInputParams(FunctionParameterInfos inputParams) { + public DeltaSharingFunction setInputParams(FunctionParameterInfos inputParams) { this.inputParams = inputParams; return this; } @@ -137,7 +137,7 @@ public FunctionParameterInfos getInputParams() { return inputParams; } - public Function setName(String name) { + public DeltaSharingFunction setName(String name) { this.name = name; return this; } @@ -146,7 +146,7 @@ public String getName() { return name; } - public Function setProperties(String properties) { + public DeltaSharingFunction setProperties(String properties) { this.properties = properties; return this; } @@ -155,7 +155,7 @@ public String getProperties() { return properties; } - public Function 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java
similarity index 82%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java
index e68e2a31b..6f59535c9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Function.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/DeltaSharingFunction.java
@@ -9,7 +9,7 @@
 import java.util.Objects;
 
 @Generated
-public class Function {
+public class DeltaSharingFunction {
   /** The aliases of the registered model. */
   @JsonProperty("aliases")
   private Collection aliases;
@@ -74,7 +74,7 @@ public class Function {
   @JsonProperty("tags")
   private Collection tags;
 
-  public Function setAliases(Collection aliases) {
+  public DeltaSharingFunction setAliases(Collection aliases) {
     this.aliases = aliases;
     return this;
   }
@@ -83,7 +83,7 @@ public Collection getAliases() {
     return aliases;
   }
 
-  public Function setComment(String comment) {
+  public DeltaSharingFunction setComment(String comment) {
     this.comment = comment;
     return this;
   }
@@ -92,7 +92,7 @@ public String getComment() {
     return comment;
   }
 
-  public Function setDataType(ColumnTypeName dataType) {
+  public DeltaSharingFunction setDataType(ColumnTypeName dataType) {
     this.dataType = dataType;
     return this;
   }
@@ -101,7 +101,7 @@ public ColumnTypeName getDataType() {
     return dataType;
   }
 
-  public Function setDependencyList(DeltaSharingDependencyList dependencyList) {
+  public DeltaSharingFunction setDependencyList(DeltaSharingDependencyList dependencyList) {
     this.dependencyList = dependencyList;
     return this;
   }
@@ -110,7 +110,7 @@ public DeltaSharingDependencyList getDependencyList() {
     return dependencyList;
   }
 
-  public Function setFullDataType(String fullDataType) {
+  public DeltaSharingFunction setFullDataType(String fullDataType) {
     this.fullDataType = fullDataType;
     return this;
   }
@@ -119,7 +119,7 @@ public String getFullDataType() {
     return fullDataType;
   }
 
-  public Function setId(String id) {
+  public DeltaSharingFunction setId(String id) {
     this.id = id;
     return this;
   }
@@ -128,7 +128,7 @@ public String getId() {
     return id;
   }
 
-  public Function setInputParams(FunctionParameterInfos inputParams) {
+  public DeltaSharingFunction setInputParams(FunctionParameterInfos inputParams) {
     this.inputParams = inputParams;
     return this;
   }
@@ -137,7 +137,7 @@ public FunctionParameterInfos getInputParams() {
     return inputParams;
   }
 
-  public Function setName(String name) {
+  public DeltaSharingFunction setName(String name) {
     this.name = name;
     return this;
   }
@@ -146,7 +146,7 @@ public String getName() {
     return name;
   }
 
-  public Function setProperties(String properties) {
+  public DeltaSharingFunction setProperties(String properties) {
     this.properties = properties;
     return this;
   }
@@ -155,7 +155,7 @@ public String getProperties() {
     return properties;
   }
 
-  public Function setRoutineDefinition(String routineDefinition) {
+  public DeltaSharingFunction setRoutineDefinition(String routineDefinition) {
     this.routineDefinition = routineDefinition;
     return this;
   }
@@ -164,7 +164,7 @@ public String getRoutineDefinition() {
     return routineDefinition;
   }
 
-  public Function setSchema(String schema) {
+  public DeltaSharingFunction setSchema(String schema) {
     this.schema = schema;
     return this;
   }
@@ -173,7 +173,7 @@ public String getSchema() {
     return schema;
   }
 
-  public Function setSecurableKind(SharedSecurableKind securableKind) {
+  public DeltaSharingFunction setSecurableKind(SharedSecurableKind securableKind) {
     this.securableKind = securableKind;
     return this;
   }
@@ -182,7 +182,7 @@ public SharedSecurableKind getSecurableKind() {
     return securableKind;
   }
 
-  public Function setShare(String share) {
+  public DeltaSharingFunction setShare(String share) {
     this.share = share;
     return this;
   }
@@ -191,7 +191,7 @@ public String getShare() {
     return share;
   }
 
-  public Function setShareId(String shareId) {
+  public DeltaSharingFunction setShareId(String shareId) {
     this.shareId = shareId;
     return this;
   }
@@ -200,7 +200,7 @@ public String getShareId() {
     return shareId;
   }
 
-  public Function setStorageLocation(String storageLocation) {
+  public DeltaSharingFunction setStorageLocation(String storageLocation) {
     this.storageLocation = storageLocation;
     return this;
   }
@@ -209,7 +209,8 @@ public String getStorageLocation() {
     return storageLocation;
   }
 
-  public Function setTags(Collection tags) {
+  public DeltaSharingFunction setTags(
+      Collection tags) {
     this.tags = tags;
     return this;
   }
@@ -222,7 +223,7 @@ public Collection getTags() {
   public boolean equals(Object o) {
     if (this == o) return true;
     if (o == null || getClass() != o.getClass()) return false;
-    Function that = (Function) o;
+    DeltaSharingFunction that = (DeltaSharingFunction) o;
     return Objects.equals(aliases, that.aliases)
         && Objects.equals(comment, that.comment)
         && Objects.equals(dataType, that.dataType)
@@ -264,7 +265,7 @@ public int hashCode() {
 
   @Override
   public String toString() {
-    return new ToStringer(Function.class)
+    return new ToStringer(DeltaSharingFunction.class)
         .add("aliases", aliases)
         .add("comment", comment)
         .add("dataType", dataType)
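Because this renames a public class, any caller that referenced com.databricks.sdk.service.sharing.Function must switch to the new name; the getters themselves are unchanged. A hypothetical migration sketch, not from this diff:

// Before (no longer compiles after this change):
// com.databricks.sdk.service.sharing.Function fn = ...;

// After: same shape, new name.
import com.databricks.sdk.service.sharing.DeltaSharingFunction;

class RenameMigrationSketch {
  static String describe(DeltaSharingFunction fn) {
    // Only the class name differs; accessors are identical.
    return fn.getName() + " in share " + fn.getShare();
  }
}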
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java
index 45252078d..aca64ba2d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ListProviderShareAssetsResponse.java
@@ -13,7 +13,7 @@ public class ListProviderShareAssetsResponse {
   /** The list of functions in the share. */
   @JsonProperty("functions")
-  private Collection<Function> functions;
+  private Collection<DeltaSharingFunction> functions;
 
   /** The list of notebooks in the share. */
   @JsonProperty("notebooks")
@@ -27,12 +27,12 @@ public class ListProviderShareAssetsResponse {
   @JsonProperty("volumes")
   private Collection volumes;
 
-  public ListProviderShareAssetsResponse setFunctions(Collection<Function> functions) {
+  public ListProviderShareAssetsResponse setFunctions(Collection<DeltaSharingFunction> functions) {
     this.functions = functions;
     return this;
   }
 
-  public Collection<Function> getFunctions() {
+  public Collection<DeltaSharingFunction> getFunctions() {
     return functions;
   }
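A sketch of consuming the retyped functions field, assuming the element type is DeltaSharingFunction as reconstructed above; hypothetical caller code, not from this diff.

import com.databricks.sdk.service.sharing.DeltaSharingFunction;
import com.databricks.sdk.service.sharing.ListProviderShareAssetsResponse;

class ListFunctionsSketch {
  static void printFunctions(ListProviderShareAssetsResponse response) {
    if (response.getFunctions() == null) {
      return; // the field is optional in the wire format
    }
    for (DeltaSharingFunction fn : response.getFunctions()) {
      System.out.println(fn.getName());
    }
  }
}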
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySource.java
new file mode 100755
index 000000000..762b2f47a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySource.java
@@ -0,0 +1,135 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ExternalQuerySource {
+  /** The canonical identifier for this SQL alert */
+  @JsonProperty("alert_id")
+  private String alertId;
+
+  /** The canonical identifier for this Lakeview dashboard */
+  @JsonProperty("dashboard_id")
+  private String dashboardId;
+
+  /** The canonical identifier for this Genie space */
+  @JsonProperty("genie_space_id")
+  private String genieSpaceId;
+
+  /** */
+  @JsonProperty("job_info")
+  private ExternalQuerySourceJobInfo jobInfo;
+
+  /** The canonical identifier for this legacy dashboard */
+  @JsonProperty("legacy_dashboard_id")
+  private String legacyDashboardId;
+
+  /** The canonical identifier for this notebook */
+  @JsonProperty("notebook_id")
+  private String notebookId;
+
+  /** The canonical identifier for this SQL query */
+  @JsonProperty("sql_query_id")
+  private String sqlQueryId;
+
+  public ExternalQuerySource setAlertId(String alertId) {
+    this.alertId = alertId;
+    return this;
+  }
+
+  public String getAlertId() {
+    return alertId;
+  }
+
+  public ExternalQuerySource setDashboardId(String dashboardId) {
+    this.dashboardId = dashboardId;
+    return this;
+  }
+
+  public String getDashboardId() {
+    return dashboardId;
+  }
+
+  public ExternalQuerySource setGenieSpaceId(String genieSpaceId) {
+    this.genieSpaceId = genieSpaceId;
+    return this;
+  }
+
+  public String getGenieSpaceId() {
+    return genieSpaceId;
+  }
+
+  public ExternalQuerySource setJobInfo(ExternalQuerySourceJobInfo jobInfo) {
+    this.jobInfo = jobInfo;
+    return this;
+  }
+
+  public ExternalQuerySourceJobInfo getJobInfo() {
+    return jobInfo;
+  }
+
+  public ExternalQuerySource setLegacyDashboardId(String legacyDashboardId) {
+    this.legacyDashboardId = legacyDashboardId;
+    return this;
+  }
+
+  public String getLegacyDashboardId() {
+    return legacyDashboardId;
+  }
+
+  public ExternalQuerySource setNotebookId(String notebookId) {
+    this.notebookId = notebookId;
+    return this;
+  }
+
+  public String getNotebookId() {
+    return notebookId;
+  }
+
+  public ExternalQuerySource setSqlQueryId(String sqlQueryId) {
+    this.sqlQueryId = sqlQueryId;
+    return this;
+  }
+
+  public String getSqlQueryId() {
+    return sqlQueryId;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    ExternalQuerySource that = (ExternalQuerySource) o;
+    return Objects.equals(alertId, that.alertId)
+        && Objects.equals(dashboardId, that.dashboardId)
+        && Objects.equals(genieSpaceId, that.genieSpaceId)
+        && Objects.equals(jobInfo, that.jobInfo)
+        && Objects.equals(legacyDashboardId, that.legacyDashboardId)
+        && Objects.equals(notebookId, that.notebookId)
+        && Objects.equals(sqlQueryId, that.sqlQueryId);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(
+        alertId, dashboardId, genieSpaceId, jobInfo, legacyDashboardId, notebookId, sqlQueryId);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(ExternalQuerySource.class)
+        .add("alertId", alertId)
+        .add("dashboardId", dashboardId)
+        .add("genieSpaceId", genieSpaceId)
+        .add("jobInfo", jobInfo)
+        .add("legacyDashboardId", legacyDashboardId)
+        .add("notebookId", notebookId)
+        .add("sqlQueryId", sqlQueryId)
+        .toString();
+  }
+}
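A sketch of classifying where a query-history entry originated, using this new source struct together with the query_source field added to QueryInfo later in this diff. The assumption that at most one identifier is populated per source is implied by the field docs, not stated; hypothetical caller code.

import com.databricks.sdk.service.sql.ExternalQuerySource;
import com.databricks.sdk.service.sql.QueryInfo;

class QuerySourceSketch {
  static String origin(QueryInfo info) {
    ExternalQuerySource s = info.getQuerySource(); // added to QueryInfo below
    if (s == null) return "ad hoc / unknown";
    if (s.getJobInfo() != null) return "job run " + s.getJobInfo().getJobRunId();
    if (s.getNotebookId() != null) return "notebook " + s.getNotebookId();
    if (s.getDashboardId() != null) return "Lakeview dashboard " + s.getDashboardId();
    if (s.getLegacyDashboardId() != null) return "legacy dashboard " + s.getLegacyDashboardId();
    if (s.getAlertId() != null) return "alert " + s.getAlertId();
    if (s.getGenieSpaceId() != null) return "Genie space " + s.getGenieSpaceId();
    if (s.getSqlQueryId() != null) return "saved query " + s.getSqlQueryId();
    return "unspecified Databricks entity";
  }
}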
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfo.java
new file mode 100755
index 000000000..92a351a14
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ExternalQuerySourceJobInfo.java
@@ -0,0 +1,74 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.sql;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ExternalQuerySourceJobInfo {
+  /** The canonical identifier for this job. */
+  @JsonProperty("job_id")
+  private String jobId;
+
+  /** The canonical identifier of the run. This ID is unique across all runs of all jobs. */
+  @JsonProperty("job_run_id")
+  private String jobRunId;
+
+  /** The canonical identifier of the task run. */
+  @JsonProperty("job_task_run_id")
+  private String jobTaskRunId;
+
+  public ExternalQuerySourceJobInfo setJobId(String jobId) {
+    this.jobId = jobId;
+    return this;
+  }
+
+  public String getJobId() {
+    return jobId;
+  }
+
+  public ExternalQuerySourceJobInfo setJobRunId(String jobRunId) {
+    this.jobRunId = jobRunId;
+    return this;
+  }
+
+  public String getJobRunId() {
+    return jobRunId;
+  }
+
+  public ExternalQuerySourceJobInfo setJobTaskRunId(String jobTaskRunId) {
+    this.jobTaskRunId = jobTaskRunId;
+    return this;
+  }
+
+  public String getJobTaskRunId() {
+    return jobTaskRunId;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    ExternalQuerySourceJobInfo that = (ExternalQuerySourceJobInfo) o;
+    return Objects.equals(jobId, that.jobId)
+        && Objects.equals(jobRunId, that.jobRunId)
+        && Objects.equals(jobTaskRunId, that.jobTaskRunId);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(jobId, jobRunId, jobTaskRunId);
+  }
+
+  @Override
+  public String toString() {
+    return new ToStringer(ExternalQuerySourceJobInfo.class)
+        .add("jobId", jobId)
+        .add("jobRunId", jobRunId)
+        .add("jobTaskRunId", jobTaskRunId)
+        .toString();
+  }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java
index 21f5bdede..86de0c875 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java
@@ -61,6 +61,14 @@ public class QueryInfo {
   @JsonProperty("query_id")
   private String queryId;
 
+  /**
+   * A struct that contains key-value pairs representing Databricks entities that were involved in
+   * the execution of this statement, such as jobs, notebooks, or dashboards. This field only
+   * records Databricks entities.
+   */
+  @JsonProperty("query_source")
+  private ExternalQuerySource querySource;
+
   /** The time the query started. */
   @JsonProperty("query_start_time_ms")
   private Long queryStartTimeMs;
@@ -220,6 +228,15 @@ public String getQueryId() {
     return queryId;
   }
 
+  public QueryInfo setQuerySource(ExternalQuerySource querySource) {
+    this.querySource = querySource;
+    return this;
+  }
+
+  public ExternalQuerySource getQuerySource() {
+    return querySource;
+  }
+
   public QueryInfo setQueryStartTimeMs(Long queryStartTimeMs) {
     this.queryStartTimeMs = queryStartTimeMs;
     return this;
@@ -319,6 +336,7 @@ public boolean equals(Object o) {
         && Objects.equals(plansState, that.plansState)
         && Objects.equals(queryEndTimeMs, that.queryEndTimeMs)
         && Objects.equals(queryId, that.queryId)
+        && Objects.equals(querySource, that.querySource)
         && Objects.equals(queryStartTimeMs, that.queryStartTimeMs)
         && Objects.equals(queryText, that.queryText)
         && Objects.equals(rowsProduced, that.rowsProduced)
@@ -346,6 +364,7 @@ public int hashCode() {
         plansState,
         queryEndTimeMs,
         queryId,
+        querySource,
         queryStartTimeMs,
         queryText,
         rowsProduced,
@@ -373,6 +392,7 @@ public String toString() {
         .add("plansState", plansState)
         .add("queryEndTimeMs", queryEndTimeMs)
         .add("queryId", queryId)
+        .add("querySource", querySource)
         .add("queryStartTimeMs", queryStartTimeMs)
        .add("queryText", queryText)
        .add("rowsProduced", rowsProduced)