diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 90102e0a3..153783450 100755 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -bbafbee4a953dd371abfc51ab8806d33eba2b734 \ No newline at end of file +11ae6f9d98f0d0838a5e53c27032f178fecc4ee0 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index c56fb04a2..6fc8f76f1 100755 --- a/.gitattributes +++ b/.gitattributes @@ -66,6 +66,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissions databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResource.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceApp.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceAppAppPermission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabase.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceDatabaseDatabasePermission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceExperiment.java linguist-generated=true @@ -85,6 +86,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceSql databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurablePermission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceUcSecurableUcSecurableType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppThumbnail.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdate.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppUpdateUpdateStatusUpdateState.java linguist-generated=true @@ -108,6 +110,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateSpaceReq databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CustomTemplate.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DatabricksServiceExceptionWithDetailsProto.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppThumbnailRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteCustomTemplateRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteSpaceOperation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteSpaceRequest.java linguist-generated=true @@ -144,6 +147,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequest databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/TelemetryExportDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UnityCatalog.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppThumbnailRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateCustomTemplateRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateSpaceOperation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateSpaceRequest.java linguist-generated=true @@ -261,6 +265,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleR databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsSqsQueue.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureActiveDirectoryToken.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureEncryptionSettings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java linguist-generated=true @@ -370,6 +375,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePr databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredictiveOptimization.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetails.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionSettings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsAPI.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignmentsImpl.java linguist-generated=true @@ -985,6 +991,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCon databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversationSummary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateEvalRunRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateMessageCommentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateSpaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationMessageRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationRequest.java linguist-generated=true @@ -1011,6 +1018,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGet databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetQueryResultByAttachmentRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetSpaceRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationCommentsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationCommentsResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationMessagesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationMessagesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationsRequest.java linguist-generated=true @@ -1019,9 +1028,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieLis databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalResultsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListEvalRunsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListMessageCommentsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListMessageCommentsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListSpacesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessageComment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieResultMetadata.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java linguist-generated=true @@ -1068,6 +1080,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrip databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachmentPurpose.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ThoughtType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true @@ -1880,10 +1894,12 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreAPI. 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureStoreService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FeatureTag.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FieldDefinition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FileInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FinalizeLoggedModelResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FirstFunction.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FlatSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperiment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ForecastingExperimentState.java linguist-generated=true @@ -2012,6 +2028,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransition databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RejectTransitionRequestResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RenameModelResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RequestSource.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreExperiment.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RestoreRuns.java linguist-generated=true @@ -2022,6 +2039,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfo.java lin databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInfoStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunInputs.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RunTag.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ScalarDataType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SchemaConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperiments.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SearchExperimentsResponse.java linguist-generated=true @@ -2146,11 +2164,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateAccoun databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateCustomAppIntegration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdatePublishedAppIntegration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/UpdateServicePrincipalFederationPolicyRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/AutoFullRefreshPolicy.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CloneMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ClonePipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectionParameters.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorOptions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipelineResponse.java linguist-generated=true @@ -2164,6 +2185,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipel databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetail.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLevel.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EventLogSpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileFilter.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptionsFileFormat.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptionsSchemaEvolutionMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileLibrary.java 
linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Filters.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelinePermissionLevelsRequest.java linguist-generated=true @@ -2174,6 +2199,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipeli databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponseHealth.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetUpdateResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleAdsOptions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptionsGoogleDriveEntityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionGatewayPipelineDefinition.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java linguist-generated=true @@ -2228,6 +2256,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.jav databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SerializedException.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptionsSharepointEntityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SourceCatalogConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SourceConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StackFrame.java linguist-generated=true @@ -2238,6 +2268,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/StopReque databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfigScdType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptions.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptionsTikTokDataLevel.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptionsTikTokReportType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Truncation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TruncationTruncationDetail.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/UpdateInfo.java linguist-generated=true @@ -2635,13 +2668,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablem 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicy.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthentication.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthenticationIdentity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthenticationIdentityPrincipalType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAuthenticationIdentityType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyIpRanges.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseDestination.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicAccess.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicAccessRestrictionMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyPublicIngressRule.java linguist-generated=true @@ -2760,6 +2791,7 @@ 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTo databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/FetchIpAccessListResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GcpEndpoint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GenericWebhookConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessEnableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAccountIpAccessListRequest.java linguist-generated=true @@ -3352,6 +3384,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Endpoi databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetEndpointRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/IndexSubtype.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListEndpointsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ListIndexesRequest.java linguist-generated=true diff --git a/.github/workflows/next-changelog.yml b/.github/workflows/next-changelog.yml index 
b321edd6e..847aadb8e 100755 --- a/.github/workflows/next-changelog.yml +++ b/.github/workflows/next-changelog.yml @@ -4,16 +4,19 @@ name: Check for NEXT_CHANGELOG.md Changes on: # Use pull_request_target to have access to GitHub API pull_request_target: + types: [opened, synchronize, reopened, edited] jobs: check-next-changelog: + # Allow Dependabot PRs to pass without a changelog entry + if: github.actor != 'dependabot[bot]' runs-on: group: databricks-deco-testing-runner-group labels: ubuntu-latest-deco steps: - name: Checkout code - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0 + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Fetch list of changed files id: changed-files diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 8017d10a0..75cba1e67 100755 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -29,3 +29,34 @@ * Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectSpec`. * Add `defaultBranch` field for `com.databricks.sdk.service.postgres.ProjectStatus`. * Add `ingress` and `ingressDryRun` fields for `com.databricks.sdk.service.settings.AccountNetworkPolicy`. +* Add `deleteAppThumbnail()` and `updateAppThumbnail()` methods for `workspaceClient.apps()` service. +* Add `createMessageComment()`, `listConversationComments()` and `listMessageComments()` methods for `workspaceClient.genie()` service. +* Add `applyEnvironment()` method for `workspaceClient.pipelines()` service. +* Add `name` and `permission` fields for `com.databricks.sdk.service.apps.AppResourceApp`. +* Add `managedEncryptionSettings` field for `com.databricks.sdk.service.catalog.CatalogInfo`. +* Add `managedEncryptionSettings` field for `com.databricks.sdk.service.catalog.CreateCatalog`. +* Add `managedEncryptionSettings` field for `com.databricks.sdk.service.catalog.UpdateCatalog`. +* Add `comment` field for `com.databricks.sdk.service.dashboards.GenieFeedback`. 
+* Add `thoughts` field for `com.databricks.sdk.service.dashboards.GenieQueryAttachment`. +* Add `comment` field for `com.databricks.sdk.service.dashboards.GenieSendMessageFeedbackRequest`. +* Add `requestSource` field for `com.databricks.sdk.service.ml.DataSource`. +* Add `isOnline` field for `com.databricks.sdk.service.ml.MaterializedFeature`. +* Add `connectorOptions` field for `com.databricks.sdk.service.pipelines.SchemaSpec`. +* Add `connectorOptions` field for `com.databricks.sdk.service.pipelines.TableSpec`. +* Add `scopes` field for `com.databricks.sdk.service.settings.CreateOboTokenRequest`. +* Add `gcpEndpoint` field for `com.databricks.sdk.service.settings.CreatePrivateEndpointRule`. +* Add `scopes` field for `com.databricks.sdk.service.settings.CreateTokenRequest`. +* Add `gcpEndpoint` field for `com.databricks.sdk.service.settings.NccPrivateEndpointRule`. +* Add `gcpEndpoint` field for `com.databricks.sdk.service.settings.UpdatePrivateEndpointRule`. +* Add `sqlState` field for `com.databricks.sdk.service.sql.StatementStatus`. +* Add `usagePolicyId` field for `com.databricks.sdk.service.vectorsearch.CreateEndpoint`. +* Add `indexSubtype` field for `com.databricks.sdk.service.vectorsearch.CreateVectorIndexRequest`. +* Add `budgetPolicyId` field for `com.databricks.sdk.service.vectorsearch.EndpointInfo`. +* Add `indexSubtype` field for `com.databricks.sdk.service.vectorsearch.MiniVectorIndex`. +* Add `budgetPolicyId` field for `com.databricks.sdk.service.vectorsearch.PatchEndpointBudgetPolicyResponse`. +* Add `indexSubtype` field for `com.databricks.sdk.service.vectorsearch.VectorIndex`. +* Add `TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL_DELTASHARING` enum value for `com.databricks.sdk.service.catalog.SecurableKind`. +* Add `GOOGLE_DRIVE` enum value for `com.databricks.sdk.service.pipelines.IngestionSourceType`. +* Add `STORAGE_OPTIMIZED` enum value for `com.databricks.sdk.service.vectorsearch.EndpointType`. 
+* [Breaking] Remove `project` field for `com.databricks.sdk.service.postgres.SyncedTableSyncedTableSpec`. +* [Breaking] Remove `apps` and `lakebase` fields for `com.databricks.sdk.service.settings.CustomerFacingIngressNetworkPolicyRequestDestination`. \ No newline at end of file diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java index d750badd9..3b26a002b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java @@ -1142,6 +1142,7 @@ public WorkspaceClient getWorkspaceClient(Workspace workspace) { workspaceConfig.setWorkspaceId(String.valueOf(workspace.getWorkspaceId())); return new WorkspaceClient(workspaceConfig); } + // Traditional: use the deployment URL DatabricksConfig config = this.config.newWithWorkspaceHost(host); AzureUtils.getAzureWorkspaceResourceId(workspace).map(config::setAzureWorkspaceResourceId); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceApp.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceApp.java index 5a51792a0..94f288a2d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceApp.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceApp.java @@ -4,25 +4,55 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; @Generated public class AppResourceApp { + /** */ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("permission") + private AppResourceAppAppPermission permission; + + public AppResourceApp setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public AppResourceApp 
setPermission(AppResourceAppAppPermission permission) { + this.permission = permission; + return this; + } + + public AppResourceAppAppPermission getPermission() { + return permission; + } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - return true; + AppResourceApp that = (AppResourceApp) o; + return Objects.equals(name, that.name) && Objects.equals(permission, that.permission); } @Override public int hashCode() { - return Objects.hash(); + return Objects.hash(name, permission); } @Override public String toString() { - return new ToStringer(AppResourceApp.class).toString(); + return new ToStringer(AppResourceApp.class) + .add("name", name) + .add("permission", permission) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceAppAppPermission.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceAppAppPermission.java new file mode 100755 index 000000000..2d26a070c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppResourceAppAppPermission.java @@ -0,0 +1,10 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AppResourceAppAppPermission { + CAN_USE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppThumbnail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppThumbnail.java new file mode 100755 index 000000000..ef06affcc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppThumbnail.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** The thumbnail for an app. */ +@Generated +public class AppThumbnail { + /** The thumbnail image bytes. */ + @JsonProperty("thumbnail") + private String thumbnail; + + public AppThumbnail setThumbnail(String thumbnail) { + this.thumbnail = thumbnail; + return this; + } + + public String getThumbnail() { + return thumbnail; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AppThumbnail that = (AppThumbnail) o; + return Objects.equals(thumbnail, that.thumbnail); + } + + @Override + public int hashCode() { + return Objects.hash(thumbnail); + } + + @Override + public String toString() { + return new ToStringer(AppThumbnail.class).add("thumbnail", thumbnail).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java index ae0263216..b07bc80ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java @@ -261,6 +261,15 @@ public App delete(DeleteAppRequest request) { return impl.delete(request); } + public void deleteAppThumbnail(String name) { + deleteAppThumbnail(new DeleteAppThumbnailRequest().setName(name)); + } + + /** Deletes the thumbnail for an app. 
*/ + public void deleteAppThumbnail(DeleteAppThumbnailRequest request) { + impl.deleteAppThumbnail(request); + } + public DeleteSpaceOperation deleteSpace(String name) { return deleteSpace(new DeleteSpaceRequest().setName(name)); } @@ -421,6 +430,11 @@ public App update(UpdateAppRequest request) { return impl.update(request); } + /** Updates the thumbnail for an app. */ + public AppThumbnail updateAppThumbnail(UpdateAppThumbnailRequest request) { + return impl.updateAppThumbnail(request); + } + /** Updates the permissions on an app. Apps can inherit permissions from their root object. */ public AppPermissions updatePermissions(AppPermissionsRequest request) { return impl.updatePermissions(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java index 48c68405c..c2740591d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java @@ -87,6 +87,23 @@ public App delete(DeleteAppRequest request) { } } + @Override + public void deleteAppThumbnail(DeleteAppThumbnailRequest request) { + String path = String.format("/api/2.0/apps/%s/thumbnail", request.getName()); + try { + Request req = new Request("DELETE", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + apiClient.execute(req, Void.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public Operation deleteSpace(DeleteSpaceRequest request) { String path = String.format("/api/2.0/app-spaces/%s", request.getName()); @@ -367,6 +384,24 @@ public App update(UpdateAppRequest request) { } } + @Override + public AppThumbnail 
updateAppThumbnail(UpdateAppThumbnailRequest request) { + String path = String.format("/api/2.0/apps/%s/thumbnail", request.getName()); + try { + Request req = new Request("PATCH", path, apiClient.serialize(request)); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, AppThumbnail.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public AppPermissions updatePermissions(AppPermissionsRequest request) { String path = String.format("/api/2.0/permissions/apps/%s", request.getAppName()); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java index 3108a4851..ac6340281 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java @@ -28,6 +28,9 @@ public interface AppsService { /** Deletes an app. */ App delete(DeleteAppRequest deleteAppRequest); + /** Deletes the thumbnail for an app. */ + void deleteAppThumbnail(DeleteAppThumbnailRequest deleteAppThumbnailRequest); + /** Deletes an app space. */ Operation deleteSpace(DeleteSpaceRequest deleteSpaceRequest); @@ -80,6 +83,9 @@ GetAppPermissionLevelsResponse getPermissionLevels( /** Updates the app with the supplied name. */ App update(UpdateAppRequest updateAppRequest); + /** Updates the thumbnail for an app. */ + AppThumbnail updateAppThumbnail(UpdateAppThumbnailRequest updateAppThumbnailRequest); + /** Updates the permissions on an app. Apps can inherit permissions from their root object. 
*/ AppPermissions updatePermissions(AppPermissionsRequest appPermissionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppThumbnailRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppThumbnailRequest.java new file mode 100755 index 000000000..17b48a2ae --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppThumbnailRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class DeleteAppThumbnailRequest { + /** The name of the app. */ + @JsonIgnore private String name; + + public DeleteAppThumbnailRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteAppThumbnailRequest that = (DeleteAppThumbnailRequest) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + + @Override + public String toString() { + return new ToStringer(DeleteAppThumbnailRequest.class).add("name", name).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppThumbnailRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppThumbnailRequest.java new file mode 100755 index 000000000..1746c52ba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppThumbnailRequest.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.apps; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateAppThumbnailRequest { + /** The app thumbnail to set. */ + @JsonProperty("app_thumbnail") + private AppThumbnail appThumbnail; + + /** The name of the app. */ + @JsonIgnore private String name; + + public UpdateAppThumbnailRequest setAppThumbnail(AppThumbnail appThumbnail) { + this.appThumbnail = appThumbnail; + return this; + } + + public AppThumbnail getAppThumbnail() { + return appThumbnail; + } + + public UpdateAppThumbnailRequest setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateAppThumbnailRequest that = (UpdateAppThumbnailRequest) o; + return Objects.equals(appThumbnail, that.appThumbnail) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(appThumbnail, name); + } + + @Override + public String toString() { + return new ToStringer(UpdateAppThumbnailRequest.class) + .add("appThumbnail", appThumbnail) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureEncryptionSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureEncryptionSettings.java new file mode 100755 index 000000000..27932bf57 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureEncryptionSettings.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AzureEncryptionSettings { + /** */ + @JsonProperty("azure_cmk_access_connector_id") + private String azureCmkAccessConnectorId; + + /** */ + @JsonProperty("azure_cmk_managed_identity_id") + private String azureCmkManagedIdentityId; + + /** */ + @JsonProperty("azure_tenant_id") + private String azureTenantId; + + public AzureEncryptionSettings setAzureCmkAccessConnectorId(String azureCmkAccessConnectorId) { + this.azureCmkAccessConnectorId = azureCmkAccessConnectorId; + return this; + } + + public String getAzureCmkAccessConnectorId() { + return azureCmkAccessConnectorId; + } + + public AzureEncryptionSettings setAzureCmkManagedIdentityId(String azureCmkManagedIdentityId) { + this.azureCmkManagedIdentityId = azureCmkManagedIdentityId; + return this; + } + + public String getAzureCmkManagedIdentityId() { + return azureCmkManagedIdentityId; + } + + public AzureEncryptionSettings setAzureTenantId(String azureTenantId) { + this.azureTenantId = azureTenantId; + return this; + } + + public String getAzureTenantId() { + return azureTenantId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureEncryptionSettings that = (AzureEncryptionSettings) o; + return Objects.equals(azureCmkAccessConnectorId, that.azureCmkAccessConnectorId) + && Objects.equals(azureCmkManagedIdentityId, that.azureCmkManagedIdentityId) + && Objects.equals(azureTenantId, that.azureTenantId); + } + + @Override + public int hashCode() { + return Objects.hash(azureCmkAccessConnectorId, azureCmkManagedIdentityId, azureTenantId); + } + + @Override + public String toString() { + return new ToStringer(AzureEncryptionSettings.class) + 
.add("azureCmkAccessConnectorId", azureCmkAccessConnectorId) + .add("azureCmkManagedIdentityId", azureCmkManagedIdentityId) + .add("azureTenantId", azureTenantId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java index 170f10432..9b30a22ce 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java @@ -56,6 +56,10 @@ public class CatalogInfo { @JsonProperty("isolation_mode") private CatalogIsolationMode isolationMode; + /** Control CMK encryption for managed catalog data */ + @JsonProperty("managed_encryption_settings") + private EncryptionSettings managedEncryptionSettings; + /** Unique identifier of parent metastore. */ @JsonProperty("metastore_id") private String metastoreId; @@ -205,6 +209,15 @@ public CatalogIsolationMode getIsolationMode() { return isolationMode; } + public CatalogInfo setManagedEncryptionSettings(EncryptionSettings managedEncryptionSettings) { + this.managedEncryptionSettings = managedEncryptionSettings; + return this; + } + + public EncryptionSettings getManagedEncryptionSettings() { + return managedEncryptionSettings; + } + public CatalogInfo setMetastoreId(String metastoreId) { this.metastoreId = metastoreId; return this; @@ -338,6 +351,7 @@ public boolean equals(Object o) { && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) && Objects.equals(fullName, that.fullName) && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(managedEncryptionSettings, that.managedEncryptionSettings) && Objects.equals(metastoreId, that.metastoreId) && Objects.equals(name, that.name) && Objects.equals(options, that.options) @@ -366,6 +380,7 @@ public int hashCode() { enablePredictiveOptimization, fullName, isolationMode, + 
managedEncryptionSettings, metastoreId, name, options, @@ -394,6 +409,7 @@ public String toString() { .add("enablePredictiveOptimization", enablePredictiveOptimization) .add("fullName", fullName) .add("isolationMode", isolationMode) + .add("managedEncryptionSettings", managedEncryptionSettings) .add("metastoreId", metastoreId) .add("name", name) .add("options", options) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java index 6aad7c8f3..4b5c6bb3e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionInfo.java @@ -8,7 +8,7 @@ import java.util.Map; import java.util.Objects; -/** Next ID: 24 */ +/** Next ID: 25 */ @Generated public class ConnectionInfo { /** User-provided free-form text description. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java index cdd501ecb..a289a2e5a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 72 */ +/** Next Id: 75 */ @Generated public enum ConnectionType { BIGQUERY, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java index 2d8d187df..40ab2d8c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java @@ -18,6 +18,10 @@ public class 
CreateCatalog { @JsonProperty("connection_name") private String connectionName; + /** Control CMK encryption for managed catalog data */ + @JsonProperty("managed_encryption_settings") + private EncryptionSettings managedEncryptionSettings; + /** Name of catalog. */ @JsonProperty("name") private String name; @@ -65,6 +69,15 @@ public String getConnectionName() { return connectionName; } + public CreateCatalog setManagedEncryptionSettings(EncryptionSettings managedEncryptionSettings) { + this.managedEncryptionSettings = managedEncryptionSettings; + return this; + } + + public EncryptionSettings getManagedEncryptionSettings() { + return managedEncryptionSettings; + } + public CreateCatalog setName(String name) { this.name = name; return this; @@ -126,6 +139,7 @@ public boolean equals(Object o) { CreateCatalog that = (CreateCatalog) o; return Objects.equals(comment, that.comment) && Objects.equals(connectionName, that.connectionName) + && Objects.equals(managedEncryptionSettings, that.managedEncryptionSettings) && Objects.equals(name, that.name) && Objects.equals(options, that.options) && Objects.equals(properties, that.properties) @@ -137,7 +151,15 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - comment, connectionName, name, options, properties, providerName, shareName, storageRoot); + comment, + connectionName, + managedEncryptionSettings, + name, + options, + properties, + providerName, + shareName, + storageRoot); } @Override @@ -145,6 +167,7 @@ public String toString() { return new ToStringer(CreateCatalog.class) .add("comment", comment) .add("connectionName", connectionName) + .add("managedEncryptionSettings", managedEncryptionSettings) .add("name", name) .add("options", options) .add("properties", properties) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java index 
cba701d06..d0bfd4a74 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CredentialType.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Next Id: 17 */ +/** Next Id: 18 */ @Generated public enum CredentialType { ANY_STATIC_CREDENTIAL, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteEntityTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteEntityTagAssignmentRequest.java index 35871f23c..f66f7e9f7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteEntityTagAssignmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteEntityTagAssignmentRequest.java @@ -12,10 +12,7 @@ public class DeleteEntityTagAssignmentRequest { /** The fully qualified name of the entity to which the tag is assigned */ @JsonIgnore private String entityName; - /** - * The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, - * tables, columns, volumes. - */ + /** The type of the entity to which the tag is assigned. */ @JsonIgnore private String entityType; /** Required. The key of the tag to delete */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionSettings.java new file mode 100755 index 000000000..fbd0cae9d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionSettings.java @@ -0,0 +1,79 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Encryption Settings are used to carry metadata for securable encryption at rest. Currently used + * for catalogs, we can use the information supplied here to interact with a CMK. + */ +@Generated +public class EncryptionSettings { + /** optional Azure settings - only required if an Azure CMK is used. */ + @JsonProperty("azure_encryption_settings") + private AzureEncryptionSettings azureEncryptionSettings; + + /** the AKV URL in Azure, null otherwise. */ + @JsonProperty("azure_key_vault_key_id") + private String azureKeyVaultKeyId; + + /** the CMK uuid in AWS and GCP, null otherwise. */ + @JsonProperty("customer_managed_key_id") + private String customerManagedKeyId; + + public EncryptionSettings setAzureEncryptionSettings( + AzureEncryptionSettings azureEncryptionSettings) { + this.azureEncryptionSettings = azureEncryptionSettings; + return this; + } + + public AzureEncryptionSettings getAzureEncryptionSettings() { + return azureEncryptionSettings; + } + + public EncryptionSettings setAzureKeyVaultKeyId(String azureKeyVaultKeyId) { + this.azureKeyVaultKeyId = azureKeyVaultKeyId; + return this; + } + + public String getAzureKeyVaultKeyId() { + return azureKeyVaultKeyId; + } + + public EncryptionSettings setCustomerManagedKeyId(String customerManagedKeyId) { + this.customerManagedKeyId = customerManagedKeyId; + return this; + } + + public String getCustomerManagedKeyId() { + return customerManagedKeyId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EncryptionSettings that = (EncryptionSettings) o; + return Objects.equals(azureEncryptionSettings, that.azureEncryptionSettings) + && Objects.equals(azureKeyVaultKeyId, 
that.azureKeyVaultKeyId) + && Objects.equals(customerManagedKeyId, that.customerManagedKeyId); + } + + @Override + public int hashCode() { + return Objects.hash(azureEncryptionSettings, azureKeyVaultKeyId, customerManagedKeyId); + } + + @Override + public String toString() { + return new ToStringer(EncryptionSettings.class) + .add("azureEncryptionSettings", azureEncryptionSettings) + .add("azureKeyVaultKeyId", azureKeyVaultKeyId) + .add("customerManagedKeyId", customerManagedKeyId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignment.java index 1ac630031..728af2cfa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EntityTagAssignment.java @@ -15,10 +15,7 @@ public class EntityTagAssignment { @JsonProperty("entity_name") private String entityName; - /** - * The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, - * tables, columns, volumes. - */ + /** The type of the entity to which the tag is assigned. 
*/ @JsonProperty("entity_type") private String entityType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEntityTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEntityTagAssignmentRequest.java index 166141bfe..2077538c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEntityTagAssignmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEntityTagAssignmentRequest.java @@ -12,10 +12,7 @@ public class GetEntityTagAssignmentRequest { /** The fully qualified name of the entity to which the tag is assigned */ @JsonIgnore private String entityName; - /** - * The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, - * tables, columns, volumes. - */ + /** The type of the entity to which the tag is assigned. */ @JsonIgnore private String entityType; /** Required. The key of the tag */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListEntityTagAssignmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListEntityTagAssignmentsRequest.java index c21d87e93..920bae4b2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListEntityTagAssignmentsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListEntityTagAssignmentsRequest.java @@ -13,10 +13,7 @@ public class ListEntityTagAssignmentsRequest { /** The fully qualified name of the entity to which the tag is assigned */ @JsonIgnore private String entityName; - /** - * The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, - * tables, columns, volumes. - */ + /** The type of the entity to which the tag is assigned. */ @JsonIgnore private String entityType; /** Optional. 
Maximum number of tag assignments to return in a single page */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java index 0c5e2082d..9db62bb12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Latest kind: CONNECTION_GOOGLE_DRIVE_SERVICE_ACCOUNT = 301; Next id: 302 */ +/** Latest kind: CONNECTION_VEEVA_VAULT_OAUTH_M2M = 311; Next id: 312 */ @Generated public enum SecurableKind { TABLE_DB_STORAGE, @@ -17,6 +17,7 @@ public enum SecurableKind { TABLE_DELTA_ICEBERG_MANAGED, TABLE_DELTA_UNIFORM_HUDI_EXTERNAL, TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL, + TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL_DELTASHARING, TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_DELTASHARING, TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL, TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java index b817347f1..1c8be7d4b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java @@ -26,6 +26,10 @@ public class UpdateCatalog { @JsonProperty("isolation_mode") private CatalogIsolationMode isolationMode; + /** Control CMK encryption for managed catalog data */ + @JsonProperty("managed_encryption_settings") + private EncryptionSettings managedEncryptionSettings; + /** The name of the catalog. 
*/ @JsonIgnore private String name; @@ -73,6 +77,15 @@ public CatalogIsolationMode getIsolationMode() { return isolationMode; } + public UpdateCatalog setManagedEncryptionSettings(EncryptionSettings managedEncryptionSettings) { + this.managedEncryptionSettings = managedEncryptionSettings; + return this; + } + + public EncryptionSettings getManagedEncryptionSettings() { + return managedEncryptionSettings; + } + public UpdateCatalog setName(String name) { this.name = name; return this; @@ -126,6 +139,7 @@ public boolean equals(Object o) { return Objects.equals(comment, that.comment) && Objects.equals(enablePredictiveOptimization, that.enablePredictiveOptimization) && Objects.equals(isolationMode, that.isolationMode) + && Objects.equals(managedEncryptionSettings, that.managedEncryptionSettings) && Objects.equals(name, that.name) && Objects.equals(newName, that.newName) && Objects.equals(options, that.options) @@ -139,6 +153,7 @@ public int hashCode() { comment, enablePredictiveOptimization, isolationMode, + managedEncryptionSettings, name, newName, options, @@ -152,6 +167,7 @@ public String toString() { .add("comment", comment) .add("enablePredictiveOptimization", enablePredictiveOptimization) .add("isolationMode", isolationMode) + .add("managedEncryptionSettings", managedEncryptionSettings) .add("name", name) .add("newName", newName) .add("options", options) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateEntityTagAssignmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateEntityTagAssignmentRequest.java index 589c6888d..46758b6d3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateEntityTagAssignmentRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateEntityTagAssignmentRequest.java @@ -14,10 +14,7 @@ public class UpdateEntityTagAssignmentRequest { /** The fully qualified name of the entity to which the tag is 
assigned */ @JsonIgnore private String entityName; - /** - * The type of the entity to which the tag is assigned. Allowed values are: catalogs, schemas, - * tables, columns, volumes. - */ + /** The type of the entity to which the tag is assigned. */ @JsonIgnore private String entityType; /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java index e264065ec..dc6582a1f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java @@ -16,12 +16,13 @@ @Generated public class Environment { /** - * The `base_environment` key refers to an `env.yaml` file that specifies an environment version - * and a collection of dependencies required for the environment setup. This `env.yaml` file may - * itself include a `base_environment` reference pointing to another `env_1.yaml` file. However, - * when used as a base environment, `env_1.yaml` (or further nested references) will not be - * processed or included in the final environment, meaning that the resolution of - * `base_environment` references is not recursive. + * The base environment this environment is built on top of. A base environment defines the + * environment version and a list of dependencies for serverless compute. The value can be a file + * path to a custom `env.yaml` file (e.g., `/Workspace/path/to/env.yaml`). Support for a + * Databricks-provided base environment ID (e.g., `workspace-base-environments/databricks_ai_v4`) + * and workspace base environment ID (e.g., + * `workspace-base-environments/dbe_b849b66e-b31a-4cb5-b161-1f2b10877fb7`) is in Beta. Either + * `environment_version` or `base_environment` can be provided. 
For more information, see */ @JsonProperty("base_environment") private String baseEnvironment; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java index 977c2abf4..57b28467f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java @@ -110,6 +110,11 @@ public Wait createMessage( response); } + /** Create a comment on a conversation message. */ + public GenieMessageComment createMessageComment(GenieCreateMessageCommentRequest request) { + return impl.createMessageComment(request); + } + /** Creates a Genie space from a serialized payload. */ public GenieSpace createSpace(GenieCreateSpaceRequest request) { return impl.createSpace(request); @@ -354,6 +359,20 @@ public GenieSpace getSpace(GenieGetSpaceRequest request) { return impl.getSpace(request); } + public GenieListConversationCommentsResponse listConversationComments( + String spaceId, String conversationId) { + return listConversationComments( + new GenieListConversationCommentsRequest() + .setSpaceId(spaceId) + .setConversationId(conversationId)); + } + + /** List all comments across all messages in a conversation. 
*/ + public GenieListConversationCommentsResponse listConversationComments( + GenieListConversationCommentsRequest request) { + return impl.listConversationComments(request); + } + public GenieListConversationMessagesResponse listConversationMessages( String spaceId, String conversationId) { return listConversationMessages( @@ -377,6 +396,21 @@ public GenieListConversationsResponse listConversations(GenieListConversationsRe return impl.listConversations(request); } + public GenieListMessageCommentsResponse listMessageComments( + String spaceId, String conversationId, String messageId) { + return listMessageComments( + new GenieListMessageCommentsRequest() + .setSpaceId(spaceId) + .setConversationId(conversationId) + .setMessageId(messageId)); + } + + /** List comments on a specific conversation message. */ + public GenieListMessageCommentsResponse listMessageComments( + GenieListMessageCommentsRequest request) { + return impl.listMessageComments(request); + } + /** Get list of Genie Spaces. */ public GenieListSpacesResponse listSpaces(GenieListSpacesRequest request) { return impl.listSpaces(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateMessageCommentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateMessageCommentRequest.java new file mode 100755 index 000000000..f90f40bc2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCreateMessageCommentRequest.java @@ -0,0 +1,87 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GenieCreateMessageCommentRequest { + /** Comment text content. 
*/ + @JsonProperty("content") + private String content; + + /** The ID associated with the conversation. */ + @JsonIgnore private String conversationId; + + /** The ID associated with the message. */ + @JsonIgnore private String messageId; + + /** The ID associated with the Genie space. */ + @JsonIgnore private String spaceId; + + public GenieCreateMessageCommentRequest setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public GenieCreateMessageCommentRequest setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieCreateMessageCommentRequest setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieCreateMessageCommentRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieCreateMessageCommentRequest that = (GenieCreateMessageCommentRequest) o; + return Objects.equals(content, that.content) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(content, conversationId, messageId, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieCreateMessageCommentRequest.class) + .add("content", content) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResultDetails.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResultDetails.java index b8a65fa18..df49b4607 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResultDetails.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieEvalResultDetails.java @@ -19,7 +19,49 @@ public class GenieEvalResultDetails { @JsonProperty("assessment") private GenieEvalAssessment assessment; - /** Reasons for the assessment score. */ + /** + * Reasons for the assessment score. + * + *

Assessment reasons describe why a Genie response was scored as BAD. + * + *

Deterministic values (compared against the ground truth result): - EMPTY_RESULT: Genie's + * generated SQL results were empty for this benchmark question. - RESULT_MISSING_ROWS: Genie's + * generated SQL response is missing rows from the provided ground truth SQL. - RESULT_EXTRA_ROWS: + * Genie's generated SQL response has more rows than the provided ground truth SQL. - + * RESULT_MISSING_COLUMNS: Genie's generated SQL response is missing columns from the provided + * ground truth SQL. - RESULT_EXTRA_COLUMNS: Genie's generated SQL response has more columns than + * the provided ground truth SQL. - SINGLE_CELL_DIFFERENCE: Single value result was produced but + * differs from ground truth result. - EMPTY_GOOD_SQL: The benchmark SQL returned an empty result. + * - COLUMN_TYPE_DIFFERENCE: The values between the results match but the column type is + * different. + * + *

LLM judge ratings explain the factors driving BAD results: - + * LLM_JUDGE_MISSING_OR_INCORRECT_FILTER: Genie's generated SQL is missing a WHERE clause + * condition or has incorrect filter logic that excludes/includes wrong data. - + * LLM_JUDGE_INCOMPLETE_OR_PARTIAL_OUTPUT: Genie's generated SQL returns only some of the + * requested data or columns, missing parts of what the ground truth SQL returns. - + * LLM_JUDGE_MISINTERPRETATION_OF_USER_REQUEST: Genie's generated SQL fundamentally misunderstands + * what the user is asking for, addressing the wrong question or goal. - + * LLM_JUDGE_INSTRUCTION_COMPLIANCE_OR_MISSING_BUSINESS_LOGIC: Genie's generated SQL fails to + * apply specified instructions or business logic that should be followed. - + * LLM_JUDGE_INCORRECT_METRIC_CALCULATION: Genie's generated SQL uses incorrect logic or makes + * wrong assumptions when calculating metrics. - LLM_JUDGE_INCORRECT_TABLE_OR_FIELD_USAGE: Genie's + * generated SQL references wrong tables, columns, or uses fields that don't match the ground + * truth SQL's intent. - LLM_JUDGE_INCORRECT_FUNCTION_USAGE: Genie's generated SQL uses SQL + * functions incorrectly or inappropriately (wrong parameters, wrong function for the task, etc.). + * - LLM_JUDGE_MISSING_OR_INCORRECT_JOIN: Genie's generated SQL is missing necessary joins between + * tables or has incorrect join conditions/types that produce wrong results. - + * LLM_JUDGE_MISSING_OR_INCORRECT_AGGREGATION: Genie's generated SQL is missing GROUP BY clauses + * or has incorrect grouping that doesn't match the requested aggregation level. - + * LLM_JUDGE_FORMATTING_ERROR: Genie's generated SQL output has incorrect formatting, ordering + * (ORDER BY), or presentation issues that don't match expectations. - LLM_JUDGE_OTHER: LLM judge + * identified an error that doesn't fall into other categories. + * + *

Deprecated LLM judge values (kept for backward compatibility, do not use): - + * LLM_JUDGE_MISSING_JOIN (deprecated) - LLM_JUDGE_WRONG_FILTER (deprecated) - + * LLM_JUDGE_WRONG_AGGREGATION (deprecated) - LLM_JUDGE_WRONG_COLUMNS (deprecated) - + * LLM_JUDGE_SYNTAX_ERROR (deprecated) - LLM_JUDGE_SEMANTIC_ERROR (deprecated) + */ @JsonProperty("assessment_reasons") private Collection assessmentReasons; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java index 92d35fc98..86339a735 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieFeedback.java @@ -10,10 +10,23 @@ /** Feedback containing rating and optional comment */ @Generated public class GenieFeedback { + /** Optional feedback comment text */ + @JsonProperty("comment") + private String comment; + /** The feedback rating */ @JsonProperty("rating") private GenieFeedbackRating rating; + public GenieFeedback setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + public GenieFeedback setRating(GenieFeedbackRating rating) { this.rating = rating; return this; @@ -28,16 +41,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GenieFeedback that = (GenieFeedback) o; - return Objects.equals(rating, that.rating); + return Objects.equals(comment, that.comment) && Objects.equals(rating, that.rating); } @Override public int hashCode() { - return Objects.hash(rating); + return Objects.hash(comment, rating); } @Override public String toString() { - return new ToStringer(GenieFeedback.class).add("rating", rating).toString(); + return new ToStringer(GenieFeedback.class) + .add("comment", comment) + 
.add("rating", rating) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java index f9200fa9e..dbef6d98e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java @@ -37,6 +37,27 @@ public GenieMessage createMessage(GenieCreateConversationMessageRequest request) } } + @Override + public GenieMessageComment createMessageComment(GenieCreateMessageCommentRequest request) { + String path = + String.format( + "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/comments", + request.getSpaceId(), request.getConversationId(), request.getMessageId()); + try { + Request req = new Request("POST", path, apiClient.serialize(request)); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + req.withHeader("Content-Type", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, GenieMessageComment.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieSpace createSpace(GenieCreateSpaceRequest request) { String path = "/api/2.0/genie/spaces"; @@ -390,6 +411,27 @@ public GenieSpace getSpace(GenieGetSpaceRequest request) { } } + @Override + public GenieListConversationCommentsResponse listConversationComments( + GenieListConversationCommentsRequest request) { + String path = + String.format( + "/api/2.0/genie/spaces/%s/conversations/%s/list-comments", + request.getSpaceId(), request.getConversationId()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + 
req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, GenieListConversationCommentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieListConversationMessagesResponse listConversationMessages( GenieListConversationMessagesRequest request) { @@ -428,6 +470,27 @@ public GenieListConversationsResponse listConversations(GenieListConversationsRe } } + @Override + public GenieListMessageCommentsResponse listMessageComments( + GenieListMessageCommentsRequest request) { + String path = + String.format( + "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/comments", + request.getSpaceId(), request.getConversationId(), request.getMessageId()); + try { + Request req = new Request("GET", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, GenieListMessageCommentsResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public GenieListSpacesResponse listSpaces(GenieListSpacesRequest request) { String path = "/api/2.0/genie/spaces"; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationCommentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationCommentsRequest.java new file mode 100755 index 000000000..386ce1731 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationCommentsRequest.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GenieListConversationCommentsRequest { + /** The ID associated with the conversation. */ + @JsonIgnore private String conversationId; + + /** Maximum number of comments to return per page. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** Pagination token for getting the next page of results. */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** The ID associated with the Genie space. */ + @JsonIgnore private String spaceId; + + public GenieListConversationCommentsRequest setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieListConversationCommentsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GenieListConversationCommentsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public GenieListConversationCommentsRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListConversationCommentsRequest that = (GenieListConversationCommentsRequest) o; + return Objects.equals(conversationId, that.conversationId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public 
int hashCode() { + return Objects.hash(conversationId, pageSize, pageToken, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieListConversationCommentsRequest.class) + .add("conversationId", conversationId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationCommentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationCommentsResponse.java new file mode 100755 index 000000000..392699912 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListConversationCommentsResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GenieListConversationCommentsResponse { + /** List of comments in the conversation. */ + @JsonProperty("comments") + private Collection comments; + + /** Token to get the next page of results. 
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + public GenieListConversationCommentsResponse setComments( + Collection comments) { + this.comments = comments; + return this; + } + + public Collection getComments() { + return comments; + } + + public GenieListConversationCommentsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListConversationCommentsResponse that = (GenieListConversationCommentsResponse) o; + return Objects.equals(comments, that.comments) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(comments, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(GenieListConversationCommentsResponse.class) + .add("comments", comments) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListMessageCommentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListMessageCommentsRequest.java new file mode 100755 index 000000000..23a060474 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListMessageCommentsRequest.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class GenieListMessageCommentsRequest { + /** The ID associated with the conversation. */ + @JsonIgnore private String conversationId; + + /** The ID associated with the message. */ + @JsonIgnore private String messageId; + + /** Maximum number of comments to return per page. */ + @JsonIgnore + @QueryParam("page_size") + private Long pageSize; + + /** Pagination token for getting the next page of results. */ + @JsonIgnore + @QueryParam("page_token") + private String pageToken; + + /** The ID associated with the Genie space. */ + @JsonIgnore private String spaceId; + + public GenieListMessageCommentsRequest setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieListMessageCommentsRequest setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieListMessageCommentsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GenieListMessageCommentsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public GenieListMessageCommentsRequest setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListMessageCommentsRequest that = 
(GenieListMessageCommentsRequest) o; + return Objects.equals(conversationId, that.conversationId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(spaceId, that.spaceId); + } + + @Override + public int hashCode() { + return Objects.hash(conversationId, messageId, pageSize, pageToken, spaceId); + } + + @Override + public String toString() { + return new ToStringer(GenieListMessageCommentsRequest.class) + .add("conversationId", conversationId) + .add("messageId", messageId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("spaceId", spaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListMessageCommentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListMessageCommentsResponse.java new file mode 100755 index 000000000..235e0b1ab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieListMessageCommentsResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GenieListMessageCommentsResponse { + /** List of comments on the message. */ + @JsonProperty("comments") + private Collection comments; + + /** Token to get the next page of results. 
*/ + @JsonProperty("next_page_token") + private String nextPageToken; + + public GenieListMessageCommentsResponse setComments(Collection comments) { + this.comments = comments; + return this; + } + + public Collection getComments() { + return comments; + } + + public GenieListMessageCommentsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieListMessageCommentsResponse that = (GenieListMessageCommentsResponse) o; + return Objects.equals(comments, that.comments) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(comments, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(GenieListMessageCommentsResponse.class) + .add("comments", comments) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessageComment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessageComment.java new file mode 100755 index 000000000..fae2eb223 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessageComment.java @@ -0,0 +1,136 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A comment on a Genie conversation message. 
*/ +@Generated +public class GenieMessageComment { + /** Comment text content */ + @JsonProperty("content") + private String content; + + /** Conversation ID */ + @JsonProperty("conversation_id") + private String conversationId; + + /** Timestamp when the comment was created */ + @JsonProperty("created_timestamp") + private Long createdTimestamp; + + /** Comment ID */ + @JsonProperty("message_comment_id") + private String messageCommentId; + + /** Message ID */ + @JsonProperty("message_id") + private String messageId; + + /** Genie space ID */ + @JsonProperty("space_id") + private String spaceId; + + /** ID of the user who created the comment */ + @JsonProperty("user_id") + private Long userId; + + public GenieMessageComment setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public GenieMessageComment setConversationId(String conversationId) { + this.conversationId = conversationId; + return this; + } + + public String getConversationId() { + return conversationId; + } + + public GenieMessageComment setCreatedTimestamp(Long createdTimestamp) { + this.createdTimestamp = createdTimestamp; + return this; + } + + public Long getCreatedTimestamp() { + return createdTimestamp; + } + + public GenieMessageComment setMessageCommentId(String messageCommentId) { + this.messageCommentId = messageCommentId; + return this; + } + + public String getMessageCommentId() { + return messageCommentId; + } + + public GenieMessageComment setMessageId(String messageId) { + this.messageId = messageId; + return this; + } + + public String getMessageId() { + return messageId; + } + + public GenieMessageComment setSpaceId(String spaceId) { + this.spaceId = spaceId; + return this; + } + + public String getSpaceId() { + return spaceId; + } + + public GenieMessageComment setUserId(Long userId) { + this.userId = userId; + return this; + } + + public Long getUserId() { + return userId; + } + + @Override + public 
boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GenieMessageComment that = (GenieMessageComment) o; + return Objects.equals(content, that.content) + && Objects.equals(conversationId, that.conversationId) + && Objects.equals(createdTimestamp, that.createdTimestamp) + && Objects.equals(messageCommentId, that.messageCommentId) + && Objects.equals(messageId, that.messageId) + && Objects.equals(spaceId, that.spaceId) + && Objects.equals(userId, that.userId); + } + + @Override + public int hashCode() { + return Objects.hash( + content, conversationId, createdTimestamp, messageCommentId, messageId, spaceId, userId); + } + + @Override + public String toString() { + return new ToStringer(GenieMessageComment.class) + .add("content", content) + .add("conversationId", conversationId) + .add("createdTimestamp", createdTimestamp) + .add("messageCommentId", messageCommentId) + .add("messageId", messageId) + .add("spaceId", spaceId) + .add("userId", userId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java index cc0a48ab5..b24fff598 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieQueryAttachment.java @@ -41,6 +41,10 @@ public class GenieQueryAttachment { @JsonProperty("statement_id") private String statementId; + /** Insights into how Genie came to generate the SQL. 
*/ + @JsonProperty("thoughts") + private Collection thoughts; + /** Name of the query */ @JsonProperty("title") private String title; @@ -108,6 +112,15 @@ public String getStatementId() { return statementId; } + public GenieQueryAttachment setThoughts(Collection thoughts) { + this.thoughts = thoughts; + return this; + } + + public Collection getThoughts() { + return thoughts; + } + public GenieQueryAttachment setTitle(String title) { this.title = title; return this; @@ -129,6 +142,7 @@ public boolean equals(Object o) { && Objects.equals(query, that.query) && Objects.equals(queryResultMetadata, that.queryResultMetadata) && Objects.equals(statementId, that.statementId) + && Objects.equals(thoughts, that.thoughts) && Objects.equals(title, that.title); } @@ -142,6 +156,7 @@ public int hashCode() { query, queryResultMetadata, statementId, + thoughts, title); } @@ -155,6 +170,7 @@ public String toString() { .add("query", query) .add("queryResultMetadata", queryResultMetadata) .add("statementId", statementId) + .add("thoughts", thoughts) .add("title", title) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java index 2ddc8b835..656cfabbf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSendMessageFeedbackRequest.java @@ -10,6 +10,10 @@ @Generated public class GenieSendMessageFeedbackRequest { + /** Optional text feedback that will be stored as a comment. */ + @JsonProperty("comment") + private String comment; + /** The ID associated with the conversation. 
*/ @JsonIgnore private String conversationId; @@ -23,6 +27,15 @@ public class GenieSendMessageFeedbackRequest { /** The ID associated with the Genie space where the message is located. */ @JsonIgnore private String spaceId; + public GenieSendMessageFeedbackRequest setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + public GenieSendMessageFeedbackRequest setConversationId(String conversationId) { this.conversationId = conversationId; return this; @@ -64,7 +77,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GenieSendMessageFeedbackRequest that = (GenieSendMessageFeedbackRequest) o; - return Objects.equals(conversationId, that.conversationId) + return Objects.equals(comment, that.comment) + && Objects.equals(conversationId, that.conversationId) && Objects.equals(messageId, that.messageId) && Objects.equals(rating, that.rating) && Objects.equals(spaceId, that.spaceId); @@ -72,12 +86,13 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(conversationId, messageId, rating, spaceId); + return Objects.hash(comment, conversationId, messageId, rating, spaceId); } @Override public String toString() { return new ToStringer(GenieSendMessageFeedbackRequest.class) + .add("comment", comment) .add("conversationId", conversationId) .add("messageId", messageId) .add("rating", rating) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java index e6fd1a6a9..262956231 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java @@ -22,6 +22,10 @@ public interface GenieService { GenieMessage createMessage( 
GenieCreateConversationMessageRequest genieCreateConversationMessageRequest); + /** Create a comment on a conversation message. */ + GenieMessageComment createMessageComment( + GenieCreateMessageCommentRequest genieCreateMessageCommentRequest); + /** Creates a Genie space from a serialized payload. */ GenieSpace createSpace(GenieCreateSpaceRequest genieCreateSpaceRequest); @@ -142,6 +146,10 @@ GenieGetMessageQueryResultResponse getMessageQueryResultByAttachment( /** Get details of a Genie Space. */ GenieSpace getSpace(GenieGetSpaceRequest genieGetSpaceRequest); + /** List all comments across all messages in a conversation. */ + GenieListConversationCommentsResponse listConversationComments( + GenieListConversationCommentsRequest genieListConversationCommentsRequest); + /** List messages in a conversation */ GenieListConversationMessagesResponse listConversationMessages( GenieListConversationMessagesRequest genieListConversationMessagesRequest); @@ -150,6 +158,10 @@ GenieListConversationMessagesResponse listConversationMessages( GenieListConversationsResponse listConversations( GenieListConversationsRequest genieListConversationsRequest); + /** List comments on a specific conversation message. */ + GenieListMessageCommentsResponse listMessageComments( + GenieListMessageCommentsRequest genieListMessageCommentsRequest); + /** Get list of Genie Spaces. */ GenieListSpacesResponse listSpaces(GenieListSpacesRequest genieListSpacesRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java new file mode 100755 index 000000000..25ff5955c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Thought.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** A single thought in the AI's reasoning process for a query. */ +@Generated +public class Thought { + /** The md formatted content for this thought. */ + @JsonProperty("content") + private String content; + + /** The category of this thought. */ + @JsonProperty("thought_type") + private ThoughtType thoughtType; + + public Thought setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public Thought setThoughtType(ThoughtType thoughtType) { + this.thoughtType = thoughtType; + return this; + } + + public ThoughtType getThoughtType() { + return thoughtType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Thought that = (Thought) o; + return Objects.equals(content, that.content) && Objects.equals(thoughtType, that.thoughtType); + } + + @Override + public int hashCode() { + return Objects.hash(content, thoughtType); + } + + @Override + public String toString() { + return new ToStringer(Thought.class) + .add("content", content) + .add("thoughtType", thoughtType) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ThoughtType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ThoughtType.java new file mode 100755 index 000000000..a31ef433f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ThoughtType.java @@ -0,0 +1,23 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; + +/** + * ThoughtType. 
The possible values are: * `THOUGHT_TYPE_UNSPECIFIED`: Default value that should not + * be used. * `THOUGHT_TYPE_DESCRIPTION`: A high-level description of how the question was + * interpreted. * `THOUGHT_TYPE_UNDERSTANDING`: How ambiguous parts of the question were resolved. * + * `THOUGHT_TYPE_DATA_SOURCING`: Which tables or datasets were identified as relevant. * + * `THOUGHT_TYPE_INSTRUCTIONS`: Which author-defined instructions were referenced. * + * `THOUGHT_TYPE_STEPS`: The logical steps taken to compute the answer. The category of a Thought. + * Additional values may be added in the future. + */ +@Generated +public enum ThoughtType { + THOUGHT_TYPE_DATA_SOURCING, // Which tables or datasets were identified as relevant. + THOUGHT_TYPE_DESCRIPTION, // A high-level description of how the question was interpreted. + THOUGHT_TYPE_INSTRUCTIONS, // Which author-defined instructions were referenced. + THOUGHT_TYPE_STEPS, // The logical steps taken to compute the answer. + THOUGHT_TYPE_UNDERSTANDING, // How ambiguous parts of the question were resolved. +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index a222eef0c..364a69437 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -11,7 +11,10 @@ /** Used when outputting a child run, in GetRun or ListRuns. */ @Generated public class RunTask { - /** New alert v2 task */ + /** + * The task evaluates a Databricks alert and sends notifications to subscribers when the + * `alert_task` field is present. 
+ */ @JsonProperty("alert_task") private AlertTask alertTask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 0d7cbaa26..24c725803 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -10,7 +10,10 @@ @Generated public class SubmitTask { - /** New alert v2 task */ + /** + * The task evaluates a Databricks alert and sends notifications to subscribers when the + * `alert_task` field is present. + */ @JsonProperty("alert_task") private AlertTask alertTask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 66307b4f3..a17190a54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -10,7 +10,10 @@ @Generated public class Task { - /** New alert v2 task */ + /** + * The task evaluates a Databricks alert and sends notifications to subscribers when the + * `alert_task` field is present. + */ @JsonProperty("alert_task") private AlertTask alertTask; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java index c585a4eda..23c06ff1f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java @@ -40,7 +40,9 @@ * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size * limit. * `DISABLED`: The run was never executed because it was disabled explicitly by the user. 
* * `BREAKING_CHANGE`: Run failed because of an intentional breaking change in Spark, but it will be - * retried with a mitigation config. + * retried with a mitigation config. * `CLUSTER_TERMINATED_BY_USER`: The run failed because the + * externally managed cluster entered an unusable state, likely due to the user terminating or + * restarting it outside the jobs service. * *

[Link]: * https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/AvgFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/AvgFunction.java index 86509b724..3d04bca5e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/AvgFunction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/AvgFunction.java @@ -10,7 +10,12 @@ /** Computes the average of values. */ @Generated public class AvgFunction { - /** The input column from which the average is computed. */ + /** + * The input column from which the average is computed. For Kafka sources, use dot-prefixed path + * notation (e.g., "value.amount"). For nested fields, the leaf node name is used. TODO(FS-939): + * Colon-prefixed notation (e.g., "value:amount") is supported for backwards compatibility but is + * deprecated; migrate to dot notation. + */ @JsonProperty("input") private String input; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ColumnIdentifier.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ColumnIdentifier.java index d5b5558dd..a8454c82e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ColumnIdentifier.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ColumnIdentifier.java @@ -10,9 +10,10 @@ @Generated public class ColumnIdentifier { /** - * String representation of the column name or variant expression path. For nested fields, the - * leaf value is what will be present in materialized tables and expected to match at query time. - * For example, the leaf node of value:trip_details.location_details.pickup_zip is pickup_zip. + * String representation of the column name using dot-prefixed path notation. 
For nested fields, + * the leaf value is what will be present in materialized tables and expected to match at query + * time. For example, the leaf node of value.trip_details.location_details.pickup_zip is + * pickup_zip. */ @JsonProperty("variant_expr_path") private String variantExprPath; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CountFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CountFunction.java index 56bec6a10..78ea3774e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CountFunction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/CountFunction.java @@ -10,7 +10,12 @@ /** Computes the count of values. */ @Generated public class CountFunction { - /** The input column from which the count is computed. */ + /** + * The input column from which the count is computed. For Kafka sources, use dot-prefixed path + * notation (e.g., "value.amount"). For nested fields, the leaf node name is used. TODO(FS-939): + * Colon-prefixed notation (e.g., "value:amount") is supported for backwards compatibility but is + * deprecated; migrate to dot notation. + */ @JsonProperty("input") private String input; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DataSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DataSource.java index 722123889..6b6378a2a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DataSource.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/DataSource.java @@ -7,16 +7,21 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Specifies the data source backing a feature. Exactly one source type must be set. */ @Generated public class DataSource { - /** */ + /** A Delta table data source. 
*/ @JsonProperty("delta_table_source") private DeltaTableSource deltaTableSource; - /** */ + /** A Kafka stream data source. */ @JsonProperty("kafka_source") private KafkaSource kafkaSource; + /** A request-time data source. */ + @JsonProperty("request_source") + private RequestSource requestSource; + public DataSource setDeltaTableSource(DeltaTableSource deltaTableSource) { this.deltaTableSource = deltaTableSource; return this; @@ -35,18 +40,28 @@ public KafkaSource getKafkaSource() { return kafkaSource; } + public DataSource setRequestSource(RequestSource requestSource) { + this.requestSource = requestSource; + return this; + } + + public RequestSource getRequestSource() { + return requestSource; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DataSource that = (DataSource) o; return Objects.equals(deltaTableSource, that.deltaTableSource) - && Objects.equals(kafkaSource, that.kafkaSource); + && Objects.equals(kafkaSource, that.kafkaSource) + && Objects.equals(requestSource, that.requestSource); } @Override public int hashCode() { - return Objects.hash(deltaTableSource, kafkaSource); + return Objects.hash(deltaTableSource, kafkaSource, requestSource); } @Override @@ -54,6 +69,7 @@ public String toString() { return new ToStringer(DataSource.class) .add("deltaTableSource", deltaTableSource) .add("kafkaSource", kafkaSource) + .add("requestSource", requestSource) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/EntityColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/EntityColumn.java index 3fbca81f8..17e0cf207 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/EntityColumn.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/EntityColumn.java @@ -9,7 +9,14 @@ @Generated public class EntityColumn { - /** The name of the entity column. 
*/ + /** + * The name of the entity column. For Kafka sources, use dot-prefixed path notation to reference + * fields within the key or value schema (e.g., "value.user_id", "key.partition_key"). For nested + * fields, the leaf node name (e.g., "user_id" from "value.trip_details.user_id") is what will be + * present in materialized tables and expected to match at query time. TODO(FS-939): + * Colon-prefixed notation (e.g., "value:user_id") is supported for backwards compatibility but is + * deprecated; migrate to dot notation. + */ @JsonProperty("name") private String name; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FieldDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FieldDefinition.java new file mode 100755 index 000000000..8c87b90f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FieldDefinition.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A single field definition within a FlatSchema, specifying the field name and its scalar data + * type. Does not support nested or complex types (arrays, maps, structs). + */ +@Generated +public class FieldDefinition { + /** The scalar data type of the field. */ + @JsonProperty("data_type") + private ScalarDataType dataType; + + /** The name of the field. 
*/ + @JsonProperty("name") + private String name; + + public FieldDefinition setDataType(ScalarDataType dataType) { + this.dataType = dataType; + return this; + } + + public ScalarDataType getDataType() { + return dataType; + } + + public FieldDefinition setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FieldDefinition that = (FieldDefinition) o; + return Objects.equals(dataType, that.dataType) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(dataType, name); + } + + @Override + public String toString() { + return new ToStringer(FieldDefinition.class) + .add("dataType", dataType) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FlatSchema.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FlatSchema.java new file mode 100755 index 000000000..504508dd3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/FlatSchema.java @@ -0,0 +1,47 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * A flat (non-nested) schema for request-time fields, defined as an ordered list of field + * definitions. This schema only supports scalar types. + */ +@Generated +public class FlatSchema { + /** The list of fields in this schema. 
*/ + @JsonProperty("fields") + private Collection fields; + + public FlatSchema setFields(Collection fields) { + this.fields = fields; + return this; + } + + public Collection getFields() { + return fields; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FlatSchema that = (FlatSchema) o; + return Objects.equals(fields, that.fields); + } + + @Override + public int hashCode() { + return Objects.hash(fields); + } + + @Override + public String toString() { + return new ToStringer(FlatSchema.class).add("fields", fields).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java index 52cf9210b..0351eaf8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/MaterializedFeature.java @@ -21,6 +21,12 @@ public class MaterializedFeature { @JsonProperty("feature_name") private String featureName; + /** + * True if this is an online materialized feature. False if it is an offline materialized feature. + */ + @JsonProperty("is_online") + private Boolean isOnline; + /** * The timestamp when the pipeline last ran and updated the materialized feature values. If the * pipeline has not run yet, this field will be null. 
@@ -69,6 +75,15 @@ public String getFeatureName() { return featureName; } + public MaterializedFeature setIsOnline(Boolean isOnline) { + this.isOnline = isOnline; + return this; + } + + public Boolean getIsOnline() { + return isOnline; + } + public MaterializedFeature setLastMaterializationTime(String lastMaterializationTime) { this.lastMaterializationTime = lastMaterializationTime; return this; @@ -131,6 +146,7 @@ public boolean equals(Object o) { MaterializedFeature that = (MaterializedFeature) o; return Objects.equals(cronSchedule, that.cronSchedule) && Objects.equals(featureName, that.featureName) + && Objects.equals(isOnline, that.isOnline) && Objects.equals(lastMaterializationTime, that.lastMaterializationTime) && Objects.equals(materializedFeatureId, that.materializedFeatureId) && Objects.equals(offlineStoreConfig, that.offlineStoreConfig) @@ -144,6 +160,7 @@ public int hashCode() { return Objects.hash( cronSchedule, featureName, + isOnline, lastMaterializationTime, materializedFeatureId, offlineStoreConfig, @@ -157,6 +174,7 @@ public String toString() { return new ToStringer(MaterializedFeature.class) .add("cronSchedule", cronSchedule) .add("featureName", featureName) + .add("isOnline", isOnline) .add("lastMaterializationTime", lastMaterializationTime) .add("materializedFeatureId", materializedFeatureId) .add("offlineStoreConfig", offlineStoreConfig) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RequestSource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RequestSource.java new file mode 100755 index 000000000..16c7ee6aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RequestSource.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * A request-time data source whose value is provided at inference time: offline batch scoring or + * online serving endpoint + */ +@Generated +public class RequestSource { + /** A flat schema with scalar-typed fields only. */ + @JsonProperty("flat_schema") + private FlatSchema flatSchema; + + public RequestSource setFlatSchema(FlatSchema flatSchema) { + this.flatSchema = flatSchema; + return this; + } + + public FlatSchema getFlatSchema() { + return flatSchema; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RequestSource that = (RequestSource) o; + return Objects.equals(flatSchema, that.flatSchema); + } + + @Override + public int hashCode() { + return Objects.hash(flatSchema); + } + + @Override + public String toString() { + return new ToStringer(RequestSource.class).add("flatSchema", flatSchema).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ScalarDataType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ScalarDataType.java new file mode 100755 index 000000000..e0a6f2073 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ScalarDataType.java @@ -0,0 +1,23 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.ml; + +import com.databricks.sdk.support.Generated; + +/** + * Scalar data types for request-time field definitions. Only flat (non-nested) types are supported. 
+ */ +@Generated +public enum ScalarDataType { + BINARY, + BOOLEAN, + DATE, + DECIMAL, + DOUBLE, + FLOAT, + INTEGER, + LONG, + SHORT, + STRING, + TIMESTAMP, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/StddevPopFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/StddevPopFunction.java index 3d2d70b97..b9e551b15 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/StddevPopFunction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/StddevPopFunction.java @@ -10,7 +10,12 @@ /** Computes the population standard deviation. */ @Generated public class StddevPopFunction { - /** The input column from which the population standard deviation is computed. */ + /** + * The input column from which the population standard deviation is computed. For Kafka sources, + * use dot-prefixed path notation (e.g., "value.amount"). For nested fields, the leaf node name is + * used. TODO(FS-939): Colon-prefixed notation (e.g., "value:amount") is supported for backwards + * compatibility but is deprecated; migrate to dot notation. + */ @JsonProperty("input") private String input; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SumFunction.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SumFunction.java index 13e7eb318..8da774fe0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SumFunction.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/SumFunction.java @@ -10,7 +10,12 @@ /** Computes the sum of values. */ @Generated public class SumFunction { - /** The input column from which the sum is computed. */ + /** + * The input column from which the sum is computed. For Kafka sources, use dot-prefixed path + * notation (e.g., "value.amount"). For nested fields, the leaf node name is used. 
TODO(FS-939): + * Colon-prefixed notation (e.g., "value:amount") is supported for backwards compatibility but is + * deprecated; migrate to dot notation. + */ @JsonProperty("input") private String input; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TimeseriesColumn.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TimeseriesColumn.java index 7404855ce..fef3a9662 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TimeseriesColumn.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/TimeseriesColumn.java @@ -9,7 +9,14 @@ @Generated public class TimeseriesColumn { - /** The name of the timeseries column. */ + /** + * The name of the timeseries column. For Kafka sources, use dot-prefixed path notation to + * reference fields within the key or value schema (e.g., "value.event_timestamp"). For nested + * fields, the leaf node name (e.g., "event_timestamp" from "value.event_details.event_timestamp") + * is what will be present in materialized tables and expected to match at query time. + * TODO(FS-939): Colon-prefixed notation (e.g., "value:event_timestamp") is supported for + * backwards compatibility but is deprecated; migrate to dot notation. + */ @JsonProperty("name") private String name; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java new file mode 100755 index 000000000..00ce73ecb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import java.util.Objects; + +@Generated +public class ApplyEnvironmentRequest { + /** */ + @JsonIgnore private String pipelineId; + + public ApplyEnvironmentRequest setPipelineId(String pipelineId) { + this.pipelineId = pipelineId; + return this; + } + + public String getPipelineId() { + return pipelineId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ApplyEnvironmentRequest that = (ApplyEnvironmentRequest) o; + return Objects.equals(pipelineId, that.pipelineId); + } + + @Override + public int hashCode() { + return Objects.hash(pipelineId); + } + + @Override + public String toString() { + return new ToStringer(ApplyEnvironmentRequest.class).add("pipelineId", pipelineId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java new file mode 100755 index 000000000..9dd9d8c06 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ApplyEnvironmentRequestResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class ApplyEnvironmentRequestResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(ApplyEnvironmentRequestResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorOptions.java new file mode 100755 index 000000000..b5ae9aef3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ConnectorOptions.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Wrapper message for source-specific options to support multiple connector types */ +@Generated +public class ConnectorOptions { + /** */ + @JsonProperty("gdrive_options") + private GoogleDriveOptions gdriveOptions; + + /** */ + @JsonProperty("google_ads_options") + private GoogleAdsOptions googleAdsOptions; + + /** */ + @JsonProperty("sharepoint_options") + private SharepointOptions sharepointOptions; + + /** */ + @JsonProperty("tiktok_ads_options") + private TikTokAdsOptions tiktokAdsOptions; + + public ConnectorOptions setGdriveOptions(GoogleDriveOptions gdriveOptions) { + this.gdriveOptions = gdriveOptions; + return this; + } + + public GoogleDriveOptions getGdriveOptions() { + return gdriveOptions; + } + + public ConnectorOptions setGoogleAdsOptions(GoogleAdsOptions googleAdsOptions) { + this.googleAdsOptions = googleAdsOptions; + return this; + } + + public GoogleAdsOptions getGoogleAdsOptions() { + return googleAdsOptions; + } + + public ConnectorOptions setSharepointOptions(SharepointOptions sharepointOptions) { + this.sharepointOptions = sharepointOptions; + return this; + } + + public SharepointOptions getSharepointOptions() { + return sharepointOptions; + } + + public ConnectorOptions setTiktokAdsOptions(TikTokAdsOptions tiktokAdsOptions) { + this.tiktokAdsOptions = tiktokAdsOptions; + return this; + } + + public TikTokAdsOptions getTiktokAdsOptions() { + return tiktokAdsOptions; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConnectorOptions that = (ConnectorOptions) o; + return Objects.equals(gdriveOptions, that.gdriveOptions) + && Objects.equals(googleAdsOptions, that.googleAdsOptions) + && Objects.equals(sharepointOptions, 
that.sharepointOptions) + && Objects.equals(tiktokAdsOptions, that.tiktokAdsOptions); + } + + @Override + public int hashCode() { + return Objects.hash(gdriveOptions, googleAdsOptions, sharepointOptions, tiktokAdsOptions); + } + + @Override + public String toString() { + return new ToStringer(ConnectorOptions.class) + .add("gdriveOptions", gdriveOptions) + .add("googleAdsOptions", googleAdsOptions) + .add("sharepointOptions", sharepointOptions) + .add("tiktokAdsOptions", tiktokAdsOptions) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileFilter.java new file mode 100755 index 000000000..d9706fa24 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileFilter.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class FileFilter { + /** + * Include files with modification times occurring after the specified time. Timestamp format: + * YYYY-MM-DDTHH:mm:ss (e.g. 2020-06-01T13:00:00) Based on + * https://spark.apache.org/docs/latest/sql-data-sources-generic-options.html#modification-time-path-filters + */ + @JsonProperty("modified_after") + private String modifiedAfter; + + /** + * Include files with modification times occurring before the specified time. Timestamp format: + * YYYY-MM-DDTHH:mm:ss (e.g. 
2020-06-01T13:00:00) Based on + * https://spark.apache.org/docs/latest/sql-data-sources-generic-options.html#modification-time-path-filters + */ + @JsonProperty("modified_before") + private String modifiedBefore; + + /** + * Include files with file names matching the pattern Based on + * https://spark.apache.org/docs/latest/sql-data-sources-generic-options.html#path-glob-filter + */ + @JsonProperty("path_filter") + private String pathFilter; + + public FileFilter setModifiedAfter(String modifiedAfter) { + this.modifiedAfter = modifiedAfter; + return this; + } + + public String getModifiedAfter() { + return modifiedAfter; + } + + public FileFilter setModifiedBefore(String modifiedBefore) { + this.modifiedBefore = modifiedBefore; + return this; + } + + public String getModifiedBefore() { + return modifiedBefore; + } + + public FileFilter setPathFilter(String pathFilter) { + this.pathFilter = pathFilter; + return this; + } + + public String getPathFilter() { + return pathFilter; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileFilter that = (FileFilter) o; + return Objects.equals(modifiedAfter, that.modifiedAfter) + && Objects.equals(modifiedBefore, that.modifiedBefore) + && Objects.equals(pathFilter, that.pathFilter); + } + + @Override + public int hashCode() { + return Objects.hash(modifiedAfter, modifiedBefore, pathFilter); + } + + @Override + public String toString() { + return new ToStringer(FileFilter.class) + .add("modifiedAfter", modifiedAfter) + .add("modifiedBefore", modifiedBefore) + .add("pathFilter", pathFilter) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptions.java new file mode 100755 index 000000000..aeb959842 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptions.java @@ -0,0 +1,217 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; + +@Generated +public class FileIngestionOptions { + /** */ + @JsonProperty("corrupt_record_column") + private String corruptRecordColumn; + + /** Generic options */ + @JsonProperty("file_filters") + private Collection<FileFilter> fileFilters; + + /** required for TableSpec */ + @JsonProperty("format") + private FileIngestionOptionsFileFormat format; + + /** + * Format-specific options Based on + * https://docs.databricks.com/aws/en/ingestion/cloud-object-storage/auto-loader/options#file-format-options + */ + @JsonProperty("format_options") + private Map<String, String> formatOptions; + + /** */ + @JsonProperty("ignore_corrupt_files") + private Boolean ignoreCorruptFiles; + + /** */ + @JsonProperty("infer_column_types") + private Boolean inferColumnTypes; + + /** + * Column name case sensitivity + * https://docs.databricks.com/aws/en/ingestion/cloud-object-storage/auto-loader/schema#change-case-sensitive-behavior + */ + @JsonProperty("reader_case_sensitive") + private Boolean readerCaseSensitive; + + /** */ + @JsonProperty("rescued_data_column") + private String rescuedDataColumn; + + /** */ + @JsonProperty("schema_evolution_mode") + private FileIngestionOptionsSchemaEvolutionMode schemaEvolutionMode; + + /** + * Override inferred schema of specific columns Based on + * https://docs.databricks.com/aws/en/ingestion/cloud-object-storage/auto-loader/schema#override-schema-inference-with-schema-hints + */ + @JsonProperty("schema_hints") + private String schemaHints; + + /** */ + @JsonProperty("single_variant_column") + private 
String singleVariantColumn; + + public FileIngestionOptions setCorruptRecordColumn(String corruptRecordColumn) { + this.corruptRecordColumn = corruptRecordColumn; + return this; + } + + public String getCorruptRecordColumn() { + return corruptRecordColumn; + } + + public FileIngestionOptions setFileFilters(Collection fileFilters) { + this.fileFilters = fileFilters; + return this; + } + + public Collection getFileFilters() { + return fileFilters; + } + + public FileIngestionOptions setFormat(FileIngestionOptionsFileFormat format) { + this.format = format; + return this; + } + + public FileIngestionOptionsFileFormat getFormat() { + return format; + } + + public FileIngestionOptions setFormatOptions(Map formatOptions) { + this.formatOptions = formatOptions; + return this; + } + + public Map getFormatOptions() { + return formatOptions; + } + + public FileIngestionOptions setIgnoreCorruptFiles(Boolean ignoreCorruptFiles) { + this.ignoreCorruptFiles = ignoreCorruptFiles; + return this; + } + + public Boolean getIgnoreCorruptFiles() { + return ignoreCorruptFiles; + } + + public FileIngestionOptions setInferColumnTypes(Boolean inferColumnTypes) { + this.inferColumnTypes = inferColumnTypes; + return this; + } + + public Boolean getInferColumnTypes() { + return inferColumnTypes; + } + + public FileIngestionOptions setReaderCaseSensitive(Boolean readerCaseSensitive) { + this.readerCaseSensitive = readerCaseSensitive; + return this; + } + + public Boolean getReaderCaseSensitive() { + return readerCaseSensitive; + } + + public FileIngestionOptions setRescuedDataColumn(String rescuedDataColumn) { + this.rescuedDataColumn = rescuedDataColumn; + return this; + } + + public String getRescuedDataColumn() { + return rescuedDataColumn; + } + + public FileIngestionOptions setSchemaEvolutionMode( + FileIngestionOptionsSchemaEvolutionMode schemaEvolutionMode) { + this.schemaEvolutionMode = schemaEvolutionMode; + return this; + } + + public FileIngestionOptionsSchemaEvolutionMode 
getSchemaEvolutionMode() { + return schemaEvolutionMode; + } + + public FileIngestionOptions setSchemaHints(String schemaHints) { + this.schemaHints = schemaHints; + return this; + } + + public String getSchemaHints() { + return schemaHints; + } + + public FileIngestionOptions setSingleVariantColumn(String singleVariantColumn) { + this.singleVariantColumn = singleVariantColumn; + return this; + } + + public String getSingleVariantColumn() { + return singleVariantColumn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileIngestionOptions that = (FileIngestionOptions) o; + return Objects.equals(corruptRecordColumn, that.corruptRecordColumn) + && Objects.equals(fileFilters, that.fileFilters) + && Objects.equals(format, that.format) + && Objects.equals(formatOptions, that.formatOptions) + && Objects.equals(ignoreCorruptFiles, that.ignoreCorruptFiles) + && Objects.equals(inferColumnTypes, that.inferColumnTypes) + && Objects.equals(readerCaseSensitive, that.readerCaseSensitive) + && Objects.equals(rescuedDataColumn, that.rescuedDataColumn) + && Objects.equals(schemaEvolutionMode, that.schemaEvolutionMode) + && Objects.equals(schemaHints, that.schemaHints) + && Objects.equals(singleVariantColumn, that.singleVariantColumn); + } + + @Override + public int hashCode() { + return Objects.hash( + corruptRecordColumn, + fileFilters, + format, + formatOptions, + ignoreCorruptFiles, + inferColumnTypes, + readerCaseSensitive, + rescuedDataColumn, + schemaEvolutionMode, + schemaHints, + singleVariantColumn); + } + + @Override + public String toString() { + return new ToStringer(FileIngestionOptions.class) + .add("corruptRecordColumn", corruptRecordColumn) + .add("fileFilters", fileFilters) + .add("format", format) + .add("formatOptions", formatOptions) + .add("ignoreCorruptFiles", ignoreCorruptFiles) + .add("inferColumnTypes", inferColumnTypes) + .add("readerCaseSensitive", 
readerCaseSensitive) + .add("rescuedDataColumn", rescuedDataColumn) + .add("schemaEvolutionMode", schemaEvolutionMode) + .add("schemaHints", schemaHints) + .add("singleVariantColumn", singleVariantColumn) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptionsFileFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptionsFileFormat.java new file mode 100755 index 000000000..1b0535fb8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptionsFileFormat.java @@ -0,0 +1,17 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum FileIngestionOptionsFileFormat { + AVRO, + BINARYFILE, + CSV, + EXCEL, + JSON, + ORC, + PARQUET, + XML, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptionsSchemaEvolutionMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptionsSchemaEvolutionMode.java new file mode 100755 index 000000000..5a3ac9571 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/FileIngestionOptionsSchemaEvolutionMode.java @@ -0,0 +1,18 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +/** + * Based on + * https://docs.databricks.com/aws/en/ingestion/cloud-object-storage/auto-loader/schema#how-does-auto-loader-schema-evolution-work + */ +@Generated +public enum FileIngestionOptionsSchemaEvolutionMode { + ADD_NEW_COLUMNS, + ADD_NEW_COLUMNS_WITH_TYPE_WIDENING, + FAIL_ON_NEW_COLUMNS, + NONE, + RESCUE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleAdsOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleAdsOptions.java new file mode 100755 index 000000000..c7ff54d80 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleAdsOptions.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** + * Google Ads specific options for ingestion (object-level). When set, these values override the + * corresponding fields in GoogleAdsConfig (source_configurations). + */ +@Generated +public class GoogleAdsOptions { + /** + * (Optional) Number of days to look back for report tables to capture late-arriving data. If not + * specified, defaults to 30 days. + */ + @JsonProperty("lookback_window_days") + private Long lookbackWindowDays; + + /** + * (Optional at this level) Manager Account ID (also called MCC Account ID) used to list and + * access customer accounts under this manager account. Overrides + * GoogleAdsConfig.manager_account_id from source_configurations when set. + */ + @JsonProperty("manager_account_id") + private String managerAccountId; + + /** + * (Optional) Start date for the initial sync of report tables in YYYY-MM-DD format. 
This + * determines the earliest date from which to sync historical data. If not specified, defaults to + * 2 years of historical data. + */ + @JsonProperty("sync_start_date") + private String syncStartDate; + + public GoogleAdsOptions setLookbackWindowDays(Long lookbackWindowDays) { + this.lookbackWindowDays = lookbackWindowDays; + return this; + } + + public Long getLookbackWindowDays() { + return lookbackWindowDays; + } + + public GoogleAdsOptions setManagerAccountId(String managerAccountId) { + this.managerAccountId = managerAccountId; + return this; + } + + public String getManagerAccountId() { + return managerAccountId; + } + + public GoogleAdsOptions setSyncStartDate(String syncStartDate) { + this.syncStartDate = syncStartDate; + return this; + } + + public String getSyncStartDate() { + return syncStartDate; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GoogleAdsOptions that = (GoogleAdsOptions) o; + return Objects.equals(lookbackWindowDays, that.lookbackWindowDays) + && Objects.equals(managerAccountId, that.managerAccountId) + && Objects.equals(syncStartDate, that.syncStartDate); + } + + @Override + public int hashCode() { + return Objects.hash(lookbackWindowDays, managerAccountId, syncStartDate); + } + + @Override + public String toString() { + return new ToStringer(GoogleAdsOptions.class) + .add("lookbackWindowDays", lookbackWindowDays) + .add("managerAccountId", managerAccountId) + .add("syncStartDate", syncStartDate) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptions.java new file mode 100755 index 000000000..72cd97fcb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptions.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs 
by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GoogleDriveOptions { + /** */ + @JsonProperty("entity_type") + private GoogleDriveOptionsGoogleDriveEntityType entityType; + + /** */ + @JsonProperty("file_ingestion_options") + private FileIngestionOptions fileIngestionOptions; + + /** Google Drive URL. */ + @JsonProperty("url") + private String url; + + public GoogleDriveOptions setEntityType(GoogleDriveOptionsGoogleDriveEntityType entityType) { + this.entityType = entityType; + return this; + } + + public GoogleDriveOptionsGoogleDriveEntityType getEntityType() { + return entityType; + } + + public GoogleDriveOptions setFileIngestionOptions(FileIngestionOptions fileIngestionOptions) { + this.fileIngestionOptions = fileIngestionOptions; + return this; + } + + public FileIngestionOptions getFileIngestionOptions() { + return fileIngestionOptions; + } + + public GoogleDriveOptions setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GoogleDriveOptions that = (GoogleDriveOptions) o; + return Objects.equals(entityType, that.entityType) + && Objects.equals(fileIngestionOptions, that.fileIngestionOptions) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(entityType, fileIngestionOptions, url); + } + + @Override + public String toString() { + return new ToStringer(GoogleDriveOptions.class) + .add("entityType", entityType) + .add("fileIngestionOptions", fileIngestionOptions) + .add("url", url) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptionsGoogleDriveEntityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptionsGoogleDriveEntityType.java new file mode 100755 index 000000000..913a3d453 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GoogleDriveOptionsGoogleDriveEntityType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum GoogleDriveOptionsGoogleDriveEntityType { + FILE, + FILE_METADATA, + PERMISSION, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java index 9e68c3c76..410c7c90e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionSourceType.java @@ -10,6 +10,7 @@ public enum IngestionSourceType { DYNAMICS365, FOREIGN_CATALOG, GA4_RAW_DATA, + GOOGLE_DRIVE, MANAGED_POSTGRESQL, MYSQL, NETSUITE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java index 5199846ad..bff0d6640 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java @@ -89,6 +89,14 @@ public GetPipelineResponse waitGetPipelineIdle( throw new TimeoutException(String.format("timed out after %s: %s", timeout, statusMessage)); } + /** + * * Applies the current pipeline environment onto the pipeline compute. 
The environment applied + * can be used by subsequent dev-mode updates. + */ + public ApplyEnvironmentRequestResponse applyEnvironment(ApplyEnvironmentRequest request) { + return impl.applyEnvironment(request); + } + /** * Creates a new pipeline using Unity Catalog from a pipeline using Hive Metastore. This method * returns the ID of the newly created clone. Additionally, this method starts an update for the diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java index ec7c1791f..bda5af574 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java @@ -16,6 +16,23 @@ public PipelinesImpl(ApiClient apiClient) { this.apiClient = apiClient; } + @Override + public ApplyEnvironmentRequestResponse applyEnvironment(ApplyEnvironmentRequest request) { + String path = String.format("/api/2.0/pipelines/%s/environment/apply", request.getPipelineId()); + try { + Request req = new Request("POST", path); + + ApiClient.setQuery(req, request); + req.withHeader("Accept", "application/json"); + if (apiClient.workspaceId() != null) { + req.withHeader("X-Databricks-Org-Id", apiClient.workspaceId()); + } + return apiClient.execute(req, ApplyEnvironmentRequestResponse.class); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + @Override public ClonePipelineResponse clone(ClonePipelineRequest request) { String path = String.format("/api/2.0/pipelines/%s/clone", request.getPipelineId()); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java index ffb8ed81d..f9ef025f5 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java @@ -24,6 +24,12 @@ */ @Generated public interface PipelinesService { + /** + * * Applies the current pipeline environment onto the pipeline compute. The environment applied + * can be used by subsequent dev-mode updates. + */ + ApplyEnvironmentRequestResponse applyEnvironment(ApplyEnvironmentRequest applyEnvironmentRequest); + /** * Creates a new pipeline using Unity Catalog from a pipeline using Hive Metastore. This method * returns the ID of the newly created clone. Additionally, this method starts an update for the diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java index 6f25d25f9..845fd1fb5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java @@ -9,6 +9,10 @@ @Generated public class SchemaSpec { + /** (Optional) Source Specific Connector Options */ + @JsonProperty("connector_options") + private ConnectorOptions connectorOptions; + /** Required. Destination catalog to store tables. 
*/ @JsonProperty("destination_catalog") private String destinationCatalog; @@ -37,6 +41,15 @@ public class SchemaSpec { @JsonProperty("table_configuration") private TableSpecificConfig tableConfiguration; + public SchemaSpec setConnectorOptions(ConnectorOptions connectorOptions) { + this.connectorOptions = connectorOptions; + return this; + } + + public ConnectorOptions getConnectorOptions() { + return connectorOptions; + } + public SchemaSpec setDestinationCatalog(String destinationCatalog) { this.destinationCatalog = destinationCatalog; return this; @@ -87,7 +100,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SchemaSpec that = (SchemaSpec) o; - return Objects.equals(destinationCatalog, that.destinationCatalog) + return Objects.equals(connectorOptions, that.connectorOptions) + && Objects.equals(destinationCatalog, that.destinationCatalog) && Objects.equals(destinationSchema, that.destinationSchema) && Objects.equals(sourceCatalog, that.sourceCatalog) && Objects.equals(sourceSchema, that.sourceSchema) @@ -97,12 +111,18 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - destinationCatalog, destinationSchema, sourceCatalog, sourceSchema, tableConfiguration); + connectorOptions, + destinationCatalog, + destinationSchema, + sourceCatalog, + sourceSchema, + tableConfiguration); } @Override public String toString() { return new ToStringer(SchemaSpec.class) + .add("connectorOptions", connectorOptions) .add("destinationCatalog", destinationCatalog) .add("destinationSchema", destinationSchema) .add("sourceCatalog", sourceCatalog) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptions.java new file mode 100755 index 000000000..ee83dbd6f --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptions.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class SharepointOptions { + /** (Optional) The type of SharePoint entity to ingest. If not specified, defaults to FILE. */ + @JsonProperty("entity_type") + private SharepointOptionsSharepointEntityType entityType; + + /** (Optional) File ingestion options for processing files. */ + @JsonProperty("file_ingestion_options") + private FileIngestionOptions fileIngestionOptions; + + /** Required. The SharePoint URL. */ + @JsonProperty("url") + private String url; + + public SharepointOptions setEntityType(SharepointOptionsSharepointEntityType entityType) { + this.entityType = entityType; + return this; + } + + public SharepointOptionsSharepointEntityType getEntityType() { + return entityType; + } + + public SharepointOptions setFileIngestionOptions(FileIngestionOptions fileIngestionOptions) { + this.fileIngestionOptions = fileIngestionOptions; + return this; + } + + public FileIngestionOptions getFileIngestionOptions() { + return fileIngestionOptions; + } + + public SharepointOptions setUrl(String url) { + this.url = url; + return this; + } + + public String getUrl() { + return url; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SharepointOptions that = (SharepointOptions) o; + return Objects.equals(entityType, that.entityType) + && Objects.equals(fileIngestionOptions, that.fileIngestionOptions) + && Objects.equals(url, that.url); + } + + @Override + public int hashCode() { + return Objects.hash(entityType, fileIngestionOptions, url); + } + 
+ @Override + public String toString() { + return new ToStringer(SharepointOptions.class) + .add("entityType", entityType) + .add("fileIngestionOptions", fileIngestionOptions) + .add("url", url) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptionsSharepointEntityType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptionsSharepointEntityType.java new file mode 100755 index 000000000..04dcc34aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SharepointOptionsSharepointEntityType.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum SharepointOptionsSharepointEntityType { + FILE, + FILE_METADATA, + LIST, + PERMISSION, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java index 619922530..6b85d657e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java @@ -9,6 +9,10 @@ @Generated public class TableSpec { + /** (Optional) Source Specific Connector Options */ + @JsonProperty("connector_options") + private ConnectorOptions connectorOptions; + /** Required. Destination catalog to store table. 
*/ @JsonProperty("destination_catalog") private String destinationCatalog; @@ -43,6 +47,15 @@ public class TableSpec { @JsonProperty("table_configuration") private TableSpecificConfig tableConfiguration; + public TableSpec setConnectorOptions(ConnectorOptions connectorOptions) { + this.connectorOptions = connectorOptions; + return this; + } + + public ConnectorOptions getConnectorOptions() { + return connectorOptions; + } + public TableSpec setDestinationCatalog(String destinationCatalog) { this.destinationCatalog = destinationCatalog; return this; @@ -111,7 +124,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TableSpec that = (TableSpec) o; - return Objects.equals(destinationCatalog, that.destinationCatalog) + return Objects.equals(connectorOptions, that.connectorOptions) + && Objects.equals(destinationCatalog, that.destinationCatalog) && Objects.equals(destinationSchema, that.destinationSchema) && Objects.equals(destinationTable, that.destinationTable) && Objects.equals(sourceCatalog, that.sourceCatalog) @@ -123,6 +137,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + connectorOptions, destinationCatalog, destinationSchema, destinationTable, @@ -135,6 +150,7 @@ public int hashCode() { @Override public String toString() { return new ToStringer(TableSpec.class) + .add("connectorOptions", connectorOptions) .add("destinationCatalog", destinationCatalog) .add("destinationSchema", destinationSchema) .add("destinationTable", destinationTable) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java index a063aa86c..ad1ee1e0b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java @@ -61,7 +61,7 @@ public class TableSpecificConfig { @JsonProperty("salesforce_include_formula_fields") private Boolean salesforceIncludeFormulaFields; - /** The SCD type to use to ingest the table. */ + /** */ @JsonProperty("scd_type") private TableSpecificConfigScdType scdType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptions.java new file mode 100755 index 000000000..79b7554cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptions.java @@ -0,0 +1,160 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** TikTok Ads specific options for ingestion */ +@Generated +public class TikTokAdsOptions { + /** (Optional) Data level for the report. If not specified, defaults to AUCTION_CAMPAIGN. */ + @JsonProperty("data_level") + private TikTokAdsOptionsTikTokDataLevel dataLevel; + + /** + * (Optional) Dimensions to include in the report. Examples: "campaign_id", "adgroup_id", "ad_id", + * "stat_time_day", "stat_time_hour" If not specified, defaults to campaign_id. + */ + @JsonProperty("dimensions") + private Collection dimensions; + + /** + * (Optional) Number of days to look back for report tables during incremental sync to capture + * late-arriving conversions and attribution data. If not specified, defaults to 7 days. + */ + @JsonProperty("lookback_window_days") + private Long lookbackWindowDays; + + /** + * (Optional) Metrics to include in the report. 
Examples: "spend", "impressions", "clicks", + * "conversion", "cpc" If not specified, defaults to basic metrics (spend, impressions, clicks, + * etc.) + */ + @JsonProperty("metrics") + private Collection metrics; + + /** + * (Optional) Whether to request lifetime metrics (all-time aggregated data). When true, the + * report returns all-time data. If not specified, defaults to false. + */ + @JsonProperty("query_lifetime") + private Boolean queryLifetime; + + /** (Optional) Report type for the TikTok Ads API. If not specified, defaults to BASIC. */ + @JsonProperty("report_type") + private TikTokAdsOptionsTikTokReportType reportType; + + /** + * (Optional) Start date for the initial sync of report tables in YYYY-MM-DD format. This + * determines the earliest date from which to sync historical data. If not specified, defaults to + * 1 year of historical data for daily reports and 30 days for hourly reports. + */ + @JsonProperty("sync_start_date") + private String syncStartDate; + + public TikTokAdsOptions setDataLevel(TikTokAdsOptionsTikTokDataLevel dataLevel) { + this.dataLevel = dataLevel; + return this; + } + + public TikTokAdsOptionsTikTokDataLevel getDataLevel() { + return dataLevel; + } + + public TikTokAdsOptions setDimensions(Collection dimensions) { + this.dimensions = dimensions; + return this; + } + + public Collection getDimensions() { + return dimensions; + } + + public TikTokAdsOptions setLookbackWindowDays(Long lookbackWindowDays) { + this.lookbackWindowDays = lookbackWindowDays; + return this; + } + + public Long getLookbackWindowDays() { + return lookbackWindowDays; + } + + public TikTokAdsOptions setMetrics(Collection metrics) { + this.metrics = metrics; + return this; + } + + public Collection getMetrics() { + return metrics; + } + + public TikTokAdsOptions setQueryLifetime(Boolean queryLifetime) { + this.queryLifetime = queryLifetime; + return this; + } + + public Boolean getQueryLifetime() { + return queryLifetime; + } + + public TikTokAdsOptions 
setReportType(TikTokAdsOptionsTikTokReportType reportType) { + this.reportType = reportType; + return this; + } + + public TikTokAdsOptionsTikTokReportType getReportType() { + return reportType; + } + + public TikTokAdsOptions setSyncStartDate(String syncStartDate) { + this.syncStartDate = syncStartDate; + return this; + } + + public String getSyncStartDate() { + return syncStartDate; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TikTokAdsOptions that = (TikTokAdsOptions) o; + return Objects.equals(dataLevel, that.dataLevel) + && Objects.equals(dimensions, that.dimensions) + && Objects.equals(lookbackWindowDays, that.lookbackWindowDays) + && Objects.equals(metrics, that.metrics) + && Objects.equals(queryLifetime, that.queryLifetime) + && Objects.equals(reportType, that.reportType) + && Objects.equals(syncStartDate, that.syncStartDate); + } + + @Override + public int hashCode() { + return Objects.hash( + dataLevel, + dimensions, + lookbackWindowDays, + metrics, + queryLifetime, + reportType, + syncStartDate); + } + + @Override + public String toString() { + return new ToStringer(TikTokAdsOptions.class) + .add("dataLevel", dataLevel) + .add("dimensions", dimensions) + .add("lookbackWindowDays", lookbackWindowDays) + .add("metrics", metrics) + .add("queryLifetime", queryLifetime) + .add("reportType", reportType) + .add("syncStartDate", syncStartDate) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptionsTikTokDataLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptionsTikTokDataLevel.java new file mode 100755 index 000000000..5ebcf5ad6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptionsTikTokDataLevel.java @@ -0,0 +1,14 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +/** Data level for TikTok Ads report aggregation. */ +@Generated +public enum TikTokAdsOptionsTikTokDataLevel { + AUCTION_AD, + AUCTION_ADGROUP, + AUCTION_ADVERTISER, + AUCTION_CAMPAIGN, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptionsTikTokReportType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptionsTikTokReportType.java new file mode 100755 index 000000000..ae5953a8a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TikTokAdsOptionsTikTokReportType.java @@ -0,0 +1,16 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +/** Report type for TikTok Ads API. */ +@Generated +public enum TikTokAdsOptionsTikTokReportType { + AUDIENCE, + BASIC, + BUSINESS_CENTER, + DSA, + GMV_MAX, + PLAYABLE_AD, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleSpec.java index 9ec74a691..e8af2c9da 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/RoleRoleSpec.java @@ -19,8 +19,8 @@ public class RoleRoleSpec { * identity_type: * For the managed identities, OAUTH is used. * For the regular postgres roles, * authentication based on postgres passwords is used. * - *

NOTE: this is ignored for the Databricks identity type GROUP, and NO_LOGIN is implicitly - * assumed instead for the GROUP identity type. + *

NOTE: for the Databricks identity type GROUP, LAKEBASE_OAUTH_V1 is the default auth method + * (group can login as well). */ @JsonProperty("auth_method") private RoleAuthMethod authMethod; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTable.java index cde9daee3..26905590e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTable.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTable.java @@ -38,7 +38,7 @@ public class SyncedTable { @JsonProperty("status") private SyncedTableSyncedTableStatus status; - /** */ + /** The Unity Catalog table ID for this synced table. */ @JsonProperty("uid") private String uid; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableSpec.java index ecccf27c8..745692c75 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/postgres/SyncedTableSyncedTableSpec.java @@ -62,14 +62,6 @@ public class SyncedTableSyncedTableSpec { @JsonProperty("primary_key_columns") private Collection primaryKeyColumns; - /** - * The full resource name of the project associated with the table. - * - *

Format: "projects/{project_id}". - */ - @JsonProperty("project") - private String project; - /** Scheduling policy of the underlying pipeline. */ @JsonProperty("scheduling_policy") private SyncedTableSyncedTableSpecSyncedTableSchedulingPolicy schedulingPolicy; @@ -144,15 +136,6 @@ public Collection getPrimaryKeyColumns() { return primaryKeyColumns; } - public SyncedTableSyncedTableSpec setProject(String project) { - this.project = project; - return this; - } - - public String getProject() { - return project; - } - public SyncedTableSyncedTableSpec setSchedulingPolicy( SyncedTableSyncedTableSpecSyncedTableSchedulingPolicy schedulingPolicy) { this.schedulingPolicy = schedulingPolicy; @@ -192,7 +175,6 @@ public boolean equals(Object o) { && Objects.equals(newPipelineSpec, that.newPipelineSpec) && Objects.equals(postgresDatabase, that.postgresDatabase) && Objects.equals(primaryKeyColumns, that.primaryKeyColumns) - && Objects.equals(project, that.project) && Objects.equals(schedulingPolicy, that.schedulingPolicy) && Objects.equals(sourceTableFullName, that.sourceTableFullName) && Objects.equals(timeseriesKey, that.timeseriesKey); @@ -207,7 +189,6 @@ public int hashCode() { newPipelineSpec, postgresDatabase, primaryKeyColumns, - project, schedulingPolicy, sourceTableFullName, timeseriesKey); @@ -222,7 +203,6 @@ public String toString() { .add("newPipelineSpec", newPipelineSpec) .add("postgresDatabase", postgresDatabase) .add("primaryKeyColumns", primaryKeyColumns) - .add("project", project) .add("schedulingPolicy", schedulingPolicy) .add("sourceTableFullName", sourceTableFullName) .add("timeseriesKey", timeseriesKey) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java index 4ae7809b5..f2837aadf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java @@ -5,6 +5,7 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; /** Configuration details for creating on-behalf tokens. */ @@ -22,6 +23,10 @@ public class CreateOboTokenRequest { @JsonProperty("lifetime_seconds") private Long lifetimeSeconds; + /** */ + @JsonProperty("scopes") + private Collection scopes; + public CreateOboTokenRequest setApplicationId(String applicationId) { this.applicationId = applicationId; return this; @@ -49,6 +54,15 @@ public Long getLifetimeSeconds() { return lifetimeSeconds; } + public CreateOboTokenRequest setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -56,12 +70,13 @@ public boolean equals(Object o) { CreateOboTokenRequest that = (CreateOboTokenRequest) o; return Objects.equals(applicationId, that.applicationId) && Objects.equals(comment, that.comment) - && Objects.equals(lifetimeSeconds, that.lifetimeSeconds); + && Objects.equals(lifetimeSeconds, that.lifetimeSeconds) + && Objects.equals(scopes, that.scopes); } @Override public int hashCode() { - return Objects.hash(applicationId, comment, lifetimeSeconds); + return Objects.hash(applicationId, comment, lifetimeSeconds, scopes); } @Override @@ -70,6 +85,7 @@ public String toString() { .add("applicationId", applicationId) .add("comment", comment) .add("lifetimeSeconds", lifetimeSeconds) + .add("scopes", scopes) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java index fb1e1a5bd..91a5c6605 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreatePrivateEndpointRule.java @@ -34,6 +34,10 @@ public class CreatePrivateEndpointRule { @JsonProperty("error_message") private String errorMessage; + /** */ + @JsonProperty("gcp_endpoint") + private GcpEndpoint gcpEndpoint; + /** * Not used by customer-managed private endpoint services. * @@ -85,6 +89,15 @@ public String getErrorMessage() { return errorMessage; } + public CreatePrivateEndpointRule setGcpEndpoint(GcpEndpoint gcpEndpoint) { + this.gcpEndpoint = gcpEndpoint; + return this; + } + + public GcpEndpoint getGcpEndpoint() { + return gcpEndpoint; + } + public CreatePrivateEndpointRule setGroupId(String groupId) { this.groupId = groupId; return this; @@ -120,6 +133,7 @@ public boolean equals(Object o) { return Objects.equals(domainNames, that.domainNames) && Objects.equals(endpointService, that.endpointService) && Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(gcpEndpoint, that.gcpEndpoint) && Objects.equals(groupId, that.groupId) && Objects.equals(resourceId, that.resourceId) && Objects.equals(resourceNames, that.resourceNames); @@ -128,7 +142,13 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - domainNames, endpointService, errorMessage, groupId, resourceId, resourceNames); + domainNames, + endpointService, + errorMessage, + gcpEndpoint, + groupId, + resourceId, + resourceNames); } @Override @@ -137,6 +157,7 @@ public String toString() { .add("domainNames", domainNames) .add("endpointService", endpointService) .add("errorMessage", errorMessage) + .add("gcpEndpoint", gcpEndpoint) .add("groupId", groupId) .add("resourceId", resourceId) .add("resourceNames", resourceNames) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java index 91c3b4298..60e76814d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateTokenRequest.java @@ -5,6 +5,7 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated @@ -21,6 +22,10 @@ public class CreateTokenRequest { @JsonProperty("lifetime_seconds") private Long lifetimeSeconds; + /** Optional scopes of the token. */ + @JsonProperty("scopes") + private Collection scopes; + public CreateTokenRequest setComment(String comment) { this.comment = comment; return this; @@ -39,18 +44,28 @@ public Long getLifetimeSeconds() { return lifetimeSeconds; } + public CreateTokenRequest setScopes(Collection scopes) { + this.scopes = scopes; + return this; + } + + public Collection getScopes() { + return scopes; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CreateTokenRequest that = (CreateTokenRequest) o; return Objects.equals(comment, that.comment) - && Objects.equals(lifetimeSeconds, that.lifetimeSeconds); + && Objects.equals(lifetimeSeconds, that.lifetimeSeconds) + && Objects.equals(scopes, that.scopes); } @Override public int hashCode() { - return Objects.hash(comment, lifetimeSeconds); + return Objects.hash(comment, lifetimeSeconds, scopes); } @Override @@ -58,6 +73,7 @@ public String toString() { return new ToStringer(CreateTokenRequest.class) .add("comment", comment) .add("lifetimeSeconds", lifetimeSeconds) + .add("scopes", scopes) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsDestination.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsDestination.java deleted file mode 100755 index c33e69b56..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyAppsDestination.java +++ /dev/null @@ -1,46 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CustomerFacingIngressNetworkPolicyAppsDestination { - /** Must be set to true. */ - @JsonProperty("all_destinations") - private Boolean allDestinations; - - public CustomerFacingIngressNetworkPolicyAppsDestination setAllDestinations( - Boolean allDestinations) { - this.allDestinations = allDestinations; - return this; - } - - public Boolean getAllDestinations() { - return allDestinations; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CustomerFacingIngressNetworkPolicyAppsDestination that = - (CustomerFacingIngressNetworkPolicyAppsDestination) o; - return Objects.equals(allDestinations, that.allDestinations); - } - - @Override - public int hashCode() { - return Objects.hash(allDestinations); - } - - @Override - public String toString() { - return new ToStringer(CustomerFacingIngressNetworkPolicyAppsDestination.class) - .add("allDestinations", allDestinations) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseDestination.java deleted file mode 100755 index 80d66d007..000000000 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyLakebaseDestination.java +++ /dev/null @@ -1,46 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.settings; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class CustomerFacingIngressNetworkPolicyLakebaseDestination { - /** Must be set to true. */ - @JsonProperty("all_destinations") - private Boolean allDestinations; - - public CustomerFacingIngressNetworkPolicyLakebaseDestination setAllDestinations( - Boolean allDestinations) { - this.allDestinations = allDestinations; - return this; - } - - public Boolean getAllDestinations() { - return allDestinations; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CustomerFacingIngressNetworkPolicyLakebaseDestination that = - (CustomerFacingIngressNetworkPolicyLakebaseDestination) o; - return Objects.equals(allDestinations, that.allDestinations); - } - - @Override - public int hashCode() { - return Objects.hash(allDestinations); - } - - @Override - public String toString() { - return new ToStringer(CustomerFacingIngressNetworkPolicyLakebaseDestination.class) - .add("allDestinations", allDestinations) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java index 7eaaa3627..39a13b1fb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CustomerFacingIngressNetworkPolicyRequestDestination.java @@ -16,19 +16,11 @@ public class CustomerFacingIngressNetworkPolicyRequestDestination { @JsonProperty("all_destinations") private Boolean allDestinations; - /** */ - @JsonProperty("apps") - private CustomerFacingIngressNetworkPolicyAppsDestination apps; - - /** */ - @JsonProperty("lakebase") - private CustomerFacingIngressNetworkPolicyLakebaseDestination lakebase; - /** */ @JsonProperty("workspace_api") private CustomerFacingIngressNetworkPolicyWorkspaceApiDestination workspaceApi; - /** */ + /** Workspace destinations */ @JsonProperty("workspace_ui") private CustomerFacingIngressNetworkPolicyWorkspaceUiDestination workspaceUi; @@ -42,26 +34,6 @@ public Boolean getAllDestinations() { return allDestinations; } - public CustomerFacingIngressNetworkPolicyRequestDestination setApps( - CustomerFacingIngressNetworkPolicyAppsDestination apps) { - this.apps = apps; - return this; - } - - public CustomerFacingIngressNetworkPolicyAppsDestination getApps() { - return apps; - } - - public CustomerFacingIngressNetworkPolicyRequestDestination setLakebase( - CustomerFacingIngressNetworkPolicyLakebaseDestination lakebase) { - this.lakebase = lakebase; - return this; - } - - public CustomerFacingIngressNetworkPolicyLakebaseDestination getLakebase() { - return lakebase; - } - public CustomerFacingIngressNetworkPolicyRequestDestination setWorkspaceApi( CustomerFacingIngressNetworkPolicyWorkspaceApiDestination workspaceApi) { this.workspaceApi = workspaceApi; @@ -89,23 +61,19 @@ public boolean equals(Object o) { CustomerFacingIngressNetworkPolicyRequestDestination that = (CustomerFacingIngressNetworkPolicyRequestDestination) o; return Objects.equals(allDestinations, that.allDestinations) - && Objects.equals(apps, that.apps) - && Objects.equals(lakebase, that.lakebase) && Objects.equals(workspaceApi, that.workspaceApi) && Objects.equals(workspaceUi, 
that.workspaceUi); } @Override public int hashCode() { - return Objects.hash(allDestinations, apps, lakebase, workspaceApi, workspaceUi); + return Objects.hash(allDestinations, workspaceApi, workspaceUi); } @Override public String toString() { return new ToStringer(CustomerFacingIngressNetworkPolicyRequestDestination.class) .add("allDestinations", allDestinations) - .add("apps", apps) - .add("lakebase", lakebase) .add("workspaceApi", workspaceApi) .add("workspaceUi", workspaceUi) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GcpEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GcpEndpoint.java new file mode 100755 index 000000000..c7e128029 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GcpEndpoint.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GcpEndpoint { + /** Output only. The URI of the created PSC endpoint. */ + @JsonProperty("psc_endpoint_uri") + private String pscEndpointUri; + + /** + * The full url of the target service attachment. 
Example: + * projects/my-gcp-project/regions/us-east4/serviceAttachments/my-service-attachment + */ + @JsonProperty("service_attachment") + private String serviceAttachment; + + public GcpEndpoint setPscEndpointUri(String pscEndpointUri) { + this.pscEndpointUri = pscEndpointUri; + return this; + } + + public String getPscEndpointUri() { + return pscEndpointUri; + } + + public GcpEndpoint setServiceAttachment(String serviceAttachment) { + this.serviceAttachment = serviceAttachment; + return this; + } + + public String getServiceAttachment() { + return serviceAttachment; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GcpEndpoint that = (GcpEndpoint) o; + return Objects.equals(pscEndpointUri, that.pscEndpointUri) + && Objects.equals(serviceAttachment, that.serviceAttachment); + } + + @Override + public int hashCode() { + return Objects.hash(pscEndpointUri, serviceAttachment); + } + + @Override + public String toString() { + return new ToStringer(GcpEndpoint.class) + .add("pscEndpointUri", pscEndpointUri) + .add("serviceAttachment", serviceAttachment) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java index d4e51fc01..6547dfbe4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccPrivateEndpointRule.java @@ -79,6 +79,10 @@ public class NccPrivateEndpointRule { @JsonProperty("error_message") private String errorMessage; + /** */ + @JsonProperty("gcp_endpoint") + private GcpEndpoint gcpEndpoint; + /** * Not used by customer-managed private endpoint services. 
* @@ -216,6 +220,15 @@ public String getErrorMessage() { return errorMessage; } + public NccPrivateEndpointRule setGcpEndpoint(GcpEndpoint gcpEndpoint) { + this.gcpEndpoint = gcpEndpoint; + return this; + } + + public GcpEndpoint getGcpEndpoint() { + return gcpEndpoint; + } + public NccPrivateEndpointRule setGroupId(String groupId) { this.groupId = groupId; return this; @@ -294,6 +307,7 @@ public boolean equals(Object o) { && Objects.equals(endpointName, that.endpointName) && Objects.equals(endpointService, that.endpointService) && Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(gcpEndpoint, that.gcpEndpoint) && Objects.equals(groupId, that.groupId) && Objects.equals(networkConnectivityConfigId, that.networkConnectivityConfigId) && Objects.equals(resourceId, that.resourceId) @@ -316,6 +330,7 @@ public int hashCode() { endpointName, endpointService, errorMessage, + gcpEndpoint, groupId, networkConnectivityConfigId, resourceId, @@ -338,6 +353,7 @@ public String toString() { .add("endpointName", endpointName) .add("endpointService", endpointService) .add("errorMessage", errorMessage) + .add("gcpEndpoint", gcpEndpoint) .add("groupId", groupId) .add("networkConnectivityConfigId", networkConnectivityConfigId) .add("resourceId", resourceId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java index 1936954b2..bd10332c1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdatePrivateEndpointRule.java @@ -36,6 +36,10 @@ public class UpdatePrivateEndpointRule { @JsonProperty("error_message") private String errorMessage; + /** */ + @JsonProperty("gcp_endpoint") + private GcpEndpoint gcpEndpoint; + /** * Only used by private endpoints towards AWS S3 service. 
* @@ -74,6 +78,15 @@ public String getErrorMessage() { return errorMessage; } + public UpdatePrivateEndpointRule setGcpEndpoint(GcpEndpoint gcpEndpoint) { + this.gcpEndpoint = gcpEndpoint; + return this; + } + + public GcpEndpoint getGcpEndpoint() { + return gcpEndpoint; + } + public UpdatePrivateEndpointRule setResourceNames(Collection resourceNames) { this.resourceNames = resourceNames; return this; @@ -91,12 +104,13 @@ public boolean equals(Object o) { return Objects.equals(domainNames, that.domainNames) && Objects.equals(enabled, that.enabled) && Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(gcpEndpoint, that.gcpEndpoint) && Objects.equals(resourceNames, that.resourceNames); } @Override public int hashCode() { - return Objects.hash(domainNames, enabled, errorMessage, resourceNames); + return Objects.hash(domainNames, enabled, errorMessage, gcpEndpoint, resourceNames); } @Override @@ -105,6 +119,7 @@ public String toString() { .add("domainNames", domainNames) .add("enabled", enabled) .add("errorMessage", errorMessage) + .add("gcpEndpoint", gcpEndpoint) .add("resourceNames", resourceNames) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java index 0fd5f703d..63f35231e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementStatus.java @@ -14,6 +14,13 @@ public class StatementStatus { @JsonProperty("error") private ServiceError error; + /** + * SQLSTATE error code returned when the statement execution fails. Only populated when the + * statement status is `FAILED`. 
+ */ + @JsonProperty("sql_state") + private String sqlState; + /** * Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - * `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution @@ -33,6 +40,15 @@ public ServiceError getError() { return error; } + public StatementStatus setSqlState(String sqlState) { + this.sqlState = sqlState; + return this; + } + + public String getSqlState() { + return sqlState; + } + public StatementStatus setState(StatementState state) { this.state = state; return this; @@ -47,16 +63,22 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; StatementStatus that = (StatementStatus) o; - return Objects.equals(error, that.error) && Objects.equals(state, that.state); + return Objects.equals(error, that.error) + && Objects.equals(sqlState, that.sqlState) + && Objects.equals(state, that.state); } @Override public int hashCode() { - return Objects.hash(error, state); + return Objects.hash(error, sqlState, state); } @Override public String toString() { - return new ToStringer(StatementStatus.class).add("error", error).add("state", state).toString(); + return new ToStringer(StatementStatus.class) + .add("error", error) + .add("sqlState", sqlState) + .add("state", state) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java index 53f7759da..c8abf37c4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateEndpoint.java @@ -28,6 +28,10 @@ public class CreateEndpoint { @JsonProperty("name") private String name; + /** The usage policy id to be applied once we've migrated to usage policies */ + 
@JsonProperty("usage_policy_id") + private String usagePolicyId; + public CreateEndpoint setBudgetPolicyId(String budgetPolicyId) { this.budgetPolicyId = budgetPolicyId; return this; @@ -64,6 +68,15 @@ public String getName() { return name; } + public CreateEndpoint setUsagePolicyId(String usagePolicyId) { + this.usagePolicyId = usagePolicyId; + return this; + } + + public String getUsagePolicyId() { + return usagePolicyId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -72,12 +85,13 @@ public boolean equals(Object o) { return Objects.equals(budgetPolicyId, that.budgetPolicyId) && Objects.equals(endpointType, that.endpointType) && Objects.equals(minQps, that.minQps) - && Objects.equals(name, that.name); + && Objects.equals(name, that.name) + && Objects.equals(usagePolicyId, that.usagePolicyId); } @Override public int hashCode() { - return Objects.hash(budgetPolicyId, endpointType, minQps, name); + return Objects.hash(budgetPolicyId, endpointType, minQps, name, usagePolicyId); } @Override @@ -87,6 +101,7 @@ public String toString() { .add("endpointType", endpointType) .add("minQps", minQps) .add("name", name) + .add("usagePolicyId", usagePolicyId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java index 8c688ec23..f653abbc2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/CreateVectorIndexRequest.java @@ -21,6 +21,10 @@ public class CreateVectorIndexRequest { @JsonProperty("endpoint_name") private String endpointName; + /** The subtype of the index. Use `HYBRID` or `FULL_TEXT`. `VECTOR` is not supported. 
*/ + @JsonProperty("index_subtype") + private IndexSubtype indexSubtype; + /** */ @JsonProperty("index_type") private VectorIndexType indexType; @@ -62,6 +66,15 @@ public String getEndpointName() { return endpointName; } + public CreateVectorIndexRequest setIndexSubtype(IndexSubtype indexSubtype) { + this.indexSubtype = indexSubtype; + return this; + } + + public IndexSubtype getIndexSubtype() { + return indexSubtype; + } + public CreateVectorIndexRequest setIndexType(VectorIndexType indexType) { this.indexType = indexType; return this; @@ -97,6 +110,7 @@ public boolean equals(Object o) { return Objects.equals(deltaSyncIndexSpec, that.deltaSyncIndexSpec) && Objects.equals(directAccessIndexSpec, that.directAccessIndexSpec) && Objects.equals(endpointName, that.endpointName) + && Objects.equals(indexSubtype, that.indexSubtype) && Objects.equals(indexType, that.indexType) && Objects.equals(name, that.name) && Objects.equals(primaryKey, that.primaryKey); @@ -105,7 +119,13 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - deltaSyncIndexSpec, directAccessIndexSpec, endpointName, indexType, name, primaryKey); + deltaSyncIndexSpec, + directAccessIndexSpec, + endpointName, + indexSubtype, + indexType, + name, + primaryKey); } @Override @@ -114,6 +134,7 @@ public String toString() { .add("deltaSyncIndexSpec", deltaSyncIndexSpec) .add("directAccessIndexSpec", directAccessIndexSpec) .add("endpointName", endpointName) + .add("indexSubtype", indexSubtype) .add("indexType", indexType) .add("name", name) .add("primaryKey", primaryKey) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java index 5d9c6cbae..55fe83b3a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointInfo.java @@ -10,6 +10,14 @@ @Generated public class EndpointInfo { + /** + * Discussed here: https://databricks.atlassian.net/wiki/x/OQDlCQE Additional documentation: + * https://aip.dev.databricks.com/129 the user selected budget policy id for the endpoint + * (client-side) + */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** Timestamp of endpoint creation */ @JsonProperty("creation_timestamp") private Long creationTimestamp; @@ -58,6 +66,15 @@ public class EndpointInfo { @JsonProperty("scaling_info") private EndpointScalingInfo scalingInfo; + public EndpointInfo setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public EndpointInfo setCreationTimestamp(Long creationTimestamp) { this.creationTimestamp = creationTimestamp; return this; @@ -171,7 +188,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; EndpointInfo that = (EndpointInfo) o; - return Objects.equals(creationTimestamp, that.creationTimestamp) + return Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(creationTimestamp, that.creationTimestamp) && Objects.equals(creator, that.creator) && Objects.equals(customTags, that.customTags) && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId) @@ -188,6 +206,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + budgetPolicyId, creationTimestamp, creator, customTags, @@ -205,6 +224,7 @@ public int hashCode() { @Override public String toString() { return new ToStringer(EndpointInfo.class) + .add("budgetPolicyId", budgetPolicyId) .add("creationTimestamp", creationTimestamp) .add("creator", creator) .add("customTags", customTags) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointType.java index a02f073f2..6da418282 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/EndpointType.java @@ -8,4 +8,5 @@ @Generated public enum EndpointType { STANDARD, + STORAGE_OPTIMIZED, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/IndexSubtype.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/IndexSubtype.java new file mode 100755 index 000000000..75ed726eb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/IndexSubtype.java @@ -0,0 +1,18 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.vectorsearch; + +import com.databricks.sdk.support.Generated; + +/** + * The subtype of the vector search index, determining the indexing and retrieval strategy. - + * `VECTOR`: Not supported. Use `HYBRID` instead. - `FULL_TEXT`: An index that uses full-text search + * without vector embeddings. - `HYBRID`: An index that uses vector embeddings for similarity search + * and hybrid search. + */ +@Generated +public enum IndexSubtype { + FULL_TEXT, // An index that uses full-text search without vector embeddings. + HYBRID, // An index that uses vector embeddings for similarity search and hybrid search. + VECTOR, // Not supported. Use `HYBRID` instead. 
+} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java index fbf2e8965..9ad729064 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/MiniVectorIndex.java @@ -17,6 +17,10 @@ public class MiniVectorIndex { @JsonProperty("endpoint_name") private String endpointName; + /** The subtype of the index. */ + @JsonProperty("index_subtype") + private IndexSubtype indexSubtype; + /** */ @JsonProperty("index_type") private VectorIndexType indexType; @@ -47,6 +51,15 @@ public String getEndpointName() { return endpointName; } + public MiniVectorIndex setIndexSubtype(IndexSubtype indexSubtype) { + this.indexSubtype = indexSubtype; + return this; + } + + public IndexSubtype getIndexSubtype() { + return indexSubtype; + } + public MiniVectorIndex setIndexType(VectorIndexType indexType) { this.indexType = indexType; return this; @@ -81,6 +94,7 @@ public boolean equals(Object o) { MiniVectorIndex that = (MiniVectorIndex) o; return Objects.equals(creator, that.creator) && Objects.equals(endpointName, that.endpointName) + && Objects.equals(indexSubtype, that.indexSubtype) && Objects.equals(indexType, that.indexType) && Objects.equals(name, that.name) && Objects.equals(primaryKey, that.primaryKey); @@ -88,7 +102,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(creator, endpointName, indexType, name, primaryKey); + return Objects.hash(creator, endpointName, indexSubtype, indexType, name, primaryKey); } @Override @@ -96,6 +110,7 @@ public String toString() { return new ToStringer(MiniVectorIndex.class) .add("creator", creator) .add("endpointName", endpointName) + .add("indexSubtype", indexSubtype) .add("indexType", indexType) .add("name", name) 
.add("primaryKey", primaryKey) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponse.java index 2a0e6337b..da8e10167 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/PatchEndpointBudgetPolicyResponse.java @@ -9,10 +9,23 @@ @Generated public class PatchEndpointBudgetPolicyResponse { + /** */ + @JsonProperty("budget_policy_id") + private String budgetPolicyId; + /** The budget policy applied to the vector search endpoint. */ @JsonProperty("effective_budget_policy_id") private String effectiveBudgetPolicyId; + public PatchEndpointBudgetPolicyResponse setBudgetPolicyId(String budgetPolicyId) { + this.budgetPolicyId = budgetPolicyId; + return this; + } + + public String getBudgetPolicyId() { + return budgetPolicyId; + } + public PatchEndpointBudgetPolicyResponse setEffectiveBudgetPolicyId( String effectiveBudgetPolicyId) { this.effectiveBudgetPolicyId = effectiveBudgetPolicyId; @@ -28,17 +41,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PatchEndpointBudgetPolicyResponse that = (PatchEndpointBudgetPolicyResponse) o; - return Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId); + return Objects.equals(budgetPolicyId, that.budgetPolicyId) + && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId); } @Override public int hashCode() { - return Objects.hash(effectiveBudgetPolicyId); + return Objects.hash(budgetPolicyId, effectiveBudgetPolicyId); } @Override public String toString() { return new ToStringer(PatchEndpointBudgetPolicyResponse.class) + .add("budgetPolicyId", budgetPolicyId) .add("effectiveBudgetPolicyId", 
effectiveBudgetPolicyId) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java index a823544d7..769e515ff 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorIndex.java @@ -25,6 +25,10 @@ public class VectorIndex { @JsonProperty("endpoint_name") private String endpointName; + /** The subtype of the index. */ + @JsonProperty("index_subtype") + private IndexSubtype indexSubtype; + /** */ @JsonProperty("index_type") private VectorIndexType indexType; @@ -77,6 +81,15 @@ public String getEndpointName() { return endpointName; } + public VectorIndex setIndexSubtype(IndexSubtype indexSubtype) { + this.indexSubtype = indexSubtype; + return this; + } + + public IndexSubtype getIndexSubtype() { + return indexSubtype; + } + public VectorIndex setIndexType(VectorIndexType indexType) { this.indexType = indexType; return this; @@ -122,6 +135,7 @@ public boolean equals(Object o) { && Objects.equals(deltaSyncIndexSpec, that.deltaSyncIndexSpec) && Objects.equals(directAccessIndexSpec, that.directAccessIndexSpec) && Objects.equals(endpointName, that.endpointName) + && Objects.equals(indexSubtype, that.indexSubtype) && Objects.equals(indexType, that.indexType) && Objects.equals(name, that.name) && Objects.equals(primaryKey, that.primaryKey) @@ -135,6 +149,7 @@ public int hashCode() { deltaSyncIndexSpec, directAccessIndexSpec, endpointName, + indexSubtype, indexType, name, primaryKey, @@ -148,6 +163,7 @@ public String toString() { .add("deltaSyncIndexSpec", deltaSyncIndexSpec) .add("directAccessIndexSpec", directAccessIndexSpec) .add("endpointName", endpointName) + .add("indexSubtype", indexSubtype) .add("indexType", indexType) .add("name", name) .add("primaryKey", primaryKey) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java index 911c92fce..1f31a2159 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java @@ -32,10 +32,10 @@ public WorkspaceAPI(WorkspaceService mock) { } /** - * Deletes an object or a directory (and optionally recursively deletes all objects in the - * directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * - * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an - * error `DIRECTORY_NOT_EMPTY`. + * Deprecated: use WorkspaceHierarchyService.DeleteTreeNode instead. Deletes an object or a + * directory (and optionally recursively deletes all objects in the directory). * If `path` does + * not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty + * directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`. * *

Object deletion cannot be undone and deleting a directory recursively is not atomic. */ @@ -94,8 +94,8 @@ public ObjectInfo getStatus(String path) { } /** - * Gets the status of an object or a directory. If `path` does not exist, this call returns an - * error `RESOURCE_DOES_NOT_EXIST`. + * Deprecated: use WorkspaceHierarchyService.GetTreeNode instead. Gets the status of an object or + * a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. */ public ObjectInfo getStatus(GetStatusRequest request) { return impl.getStatus(request); @@ -117,8 +117,9 @@ public Iterable list(String path) { } /** - * Lists the contents of a directory, or the object if it is not a directory. If the input path - * does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. + * Deprecated: use WorkspaceHierarchyService.ListTreeNodes instead. Lists the contents of a + * directory, or the object if it is not a directory. If the input path does not exist, this call + * returns an error `RESOURCE_DOES_NOT_EXIST`. */ public Iterable list(ListWorkspaceRequest request) { return new Paginator<>(request, impl::list, ListResponse::getObjects, response -> null); @@ -129,9 +130,10 @@ public void mkdirs(String path) { } /** - * Creates the specified directory (and necessary parent directories if they do not exist). If - * there is an object (not a directory) at any prefix of the input path, this call returns an - * error `RESOURCE_ALREADY_EXISTS`. + * Deprecated: use WorkspaceHierarchyService.CreateTreeNode instead. Creates the specified + * directory (and necessary parent directories if they do not exist). If there is an object (not a + * directory) at any prefix of the input path, this call returns an error + * `RESOURCE_ALREADY_EXISTS`. * *

Note that if this operation fails it may have succeeded in creating some of the necessary * parent directories. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java index c7705bcdf..a40367da1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceService.java @@ -18,10 +18,10 @@ @Generated public interface WorkspaceService { /** - * Deletes an object or a directory (and optionally recursively deletes all objects in the - * directory). * If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * - * If `path` is a non-empty directory and `recursive` is set to `false`, this call returns an - * error `DIRECTORY_NOT_EMPTY`. + * Deprecated: use WorkspaceHierarchyService.DeleteTreeNode instead. Deletes an object or a + * directory (and optionally recursively deletes all objects in the directory). * If `path` does + * not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a non-empty + * directory and `recursive` is set to `false`, this call returns an error `DIRECTORY_NOT_EMPTY`. * *

Object deletion cannot be undone and deleting a directory recursively is not atomic. */ @@ -49,8 +49,8 @@ WorkspaceObjectPermissions getPermissions( GetWorkspaceObjectPermissionsRequest getWorkspaceObjectPermissionsRequest); /** - * Gets the status of an object or a directory. If `path` does not exist, this call returns an - * error `RESOURCE_DOES_NOT_EXIST`. + * Deprecated: use WorkspaceHierarchyService.GetTreeNode instead. Gets the status of an object or + * a directory. If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. */ ObjectInfo getStatus(GetStatusRequest getStatusRequest); @@ -64,15 +64,17 @@ WorkspaceObjectPermissions getPermissions( void importContent(Import importContent); /** - * Lists the contents of a directory, or the object if it is not a directory. If the input path - * does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. + * Deprecated: use WorkspaceHierarchyService.ListTreeNodes instead. Lists the contents of a + * directory, or the object if it is not a directory. If the input path does not exist, this call + * returns an error `RESOURCE_DOES_NOT_EXIST`. */ ListResponse list(ListWorkspaceRequest listWorkspaceRequest); /** - * Creates the specified directory (and necessary parent directories if they do not exist). If - * there is an object (not a directory) at any prefix of the input path, this call returns an - * error `RESOURCE_ALREADY_EXISTS`. + * Deprecated: use WorkspaceHierarchyService.CreateTreeNode instead. Creates the specified + * directory (and necessary parent directories if they do not exist). If there is an object (not a + * directory) at any prefix of the input path, this call returns an error + * `RESOURCE_ALREADY_EXISTS`. * *

Note that if this operation fails it may have succeeded in creating some of the necessary * parent directories.