diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml new file mode 100644 index 0000000..683224b --- /dev/null +++ b/.github/workflows/python.yml @@ -0,0 +1,34 @@ +# NOTE: This file is auto generated by OpenAPI Generator. +# URL: https://openapi-generator.tech +# +# ref: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +name: vectorize_client Python package + +on: [push, pull_request] + +permissions: + contents: read + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install -r test-requirements.txt + - name: Test with pytest + run: | + pytest --cov=vectorize_client diff --git a/.gitignore b/.gitignore index a7d8464..43995bd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,19 +1,66 @@ -openapitools.json -src/**/.openapi-generator-ignore -src/**/.openapi-generator -src/**/.github -src/**/test -src/**/.gitlab-ci.yml -src/**/.travis.yml -src/**/git_push.sh -src/**/requirements.txt -src/**/setup.py -src/**/setup.cfg -src/**/test-requirements.txt -src/**/tox.ini -src/**/docs -node_modules - -**/*.pyc -.idea -gen-docs \ No newline at end of file +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.venv/ +.python-version +.pytest_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000..1a64f7c --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,31 @@ +# NOTE: This file is auto generated by OpenAPI Generator. +# URL: https://openapi-generator.tech +# +# ref: https://docs.gitlab.com/ee/ci/README.html +# ref: https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Python.gitlab-ci.yml + +stages: + - test + +.pytest: + stage: test + script: + - pip install -r requirements.txt + - pip install -r test-requirements.txt + - pytest --cov=vectorize_client + +pytest-3.9: + extends: .pytest + image: python:3.9-alpine +pytest-3.10: + extends: .pytest + image: python:3.10-alpine +pytest-3.11: + extends: .pytest + image: python:3.11-alpine +pytest-3.12: + extends: .pytest + image: python:3.12-alpine +pytest-3.13: + extends: .pytest + image: python:3.13-alpine diff --git a/.openapi-generator-ignore b/.openapi-generator-ignore new file mode 100644 index 0000000..7484ee5 --- /dev/null +++ b/.openapi-generator-ignore @@ -0,0 +1,23 @@ +# OpenAPI Generator Ignore +# Generated by openapi-generator https://github.com/openapitools/openapi-generator + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. 
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). +# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md diff --git a/.openapi-generator/FILES b/.openapi-generator/FILES new file mode 100644 index 0000000..2419a1f --- /dev/null +++ b/.openapi-generator/FILES @@ -0,0 +1,908 @@ +.github/workflows/python.yml +.gitignore +.gitlab-ci.yml +.openapi-generator-ignore +.travis.yml +README.md +docs/AIPlatform.md +docs/AIPlatformConfigSchema.md +docs/AIPlatformInput.md +docs/AIPlatformSchema.md +docs/AIPlatformType.md +docs/AWSS3AuthConfig.md +docs/AWSS3Config.md +docs/AZUREAISEARCHAuthConfig.md +docs/AZUREAISEARCHConfig.md +docs/AZUREBLOBAuthConfig.md +docs/AZUREBLOBConfig.md +docs/AddUserFromSourceConnectorResponse.md +docs/AddUserToSourceConnectorRequest.md +docs/AddUserToSourceConnectorRequestSelectedFiles.md +docs/AddUserToSourceConnectorRequestSelectedFilesAnyOf.md +docs/AddUserToSourceConnectorRequestSelectedFilesAnyOfValue.md +docs/AdvancedQuery.md +docs/AmazonS3.md +docs/AmazonS31.md +docs/AmazonS32.md +docs/AzureBlobStorage.md +docs/AzureBlobStorage1.md +docs/AzureBlobStorage2.md +docs/Azureaisearch.md +docs/Azureaisearch1.md +docs/Azureaisearch2.md +docs/BEDROCKAuthConfig.md +docs/Bedrock.md +docs/Bedrock1.md +docs/Bedrock2.md +docs/CAPELLAAuthConfig.md +docs/CAPELLAConfig.md +docs/CONFLUENCEAuthConfig.md +docs/CONFLUENCEConfig.md 
+docs/Capella.md +docs/Capella1.md +docs/Capella2.md +docs/Confluence.md +docs/Confluence1.md +docs/Confluence2.md +docs/ConnectorsAIPlatformsApi.md +docs/ConnectorsDestinationConnectorsApi.md +docs/ConnectorsSourceConnectorsApi.md +docs/CreateAIPlatformConnector.md +docs/CreateAIPlatformConnectorRequestInner.md +docs/CreateAIPlatformConnectorResponse.md +docs/CreateDestinationConnector.md +docs/CreateDestinationConnectorRequestInner.md +docs/CreateDestinationConnectorResponse.md +docs/CreatePipelineResponse.md +docs/CreatePipelineResponseData.md +docs/CreateSourceConnector.md +docs/CreateSourceConnectorRequestInner.md +docs/CreateSourceConnectorResponse.md +docs/CreatedAIPlatformConnector.md +docs/CreatedDestinationConnector.md +docs/CreatedSourceConnector.md +docs/DATASTAXAuthConfig.md +docs/DATASTAXConfig.md +docs/DISCORDAuthConfig.md +docs/DISCORDConfig.md +docs/DROPBOXAuthConfig.md +docs/DROPBOXConfig.md +docs/DROPBOXOAUTHAuthConfig.md +docs/DROPBOXOAUTHMULTIAuthConfig.md +docs/DROPBOXOAUTHMULTICUSTOMAuthConfig.md +docs/Datastax.md +docs/Datastax1.md +docs/Datastax2.md +docs/DeepResearchResult.md +docs/DeleteAIPlatformConnectorResponse.md +docs/DeleteDestinationConnectorResponse.md +docs/DeleteFileResponse.md +docs/DeletePipelineResponse.md +docs/DeleteSourceConnectorResponse.md +docs/DestinationConnector.md +docs/DestinationConnectorInput.md +docs/DestinationConnectorInputConfig.md +docs/DestinationConnectorSchema.md +docs/DestinationConnectorType.md +docs/Discord.md +docs/Discord1.md +docs/Discord2.md +docs/Document.md +docs/Dropbox.md +docs/Dropbox1.md +docs/Dropbox2.md +docs/DropboxOauth.md +docs/DropboxOauth1.md +docs/DropboxOauth2.md +docs/DropboxOauthMulti.md +docs/DropboxOauthMulti1.md +docs/DropboxOauthMulti2.md +docs/DropboxOauthMultiCustom.md +docs/DropboxOauthMultiCustom1.md +docs/DropboxOauthMultiCustom2.md +docs/ELASTICAuthConfig.md +docs/ELASTICConfig.md +docs/Elastic.md +docs/Elastic1.md +docs/Elastic2.md +docs/ExtractionApi.md 
+docs/ExtractionChunkingStrategy.md +docs/ExtractionResult.md +docs/ExtractionResultResponse.md +docs/ExtractionType.md +docs/FILEUPLOADAuthConfig.md +docs/FIRECRAWLAuthConfig.md +docs/FIRECRAWLConfig.md +docs/FIREFLIESAuthConfig.md +docs/FIREFLIESConfig.md +docs/FileUpload.md +docs/FileUpload1.md +docs/FileUpload2.md +docs/FilesApi.md +docs/Firecrawl.md +docs/Firecrawl1.md +docs/Firecrawl2.md +docs/Fireflies.md +docs/Fireflies1.md +docs/Fireflies2.md +docs/GCSAuthConfig.md +docs/GCSConfig.md +docs/GITHUBAuthConfig.md +docs/GITHUBConfig.md +docs/GOOGLEDRIVEAuthConfig.md +docs/GOOGLEDRIVEConfig.md +docs/GOOGLEDRIVEOAUTHAuthConfig.md +docs/GOOGLEDRIVEOAUTHConfig.md +docs/GOOGLEDRIVEOAUTHMULTIAuthConfig.md +docs/GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig.md +docs/GOOGLEDRIVEOAUTHMULTICUSTOMConfig.md +docs/GOOGLEDRIVEOAUTHMULTIConfig.md +docs/GetAIPlatformConnectors200Response.md +docs/GetDeepResearchResponse.md +docs/GetDestinationConnectors200Response.md +docs/GetPipelineEventsResponse.md +docs/GetPipelineMetricsResponse.md +docs/GetPipelineResponse.md +docs/GetPipelines400Response.md +docs/GetPipelinesResponse.md +docs/GetSourceConnectors200Response.md +docs/GetUploadFilesResponse.md +docs/Github.md +docs/Github1.md +docs/Github2.md +docs/GoogleCloudStorage.md +docs/GoogleCloudStorage1.md +docs/GoogleCloudStorage2.md +docs/GoogleDrive.md +docs/GoogleDrive1.md +docs/GoogleDrive2.md +docs/GoogleDriveOAuth.md +docs/GoogleDriveOAuth1.md +docs/GoogleDriveOAuth2.md +docs/GoogleDriveOauthMulti.md +docs/GoogleDriveOauthMulti1.md +docs/GoogleDriveOauthMulti2.md +docs/GoogleDriveOauthMultiCustom.md +docs/GoogleDriveOauthMultiCustom1.md +docs/GoogleDriveOauthMultiCustom2.md +docs/INTERCOMAuthConfig.md +docs/INTERCOMConfig.md +docs/Intercom.md +docs/Intercom1.md +docs/Intercom2.md +docs/MILVUSAuthConfig.md +docs/MILVUSConfig.md +docs/MetadataExtractionStrategy.md +docs/MetadataExtractionStrategySchema.md +docs/Milvus.md +docs/Milvus1.md +docs/Milvus2.md +docs/N8NConfig.md 
+docs/NOTIONAuthConfig.md +docs/NOTIONConfig.md +docs/NOTIONOAUTHMULTIAuthConfig.md +docs/NOTIONOAUTHMULTICUSTOMAuthConfig.md +docs/Notion.md +docs/Notion1.md +docs/Notion2.md +docs/NotionOauthMulti.md +docs/NotionOauthMulti1.md +docs/NotionOauthMulti2.md +docs/NotionOauthMultiCustom.md +docs/NotionOauthMultiCustom1.md +docs/NotionOauthMultiCustom2.md +docs/ONEDRIVEAuthConfig.md +docs/ONEDRIVEConfig.md +docs/OPENAIAuthConfig.md +docs/OneDrive.md +docs/OneDrive1.md +docs/OneDrive2.md +docs/Openai.md +docs/Openai1.md +docs/Openai2.md +docs/PINECONEAuthConfig.md +docs/PINECONEConfig.md +docs/POSTGRESQLAuthConfig.md +docs/POSTGRESQLConfig.md +docs/Pinecone.md +docs/Pinecone1.md +docs/Pinecone2.md +docs/PipelineAIPlatformRequestInner.md +docs/PipelineConfigurationSchema.md +docs/PipelineDestinationConnectorRequestInner.md +docs/PipelineEvents.md +docs/PipelineListSummary.md +docs/PipelineMetrics.md +docs/PipelineSourceConnectorRequestInner.md +docs/PipelineSummary.md +docs/PipelinesApi.md +docs/Postgresql.md +docs/Postgresql1.md +docs/Postgresql2.md +docs/QDRANTAuthConfig.md +docs/QDRANTConfig.md +docs/Qdrant.md +docs/Qdrant1.md +docs/Qdrant2.md +docs/RemoveUserFromSourceConnectorRequest.md +docs/RemoveUserFromSourceConnectorResponse.md +docs/RetrieveContext.md +docs/RetrieveContextMessage.md +docs/RetrieveDocumentsRequest.md +docs/RetrieveDocumentsResponse.md +docs/SHAREPOINTAuthConfig.md +docs/SHAREPOINTConfig.md +docs/SINGLESTOREAuthConfig.md +docs/SINGLESTOREConfig.md +docs/SUPABASEAuthConfig.md +docs/SUPABASEConfig.md +docs/ScheduleSchema.md +docs/ScheduleSchemaType.md +docs/Sharepoint.md +docs/Sharepoint1.md +docs/Sharepoint2.md +docs/Singlestore.md +docs/Singlestore1.md +docs/Singlestore2.md +docs/SourceConnector.md +docs/SourceConnectorInput.md +docs/SourceConnectorInputConfig.md +docs/SourceConnectorSchema.md +docs/SourceConnectorType.md +docs/StartDeepResearchRequest.md +docs/StartDeepResearchResponse.md +docs/StartExtractionRequest.md 
+docs/StartExtractionResponse.md +docs/StartFileUploadRequest.md +docs/StartFileUploadResponse.md +docs/StartFileUploadToConnectorRequest.md +docs/StartFileUploadToConnectorResponse.md +docs/StartPipelineResponse.md +docs/StopPipelineResponse.md +docs/Supabase.md +docs/Supabase1.md +docs/Supabase2.md +docs/TURBOPUFFERAuthConfig.md +docs/TURBOPUFFERConfig.md +docs/Turbopuffer.md +docs/Turbopuffer1.md +docs/Turbopuffer2.md +docs/UpdateAIPlatformConnectorRequest.md +docs/UpdateAIPlatformConnectorResponse.md +docs/UpdateAiplatformConnectorRequest.md +docs/UpdateDestinationConnectorRequest.md +docs/UpdateDestinationConnectorResponse.md +docs/UpdateSourceConnectorRequest.md +docs/UpdateSourceConnectorResponse.md +docs/UpdateSourceConnectorResponseData.md +docs/UpdateUserInSourceConnectorRequest.md +docs/UpdateUserInSourceConnectorResponse.md +docs/UpdatedAIPlatformConnectorData.md +docs/UpdatedDestinationConnectorData.md +docs/UploadFile.md +docs/UploadsApi.md +docs/VERTEXAuthConfig.md +docs/VOYAGEAuthConfig.md +docs/Vertex.md +docs/Vertex1.md +docs/Vertex2.md +docs/Voyage.md +docs/Voyage1.md +docs/Voyage2.md +docs/WEAVIATEAuthConfig.md +docs/WEAVIATEConfig.md +docs/WEBCRAWLERAuthConfig.md +docs/WEBCRAWLERConfig.md +docs/Weaviate.md +docs/Weaviate1.md +docs/Weaviate2.md +docs/WebCrawler.md +docs/WebCrawler1.md +docs/WebCrawler2.md +git_push.sh +pyproject.toml +requirements.txt +setup.cfg +setup.py +test-requirements.txt +test/__init__.py +test/test_add_user_from_source_connector_response.py +test/test_add_user_to_source_connector_request.py +test/test_add_user_to_source_connector_request_selected_files.py +test/test_add_user_to_source_connector_request_selected_files_any_of.py +test/test_add_user_to_source_connector_request_selected_files_any_of_value.py +test/test_advanced_query.py +test/test_ai_platform.py +test/test_ai_platform_config_schema.py +test/test_ai_platform_input.py +test/test_ai_platform_schema.py +test/test_ai_platform_type.py +test/test_amazon_s3.py 
+test/test_amazon_s31.py +test/test_amazon_s32.py +test/test_awss3_auth_config.py +test/test_awss3_config.py +test/test_azure_blob_storage.py +test/test_azure_blob_storage1.py +test/test_azure_blob_storage2.py +test/test_azureaisearch.py +test/test_azureaisearch1.py +test/test_azureaisearch2.py +test/test_azureaisearch_auth_config.py +test/test_azureaisearch_config.py +test/test_azureblob_auth_config.py +test/test_azureblob_config.py +test/test_bedrock.py +test/test_bedrock1.py +test/test_bedrock2.py +test/test_bedrock_auth_config.py +test/test_capella.py +test/test_capella1.py +test/test_capella2.py +test/test_capella_auth_config.py +test/test_capella_config.py +test/test_confluence.py +test/test_confluence1.py +test/test_confluence2.py +test/test_confluence_auth_config.py +test/test_confluence_config.py +test/test_connectors_ai_platforms_api.py +test/test_connectors_destination_connectors_api.py +test/test_connectors_source_connectors_api.py +test/test_create_ai_platform_connector.py +test/test_create_ai_platform_connector_request_inner.py +test/test_create_ai_platform_connector_response.py +test/test_create_destination_connector.py +test/test_create_destination_connector_request_inner.py +test/test_create_destination_connector_response.py +test/test_create_pipeline_response.py +test/test_create_pipeline_response_data.py +test/test_create_source_connector.py +test/test_create_source_connector_request_inner.py +test/test_create_source_connector_response.py +test/test_created_ai_platform_connector.py +test/test_created_destination_connector.py +test/test_created_source_connector.py +test/test_datastax.py +test/test_datastax1.py +test/test_datastax2.py +test/test_datastax_auth_config.py +test/test_datastax_config.py +test/test_deep_research_result.py +test/test_delete_ai_platform_connector_response.py +test/test_delete_destination_connector_response.py +test/test_delete_file_response.py +test/test_delete_pipeline_response.py 
+test/test_delete_source_connector_response.py +test/test_destination_connector.py +test/test_destination_connector_input.py +test/test_destination_connector_input_config.py +test/test_destination_connector_schema.py +test/test_destination_connector_type.py +test/test_discord.py +test/test_discord1.py +test/test_discord2.py +test/test_discord_auth_config.py +test/test_discord_config.py +test/test_document.py +test/test_dropbox.py +test/test_dropbox1.py +test/test_dropbox2.py +test/test_dropbox_auth_config.py +test/test_dropbox_config.py +test/test_dropbox_oauth.py +test/test_dropbox_oauth1.py +test/test_dropbox_oauth2.py +test/test_dropbox_oauth_multi.py +test/test_dropbox_oauth_multi1.py +test/test_dropbox_oauth_multi2.py +test/test_dropbox_oauth_multi_custom.py +test/test_dropbox_oauth_multi_custom1.py +test/test_dropbox_oauth_multi_custom2.py +test/test_dropboxoauth_auth_config.py +test/test_dropboxoauthmulti_auth_config.py +test/test_dropboxoauthmulticustom_auth_config.py +test/test_elastic.py +test/test_elastic1.py +test/test_elastic2.py +test/test_elastic_auth_config.py +test/test_elastic_config.py +test/test_extraction_api.py +test/test_extraction_chunking_strategy.py +test/test_extraction_result.py +test/test_extraction_result_response.py +test/test_extraction_type.py +test/test_file_upload.py +test/test_file_upload1.py +test/test_file_upload2.py +test/test_files_api.py +test/test_fileupload_auth_config.py +test/test_firecrawl.py +test/test_firecrawl1.py +test/test_firecrawl2.py +test/test_firecrawl_auth_config.py +test/test_firecrawl_config.py +test/test_fireflies.py +test/test_fireflies1.py +test/test_fireflies2.py +test/test_fireflies_auth_config.py +test/test_fireflies_config.py +test/test_gcs_auth_config.py +test/test_gcs_config.py +test/test_get_ai_platform_connectors200_response.py +test/test_get_deep_research_response.py +test/test_get_destination_connectors200_response.py +test/test_get_pipeline_events_response.py 
+test/test_get_pipeline_metrics_response.py +test/test_get_pipeline_response.py +test/test_get_pipelines400_response.py +test/test_get_pipelines_response.py +test/test_get_source_connectors200_response.py +test/test_get_upload_files_response.py +test/test_github.py +test/test_github1.py +test/test_github2.py +test/test_github_auth_config.py +test/test_github_config.py +test/test_google_cloud_storage.py +test/test_google_cloud_storage1.py +test/test_google_cloud_storage2.py +test/test_google_drive.py +test/test_google_drive1.py +test/test_google_drive2.py +test/test_google_drive_o_auth.py +test/test_google_drive_o_auth1.py +test/test_google_drive_o_auth2.py +test/test_google_drive_oauth_multi.py +test/test_google_drive_oauth_multi1.py +test/test_google_drive_oauth_multi2.py +test/test_google_drive_oauth_multi_custom.py +test/test_google_drive_oauth_multi_custom1.py +test/test_google_drive_oauth_multi_custom2.py +test/test_googledrive_auth_config.py +test/test_googledrive_config.py +test/test_googledriveoauth_auth_config.py +test/test_googledriveoauth_config.py +test/test_googledriveoauthmulti_auth_config.py +test/test_googledriveoauthmulti_config.py +test/test_googledriveoauthmulticustom_auth_config.py +test/test_googledriveoauthmulticustom_config.py +test/test_intercom.py +test/test_intercom1.py +test/test_intercom2.py +test/test_intercom_auth_config.py +test/test_intercom_config.py +test/test_metadata_extraction_strategy.py +test/test_metadata_extraction_strategy_schema.py +test/test_milvus.py +test/test_milvus1.py +test/test_milvus2.py +test/test_milvus_auth_config.py +test/test_milvus_config.py +test/test_n8_n_config.py +test/test_notion.py +test/test_notion1.py +test/test_notion2.py +test/test_notion_auth_config.py +test/test_notion_config.py +test/test_notion_oauth_multi.py +test/test_notion_oauth_multi1.py +test/test_notion_oauth_multi2.py +test/test_notion_oauth_multi_custom.py +test/test_notion_oauth_multi_custom1.py +test/test_notion_oauth_multi_custom2.py 
+test/test_notionoauthmulti_auth_config.py +test/test_notionoauthmulticustom_auth_config.py +test/test_one_drive.py +test/test_one_drive1.py +test/test_one_drive2.py +test/test_onedrive_auth_config.py +test/test_onedrive_config.py +test/test_openai.py +test/test_openai1.py +test/test_openai2.py +test/test_openai_auth_config.py +test/test_pinecone.py +test/test_pinecone1.py +test/test_pinecone2.py +test/test_pinecone_auth_config.py +test/test_pinecone_config.py +test/test_pipeline_ai_platform_request_inner.py +test/test_pipeline_configuration_schema.py +test/test_pipeline_destination_connector_request_inner.py +test/test_pipeline_events.py +test/test_pipeline_list_summary.py +test/test_pipeline_metrics.py +test/test_pipeline_source_connector_request_inner.py +test/test_pipeline_summary.py +test/test_pipelines_api.py +test/test_postgresql.py +test/test_postgresql1.py +test/test_postgresql2.py +test/test_postgresql_auth_config.py +test/test_postgresql_config.py +test/test_qdrant.py +test/test_qdrant1.py +test/test_qdrant2.py +test/test_qdrant_auth_config.py +test/test_qdrant_config.py +test/test_remove_user_from_source_connector_request.py +test/test_remove_user_from_source_connector_response.py +test/test_retrieve_context.py +test/test_retrieve_context_message.py +test/test_retrieve_documents_request.py +test/test_retrieve_documents_response.py +test/test_schedule_schema.py +test/test_schedule_schema_type.py +test/test_sharepoint.py +test/test_sharepoint1.py +test/test_sharepoint2.py +test/test_sharepoint_auth_config.py +test/test_sharepoint_config.py +test/test_singlestore.py +test/test_singlestore1.py +test/test_singlestore2.py +test/test_singlestore_auth_config.py +test/test_singlestore_config.py +test/test_source_connector.py +test/test_source_connector_input.py +test/test_source_connector_input_config.py +test/test_source_connector_schema.py +test/test_source_connector_type.py +test/test_start_deep_research_request.py +test/test_start_deep_research_response.py 
+test/test_start_extraction_request.py +test/test_start_extraction_response.py +test/test_start_file_upload_request.py +test/test_start_file_upload_response.py +test/test_start_file_upload_to_connector_request.py +test/test_start_file_upload_to_connector_response.py +test/test_start_pipeline_response.py +test/test_stop_pipeline_response.py +test/test_supabase.py +test/test_supabase1.py +test/test_supabase2.py +test/test_supabase_auth_config.py +test/test_supabase_config.py +test/test_turbopuffer.py +test/test_turbopuffer1.py +test/test_turbopuffer2.py +test/test_turbopuffer_auth_config.py +test/test_turbopuffer_config.py +test/test_update_ai_platform_connector_request.py +test/test_update_ai_platform_connector_response.py +test/test_update_aiplatform_connector_request.py +test/test_update_destination_connector_request.py +test/test_update_destination_connector_response.py +test/test_update_source_connector_request.py +test/test_update_source_connector_response.py +test/test_update_source_connector_response_data.py +test/test_update_user_in_source_connector_request.py +test/test_update_user_in_source_connector_response.py +test/test_updated_ai_platform_connector_data.py +test/test_updated_destination_connector_data.py +test/test_upload_file.py +test/test_uploads_api.py +test/test_vertex.py +test/test_vertex1.py +test/test_vertex2.py +test/test_vertex_auth_config.py +test/test_voyage.py +test/test_voyage1.py +test/test_voyage2.py +test/test_voyage_auth_config.py +test/test_weaviate.py +test/test_weaviate1.py +test/test_weaviate2.py +test/test_weaviate_auth_config.py +test/test_weaviate_config.py +test/test_web_crawler.py +test/test_web_crawler1.py +test/test_web_crawler2.py +test/test_webcrawler_auth_config.py +test/test_webcrawler_config.py +tox.ini +vectorize_client/__init__.py +vectorize_client/api/__init__.py +vectorize_client/api/connectors_ai_platforms_api.py +vectorize_client/api/connectors_destination_connectors_api.py 
+vectorize_client/api/connectors_source_connectors_api.py +vectorize_client/api/extraction_api.py +vectorize_client/api/files_api.py +vectorize_client/api/pipelines_api.py +vectorize_client/api/uploads_api.py +vectorize_client/api_client.py +vectorize_client/api_response.py +vectorize_client/configuration.py +vectorize_client/exceptions.py +vectorize_client/models/__init__.py +vectorize_client/models/add_user_from_source_connector_response.py +vectorize_client/models/add_user_to_source_connector_request.py +vectorize_client/models/add_user_to_source_connector_request_selected_files.py +vectorize_client/models/add_user_to_source_connector_request_selected_files_any_of.py +vectorize_client/models/add_user_to_source_connector_request_selected_files_any_of_value.py +vectorize_client/models/advanced_query.py +vectorize_client/models/ai_platform.py +vectorize_client/models/ai_platform_config_schema.py +vectorize_client/models/ai_platform_input.py +vectorize_client/models/ai_platform_schema.py +vectorize_client/models/ai_platform_type.py +vectorize_client/models/amazon_s3.py +vectorize_client/models/amazon_s31.py +vectorize_client/models/amazon_s32.py +vectorize_client/models/awss3_auth_config.py +vectorize_client/models/awss3_config.py +vectorize_client/models/azure_blob_storage.py +vectorize_client/models/azure_blob_storage1.py +vectorize_client/models/azure_blob_storage2.py +vectorize_client/models/azureaisearch.py +vectorize_client/models/azureaisearch1.py +vectorize_client/models/azureaisearch2.py +vectorize_client/models/azureaisearch_auth_config.py +vectorize_client/models/azureaisearch_config.py +vectorize_client/models/azureblob_auth_config.py +vectorize_client/models/azureblob_config.py +vectorize_client/models/bedrock.py +vectorize_client/models/bedrock1.py +vectorize_client/models/bedrock2.py +vectorize_client/models/bedrock_auth_config.py +vectorize_client/models/capella.py +vectorize_client/models/capella1.py +vectorize_client/models/capella2.py 
+vectorize_client/models/capella_auth_config.py +vectorize_client/models/capella_config.py +vectorize_client/models/confluence.py +vectorize_client/models/confluence1.py +vectorize_client/models/confluence2.py +vectorize_client/models/confluence_auth_config.py +vectorize_client/models/confluence_config.py +vectorize_client/models/create_ai_platform_connector.py +vectorize_client/models/create_ai_platform_connector_request_inner.py +vectorize_client/models/create_ai_platform_connector_response.py +vectorize_client/models/create_destination_connector.py +vectorize_client/models/create_destination_connector_request_inner.py +vectorize_client/models/create_destination_connector_response.py +vectorize_client/models/create_pipeline_response.py +vectorize_client/models/create_pipeline_response_data.py +vectorize_client/models/create_source_connector.py +vectorize_client/models/create_source_connector_request_inner.py +vectorize_client/models/create_source_connector_response.py +vectorize_client/models/created_ai_platform_connector.py +vectorize_client/models/created_destination_connector.py +vectorize_client/models/created_source_connector.py +vectorize_client/models/datastax.py +vectorize_client/models/datastax1.py +vectorize_client/models/datastax2.py +vectorize_client/models/datastax_auth_config.py +vectorize_client/models/datastax_config.py +vectorize_client/models/deep_research_result.py +vectorize_client/models/delete_ai_platform_connector_response.py +vectorize_client/models/delete_destination_connector_response.py +vectorize_client/models/delete_file_response.py +vectorize_client/models/delete_pipeline_response.py +vectorize_client/models/delete_source_connector_response.py +vectorize_client/models/destination_connector.py +vectorize_client/models/destination_connector_input.py +vectorize_client/models/destination_connector_input_config.py +vectorize_client/models/destination_connector_schema.py +vectorize_client/models/destination_connector_type.py 
+vectorize_client/models/discord.py +vectorize_client/models/discord1.py +vectorize_client/models/discord2.py +vectorize_client/models/discord_auth_config.py +vectorize_client/models/discord_config.py +vectorize_client/models/document.py +vectorize_client/models/dropbox.py +vectorize_client/models/dropbox1.py +vectorize_client/models/dropbox2.py +vectorize_client/models/dropbox_auth_config.py +vectorize_client/models/dropbox_config.py +vectorize_client/models/dropbox_oauth.py +vectorize_client/models/dropbox_oauth1.py +vectorize_client/models/dropbox_oauth2.py +vectorize_client/models/dropbox_oauth_multi.py +vectorize_client/models/dropbox_oauth_multi1.py +vectorize_client/models/dropbox_oauth_multi2.py +vectorize_client/models/dropbox_oauth_multi_custom.py +vectorize_client/models/dropbox_oauth_multi_custom1.py +vectorize_client/models/dropbox_oauth_multi_custom2.py +vectorize_client/models/dropboxoauth_auth_config.py +vectorize_client/models/dropboxoauthmulti_auth_config.py +vectorize_client/models/dropboxoauthmulticustom_auth_config.py +vectorize_client/models/elastic.py +vectorize_client/models/elastic1.py +vectorize_client/models/elastic2.py +vectorize_client/models/elastic_auth_config.py +vectorize_client/models/elastic_config.py +vectorize_client/models/extraction_chunking_strategy.py +vectorize_client/models/extraction_result.py +vectorize_client/models/extraction_result_response.py +vectorize_client/models/extraction_type.py +vectorize_client/models/file_upload.py +vectorize_client/models/file_upload1.py +vectorize_client/models/file_upload2.py +vectorize_client/models/fileupload_auth_config.py +vectorize_client/models/firecrawl.py +vectorize_client/models/firecrawl1.py +vectorize_client/models/firecrawl2.py +vectorize_client/models/firecrawl_auth_config.py +vectorize_client/models/firecrawl_config.py +vectorize_client/models/fireflies.py +vectorize_client/models/fireflies1.py +vectorize_client/models/fireflies2.py 
+vectorize_client/models/fireflies_auth_config.py +vectorize_client/models/fireflies_config.py +vectorize_client/models/gcs_auth_config.py +vectorize_client/models/gcs_config.py +vectorize_client/models/get_ai_platform_connectors200_response.py +vectorize_client/models/get_deep_research_response.py +vectorize_client/models/get_destination_connectors200_response.py +vectorize_client/models/get_pipeline_events_response.py +vectorize_client/models/get_pipeline_metrics_response.py +vectorize_client/models/get_pipeline_response.py +vectorize_client/models/get_pipelines400_response.py +vectorize_client/models/get_pipelines_response.py +vectorize_client/models/get_source_connectors200_response.py +vectorize_client/models/get_upload_files_response.py +vectorize_client/models/github.py +vectorize_client/models/github1.py +vectorize_client/models/github2.py +vectorize_client/models/github_auth_config.py +vectorize_client/models/github_config.py +vectorize_client/models/google_cloud_storage.py +vectorize_client/models/google_cloud_storage1.py +vectorize_client/models/google_cloud_storage2.py +vectorize_client/models/google_drive.py +vectorize_client/models/google_drive1.py +vectorize_client/models/google_drive2.py +vectorize_client/models/google_drive_o_auth.py +vectorize_client/models/google_drive_o_auth1.py +vectorize_client/models/google_drive_o_auth2.py +vectorize_client/models/google_drive_oauth_multi.py +vectorize_client/models/google_drive_oauth_multi1.py +vectorize_client/models/google_drive_oauth_multi2.py +vectorize_client/models/google_drive_oauth_multi_custom.py +vectorize_client/models/google_drive_oauth_multi_custom1.py +vectorize_client/models/google_drive_oauth_multi_custom2.py +vectorize_client/models/googledrive_auth_config.py +vectorize_client/models/googledrive_config.py +vectorize_client/models/googledriveoauth_auth_config.py +vectorize_client/models/googledriveoauth_config.py +vectorize_client/models/googledriveoauthmulti_auth_config.py 
+vectorize_client/models/googledriveoauthmulti_config.py +vectorize_client/models/googledriveoauthmulticustom_auth_config.py +vectorize_client/models/googledriveoauthmulticustom_config.py +vectorize_client/models/intercom.py +vectorize_client/models/intercom1.py +vectorize_client/models/intercom2.py +vectorize_client/models/intercom_auth_config.py +vectorize_client/models/intercom_config.py +vectorize_client/models/metadata_extraction_strategy.py +vectorize_client/models/metadata_extraction_strategy_schema.py +vectorize_client/models/milvus.py +vectorize_client/models/milvus1.py +vectorize_client/models/milvus2.py +vectorize_client/models/milvus_auth_config.py +vectorize_client/models/milvus_config.py +vectorize_client/models/n8_n_config.py +vectorize_client/models/notion.py +vectorize_client/models/notion1.py +vectorize_client/models/notion2.py +vectorize_client/models/notion_auth_config.py +vectorize_client/models/notion_config.py +vectorize_client/models/notion_oauth_multi.py +vectorize_client/models/notion_oauth_multi1.py +vectorize_client/models/notion_oauth_multi2.py +vectorize_client/models/notion_oauth_multi_custom.py +vectorize_client/models/notion_oauth_multi_custom1.py +vectorize_client/models/notion_oauth_multi_custom2.py +vectorize_client/models/notionoauthmulti_auth_config.py +vectorize_client/models/notionoauthmulticustom_auth_config.py +vectorize_client/models/one_drive.py +vectorize_client/models/one_drive1.py +vectorize_client/models/one_drive2.py +vectorize_client/models/onedrive_auth_config.py +vectorize_client/models/onedrive_config.py +vectorize_client/models/openai.py +vectorize_client/models/openai1.py +vectorize_client/models/openai2.py +vectorize_client/models/openai_auth_config.py +vectorize_client/models/pinecone.py +vectorize_client/models/pinecone1.py +vectorize_client/models/pinecone2.py +vectorize_client/models/pinecone_auth_config.py +vectorize_client/models/pinecone_config.py 
+vectorize_client/models/pipeline_ai_platform_request_inner.py +vectorize_client/models/pipeline_configuration_schema.py +vectorize_client/models/pipeline_destination_connector_request_inner.py +vectorize_client/models/pipeline_events.py +vectorize_client/models/pipeline_list_summary.py +vectorize_client/models/pipeline_metrics.py +vectorize_client/models/pipeline_source_connector_request_inner.py +vectorize_client/models/pipeline_summary.py +vectorize_client/models/postgresql.py +vectorize_client/models/postgresql1.py +vectorize_client/models/postgresql2.py +vectorize_client/models/postgresql_auth_config.py +vectorize_client/models/postgresql_config.py +vectorize_client/models/qdrant.py +vectorize_client/models/qdrant1.py +vectorize_client/models/qdrant2.py +vectorize_client/models/qdrant_auth_config.py +vectorize_client/models/qdrant_config.py +vectorize_client/models/remove_user_from_source_connector_request.py +vectorize_client/models/remove_user_from_source_connector_response.py +vectorize_client/models/retrieve_context.py +vectorize_client/models/retrieve_context_message.py +vectorize_client/models/retrieve_documents_request.py +vectorize_client/models/retrieve_documents_response.py +vectorize_client/models/schedule_schema.py +vectorize_client/models/schedule_schema_type.py +vectorize_client/models/sharepoint.py +vectorize_client/models/sharepoint1.py +vectorize_client/models/sharepoint2.py +vectorize_client/models/sharepoint_auth_config.py +vectorize_client/models/sharepoint_config.py +vectorize_client/models/singlestore.py +vectorize_client/models/singlestore1.py +vectorize_client/models/singlestore2.py +vectorize_client/models/singlestore_auth_config.py +vectorize_client/models/singlestore_config.py +vectorize_client/models/source_connector.py +vectorize_client/models/source_connector_input.py +vectorize_client/models/source_connector_input_config.py +vectorize_client/models/source_connector_schema.py +vectorize_client/models/source_connector_type.py 
+vectorize_client/models/start_deep_research_request.py +vectorize_client/models/start_deep_research_response.py +vectorize_client/models/start_extraction_request.py +vectorize_client/models/start_extraction_response.py +vectorize_client/models/start_file_upload_request.py +vectorize_client/models/start_file_upload_response.py +vectorize_client/models/start_file_upload_to_connector_request.py +vectorize_client/models/start_file_upload_to_connector_response.py +vectorize_client/models/start_pipeline_response.py +vectorize_client/models/stop_pipeline_response.py +vectorize_client/models/supabase.py +vectorize_client/models/supabase1.py +vectorize_client/models/supabase2.py +vectorize_client/models/supabase_auth_config.py +vectorize_client/models/supabase_config.py +vectorize_client/models/turbopuffer.py +vectorize_client/models/turbopuffer1.py +vectorize_client/models/turbopuffer2.py +vectorize_client/models/turbopuffer_auth_config.py +vectorize_client/models/turbopuffer_config.py +vectorize_client/models/update_ai_platform_connector_request.py +vectorize_client/models/update_ai_platform_connector_response.py +vectorize_client/models/update_aiplatform_connector_request0.py +vectorize_client/models/update_destination_connector_request.py +vectorize_client/models/update_destination_connector_response.py +vectorize_client/models/update_source_connector_request.py +vectorize_client/models/update_source_connector_response.py +vectorize_client/models/update_source_connector_response_data.py +vectorize_client/models/update_user_in_source_connector_request.py +vectorize_client/models/update_user_in_source_connector_response.py +vectorize_client/models/updated_ai_platform_connector_data.py +vectorize_client/models/updated_destination_connector_data.py +vectorize_client/models/upload_file.py +vectorize_client/models/vertex.py +vectorize_client/models/vertex1.py +vectorize_client/models/vertex2.py +vectorize_client/models/vertex_auth_config.py +vectorize_client/models/voyage.py 
+vectorize_client/models/voyage1.py +vectorize_client/models/voyage2.py +vectorize_client/models/voyage_auth_config.py +vectorize_client/models/weaviate.py +vectorize_client/models/weaviate1.py +vectorize_client/models/weaviate2.py +vectorize_client/models/weaviate_auth_config.py +vectorize_client/models/weaviate_config.py +vectorize_client/models/web_crawler.py +vectorize_client/models/web_crawler1.py +vectorize_client/models/web_crawler2.py +vectorize_client/models/webcrawler_auth_config.py +vectorize_client/models/webcrawler_config.py +vectorize_client/py.typed +vectorize_client/rest.py diff --git a/.openapi-generator/VERSION b/.openapi-generator/VERSION new file mode 100644 index 0000000..e465da4 --- /dev/null +++ b/.openapi-generator/VERSION @@ -0,0 +1 @@ +7.14.0 diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..6b7cfde --- /dev/null +++ b/.travis.yml @@ -0,0 +1,17 @@ +# ref: https://docs.travis-ci.com/user/languages/python +language: python +python: + - "3.9" + - "3.10" + - "3.11" + - "3.12" + - "3.13" + # uncomment the following if needed + #- "3.13-dev" # 3.13 development branch + #- "nightly" # nightly build +# command to install dependencies +install: + - "pip install -r requirements.txt" + - "pip install -r test-requirements.txt" +# command to run tests +script: pytest --cov=vectorize_client diff --git a/README.md b/README.md index 96e6660..fbc91f0 100644 --- a/README.md +++ b/README.md @@ -1,52 +1,443 @@ -# Vectorize Clients +# vectorize-client +API for Vectorize services -

- - PyPI - - - Pypi - - - NPM - -

+This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: -This repository contains source and test code for **Vectorize** clients in different languages. +- API version: 0.0.1 +- Package version: 1.0.0 +- Generator version: 7.14.0 +- Build package: org.openapitools.codegen.languages.PythonClientCodegen +For more information, please visit [https://vectorize.io](https://vectorize.io) -The clients are generated automatically using OpenAPI generator, starting from the OpenAPI specification in the `vectorize_api.json` file that is downloaded from the [Vectorize Platform OpenAPI endpoint](https://platform.vectorize.io/api/openapi). +## Requirements. +Python 3.9+ -## How to -- Python - - [Getting started](./src/python/README.md) - - [Official documentation](https://docs.vectorize.io/api/api-getting-started) - - [Code Reference](https://vectorize-io.github.io/vectorize-clients/python/vectorize_client/api.html) -- TypeScript - - [Getting started](./src/ts/README.md) - - [Official documentation](https://docs.vectorize.io/api/api-getting-started) - - [Code Reference](https://vectorize-io.github.io/vectorize-clients/ts/) +## Installation & Usage +### pip install +If the python package is hosted on a repository, you can install directly using: +```sh +pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git +``` +(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`) + +Then import the package: +```python +import vectorize_client +``` + +### Setuptools -## Generate and release clients -To generate a client, run the following command: +Install via [Setuptools](http://pypi.python.org/pypi/setuptools). 
-```bash
-npm install
+```sh
+python setup.py install --user
+```
+(or `sudo python setup.py install` to install the package for all users)
-npm run generate:ts
-npm run generate:python
+Then import the package:
+```python
+import vectorize_client
 ```
-To release a client, run the following command:
+### Tests
+
+Execute `pytest` to run the tests.
+
+## Getting Started
+
+Please follow the [installation procedure](#installation--usage) and then run the following:
+
+```python
+
+import os
+
+import vectorize_client
+from vectorize_client.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to https://api.vectorize.io/v1
+# See configuration.py for a list of all supported configuration parameters.
+configuration = vectorize_client.Configuration(
+    host = "https://api.vectorize.io/v1"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below, use the example that
+# satisfies your auth use case.
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsAIPlatformsApi(api_client) + organization_id = 'organization_id_example' # str | + create_ai_platform_connector_request_inner = [{"name":"My CreateAIPlatformConnectorRequest","type":"BEDROCK","config":{"name":"My BEDROCKAuthConfig","access-key":"AKIAIOSFODNN7EXAMPLE","key":"key_example_123456","region":"us-east-1"}}] # List[CreateAIPlatformConnectorRequestInner] | -```bash -npm install + try: + # Create a new AI platform connector + api_response = api_instance.create_ai_platform_connector(organization_id, create_ai_platform_connector_request_inner) + print("The response of ConnectorsAIPlatformsApi->create_ai_platform_connector:\n") + pprint(api_response) + except ApiException as e: + print("Exception when calling ConnectorsAIPlatformsApi->create_ai_platform_connector: %s\n" % e) -npm run release:ts -npm run release:python ``` +## Documentation for API Endpoints + +All URIs are relative to *https://api.vectorize.io/v1* + +Class | Method | HTTP request | Description +------------ | ------------- | ------------- | ------------- +*ConnectorsAIPlatformsApi* | [**create_ai_platform_connector**](docs/ConnectorsAIPlatformsApi.md#create_ai_platform_connector) | **POST** /org/{organizationId}/connectors/aiplatforms | Create a new AI platform connector +*ConnectorsAIPlatformsApi* | [**delete_ai_platform**](docs/ConnectorsAIPlatformsApi.md#delete_ai_platform) | **DELETE** /org/{organizationId}/connectors/aiplatforms/{aiplatformId} | Delete an AI platform connector +*ConnectorsAIPlatformsApi* | [**get_ai_platform_connector**](docs/ConnectorsAIPlatformsApi.md#get_ai_platform_connector) | **GET** 
/org/{organizationId}/connectors/aiplatforms/{aiplatformId} | Get an AI platform connector +*ConnectorsAIPlatformsApi* | [**get_ai_platform_connectors**](docs/ConnectorsAIPlatformsApi.md#get_ai_platform_connectors) | **GET** /org/{organizationId}/connectors/aiplatforms | Get all existing AI Platform connectors +*ConnectorsAIPlatformsApi* | [**update_ai_platform_connector**](docs/ConnectorsAIPlatformsApi.md#update_ai_platform_connector) | **PATCH** /org/{organizationId}/connectors/aiplatforms/{aiplatformId} | Update an AI Platform connector +*ConnectorsDestinationConnectorsApi* | [**create_destination_connector**](docs/ConnectorsDestinationConnectorsApi.md#create_destination_connector) | **POST** /org/{organizationId}/connectors/destinations | Create a new destination connector +*ConnectorsDestinationConnectorsApi* | [**delete_destination_connector**](docs/ConnectorsDestinationConnectorsApi.md#delete_destination_connector) | **DELETE** /org/{organizationId}/connectors/destinations/{destinationConnectorId} | Delete a destination connector +*ConnectorsDestinationConnectorsApi* | [**get_destination_connector**](docs/ConnectorsDestinationConnectorsApi.md#get_destination_connector) | **GET** /org/{organizationId}/connectors/destinations/{destinationConnectorId} | Get a destination connector +*ConnectorsDestinationConnectorsApi* | [**get_destination_connectors**](docs/ConnectorsDestinationConnectorsApi.md#get_destination_connectors) | **GET** /org/{organizationId}/connectors/destinations | Get all existing destination connectors +*ConnectorsDestinationConnectorsApi* | [**update_destination_connector**](docs/ConnectorsDestinationConnectorsApi.md#update_destination_connector) | **PATCH** /org/{organizationId}/connectors/destinations/{destinationConnectorId} | Update a destination connector +*ConnectorsSourceConnectorsApi* | [**add_user_to_source_connector**](docs/ConnectorsSourceConnectorsApi.md#add_user_to_source_connector) | **POST** 
/org/{organizationId}/connectors/sources/{sourceConnectorId}/users | Add a user to a source connector +*ConnectorsSourceConnectorsApi* | [**create_source_connector**](docs/ConnectorsSourceConnectorsApi.md#create_source_connector) | **POST** /org/{organizationId}/connectors/sources | Create a new source connector +*ConnectorsSourceConnectorsApi* | [**delete_source_connector**](docs/ConnectorsSourceConnectorsApi.md#delete_source_connector) | **DELETE** /org/{organizationId}/connectors/sources/{sourceConnectorId} | Delete a source connector +*ConnectorsSourceConnectorsApi* | [**delete_user_from_source_connector**](docs/ConnectorsSourceConnectorsApi.md#delete_user_from_source_connector) | **DELETE** /org/{organizationId}/connectors/sources/{sourceConnectorId}/users | Delete a source connector user +*ConnectorsSourceConnectorsApi* | [**get_source_connector**](docs/ConnectorsSourceConnectorsApi.md#get_source_connector) | **GET** /org/{organizationId}/connectors/sources/{sourceConnectorId} | Get a source connector +*ConnectorsSourceConnectorsApi* | [**get_source_connectors**](docs/ConnectorsSourceConnectorsApi.md#get_source_connectors) | **GET** /org/{organizationId}/connectors/sources | Get all existing source connectors +*ConnectorsSourceConnectorsApi* | [**update_source_connector**](docs/ConnectorsSourceConnectorsApi.md#update_source_connector) | **PATCH** /org/{organizationId}/connectors/sources/{sourceConnectorId} | Update a source connector +*ConnectorsSourceConnectorsApi* | [**update_user_in_source_connector**](docs/ConnectorsSourceConnectorsApi.md#update_user_in_source_connector) | **PATCH** /org/{organizationId}/connectors/sources/{sourceConnectorId}/users | Update a source connector user +*ExtractionApi* | [**get_extraction_result**](docs/ExtractionApi.md#get_extraction_result) | **GET** /org/{organizationId}/extraction/{extractionId} | Get extraction result +*ExtractionApi* | [**start_extraction**](docs/ExtractionApi.md#start_extraction) | **POST** 
/org/{organizationId}/extraction | Start content extraction from a file +*FilesApi* | [**start_file_upload**](docs/FilesApi.md#start_file_upload) | **POST** /org/{organizationId}/files | Upload a generic file to the platform +*PipelinesApi* | [**create_pipeline**](docs/PipelinesApi.md#create_pipeline) | **POST** /org/{organizationId}/pipelines | Create a new pipeline +*PipelinesApi* | [**delete_pipeline**](docs/PipelinesApi.md#delete_pipeline) | **DELETE** /org/{organizationId}/pipelines/{pipelineId} | Delete a pipeline +*PipelinesApi* | [**get_deep_research_result**](docs/PipelinesApi.md#get_deep_research_result) | **GET** /org/{organizationId}/pipelines/{pipelineId}/deep-research/{researchId} | Get deep research result +*PipelinesApi* | [**get_pipeline**](docs/PipelinesApi.md#get_pipeline) | **GET** /org/{organizationId}/pipelines/{pipelineId} | Get a pipeline +*PipelinesApi* | [**get_pipeline_events**](docs/PipelinesApi.md#get_pipeline_events) | **GET** /org/{organizationId}/pipelines/{pipelineId}/events | Get pipeline events +*PipelinesApi* | [**get_pipeline_metrics**](docs/PipelinesApi.md#get_pipeline_metrics) | **GET** /org/{organizationId}/pipelines/{pipelineId}/metrics | Get pipeline metrics +*PipelinesApi* | [**get_pipelines**](docs/PipelinesApi.md#get_pipelines) | **GET** /org/{organizationId}/pipelines | Get all pipelines +*PipelinesApi* | [**retrieve_documents**](docs/PipelinesApi.md#retrieve_documents) | **POST** /org/{organizationId}/pipelines/{pipelineId}/retrieval | Retrieve documents from a pipeline +*PipelinesApi* | [**start_deep_research**](docs/PipelinesApi.md#start_deep_research) | **POST** /org/{organizationId}/pipelines/{pipelineId}/deep-research | Start a deep research +*PipelinesApi* | [**start_pipeline**](docs/PipelinesApi.md#start_pipeline) | **POST** /org/{organizationId}/pipelines/{pipelineId}/start | Start a pipeline +*PipelinesApi* | [**stop_pipeline**](docs/PipelinesApi.md#stop_pipeline) | **POST** 
/org/{organizationId}/pipelines/{pipelineId}/stop | Stop a pipeline +*UploadsApi* | [**delete_file_from_connector**](docs/UploadsApi.md#delete_file_from_connector) | **DELETE** /org/{organizationId}/uploads/{connectorId}/files | Delete a file from a file upload connector +*UploadsApi* | [**get_upload_files_from_connector**](docs/UploadsApi.md#get_upload_files_from_connector) | **GET** /org/{organizationId}/uploads/{connectorId}/files | Get uploaded files from a file upload connector +*UploadsApi* | [**start_file_upload_to_connector**](docs/UploadsApi.md#start_file_upload_to_connector) | **PUT** /org/{organizationId}/uploads/{connectorId}/files | Upload a file to a file upload connector + + +## Documentation For Models + + - [AIPlatform](docs/AIPlatform.md) + - [AIPlatformConfigSchema](docs/AIPlatformConfigSchema.md) + - [AIPlatformInput](docs/AIPlatformInput.md) + - [AIPlatformSchema](docs/AIPlatformSchema.md) + - [AIPlatformType](docs/AIPlatformType.md) + - [AWSS3AuthConfig](docs/AWSS3AuthConfig.md) + - [AWSS3Config](docs/AWSS3Config.md) + - [AZUREAISEARCHAuthConfig](docs/AZUREAISEARCHAuthConfig.md) + - [AZUREAISEARCHConfig](docs/AZUREAISEARCHConfig.md) + - [AZUREBLOBAuthConfig](docs/AZUREBLOBAuthConfig.md) + - [AZUREBLOBConfig](docs/AZUREBLOBConfig.md) + - [AddUserFromSourceConnectorResponse](docs/AddUserFromSourceConnectorResponse.md) + - [AddUserToSourceConnectorRequest](docs/AddUserToSourceConnectorRequest.md) + - [AddUserToSourceConnectorRequestSelectedFiles](docs/AddUserToSourceConnectorRequestSelectedFiles.md) + - [AddUserToSourceConnectorRequestSelectedFilesAnyOf](docs/AddUserToSourceConnectorRequestSelectedFilesAnyOf.md) + - [AddUserToSourceConnectorRequestSelectedFilesAnyOfValue](docs/AddUserToSourceConnectorRequestSelectedFilesAnyOfValue.md) + - [AdvancedQuery](docs/AdvancedQuery.md) + - [AmazonS3](docs/AmazonS3.md) + - [AmazonS31](docs/AmazonS31.md) + - [AmazonS32](docs/AmazonS32.md) + - [AzureBlobStorage](docs/AzureBlobStorage.md) + - 
[AzureBlobStorage1](docs/AzureBlobStorage1.md) + - [AzureBlobStorage2](docs/AzureBlobStorage2.md) + - [Azureaisearch](docs/Azureaisearch.md) + - [Azureaisearch1](docs/Azureaisearch1.md) + - [Azureaisearch2](docs/Azureaisearch2.md) + - [BEDROCKAuthConfig](docs/BEDROCKAuthConfig.md) + - [Bedrock](docs/Bedrock.md) + - [Bedrock1](docs/Bedrock1.md) + - [Bedrock2](docs/Bedrock2.md) + - [CAPELLAAuthConfig](docs/CAPELLAAuthConfig.md) + - [CAPELLAConfig](docs/CAPELLAConfig.md) + - [CONFLUENCEAuthConfig](docs/CONFLUENCEAuthConfig.md) + - [CONFLUENCEConfig](docs/CONFLUENCEConfig.md) + - [Capella](docs/Capella.md) + - [Capella1](docs/Capella1.md) + - [Capella2](docs/Capella2.md) + - [Confluence](docs/Confluence.md) + - [Confluence1](docs/Confluence1.md) + - [Confluence2](docs/Confluence2.md) + - [CreateAIPlatformConnector](docs/CreateAIPlatformConnector.md) + - [CreateAIPlatformConnectorRequestInner](docs/CreateAIPlatformConnectorRequestInner.md) + - [CreateAIPlatformConnectorResponse](docs/CreateAIPlatformConnectorResponse.md) + - [CreateDestinationConnector](docs/CreateDestinationConnector.md) + - [CreateDestinationConnectorRequestInner](docs/CreateDestinationConnectorRequestInner.md) + - [CreateDestinationConnectorResponse](docs/CreateDestinationConnectorResponse.md) + - [CreatePipelineResponse](docs/CreatePipelineResponse.md) + - [CreatePipelineResponseData](docs/CreatePipelineResponseData.md) + - [CreateSourceConnector](docs/CreateSourceConnector.md) + - [CreateSourceConnectorRequestInner](docs/CreateSourceConnectorRequestInner.md) + - [CreateSourceConnectorResponse](docs/CreateSourceConnectorResponse.md) + - [CreatedAIPlatformConnector](docs/CreatedAIPlatformConnector.md) + - [CreatedDestinationConnector](docs/CreatedDestinationConnector.md) + - [CreatedSourceConnector](docs/CreatedSourceConnector.md) + - [DATASTAXAuthConfig](docs/DATASTAXAuthConfig.md) + - [DATASTAXConfig](docs/DATASTAXConfig.md) + - [DISCORDAuthConfig](docs/DISCORDAuthConfig.md) + - 
[DISCORDConfig](docs/DISCORDConfig.md) + - [DROPBOXAuthConfig](docs/DROPBOXAuthConfig.md) + - [DROPBOXConfig](docs/DROPBOXConfig.md) + - [DROPBOXOAUTHAuthConfig](docs/DROPBOXOAUTHAuthConfig.md) + - [DROPBOXOAUTHMULTIAuthConfig](docs/DROPBOXOAUTHMULTIAuthConfig.md) + - [DROPBOXOAUTHMULTICUSTOMAuthConfig](docs/DROPBOXOAUTHMULTICUSTOMAuthConfig.md) + - [Datastax](docs/Datastax.md) + - [Datastax1](docs/Datastax1.md) + - [Datastax2](docs/Datastax2.md) + - [DeepResearchResult](docs/DeepResearchResult.md) + - [DeleteAIPlatformConnectorResponse](docs/DeleteAIPlatformConnectorResponse.md) + - [DeleteDestinationConnectorResponse](docs/DeleteDestinationConnectorResponse.md) + - [DeleteFileResponse](docs/DeleteFileResponse.md) + - [DeletePipelineResponse](docs/DeletePipelineResponse.md) + - [DeleteSourceConnectorResponse](docs/DeleteSourceConnectorResponse.md) + - [DestinationConnector](docs/DestinationConnector.md) + - [DestinationConnectorInput](docs/DestinationConnectorInput.md) + - [DestinationConnectorInputConfig](docs/DestinationConnectorInputConfig.md) + - [DestinationConnectorSchema](docs/DestinationConnectorSchema.md) + - [DestinationConnectorType](docs/DestinationConnectorType.md) + - [Discord](docs/Discord.md) + - [Discord1](docs/Discord1.md) + - [Discord2](docs/Discord2.md) + - [Document](docs/Document.md) + - [Dropbox](docs/Dropbox.md) + - [Dropbox1](docs/Dropbox1.md) + - [Dropbox2](docs/Dropbox2.md) + - [DropboxOauth](docs/DropboxOauth.md) + - [DropboxOauth1](docs/DropboxOauth1.md) + - [DropboxOauth2](docs/DropboxOauth2.md) + - [DropboxOauthMulti](docs/DropboxOauthMulti.md) + - [DropboxOauthMulti1](docs/DropboxOauthMulti1.md) + - [DropboxOauthMulti2](docs/DropboxOauthMulti2.md) + - [DropboxOauthMultiCustom](docs/DropboxOauthMultiCustom.md) + - [DropboxOauthMultiCustom1](docs/DropboxOauthMultiCustom1.md) + - [DropboxOauthMultiCustom2](docs/DropboxOauthMultiCustom2.md) + - [ELASTICAuthConfig](docs/ELASTICAuthConfig.md) + - [ELASTICConfig](docs/ELASTICConfig.md) + - 
[Elastic](docs/Elastic.md) + - [Elastic1](docs/Elastic1.md) + - [Elastic2](docs/Elastic2.md) + - [ExtractionChunkingStrategy](docs/ExtractionChunkingStrategy.md) + - [ExtractionResult](docs/ExtractionResult.md) + - [ExtractionResultResponse](docs/ExtractionResultResponse.md) + - [ExtractionType](docs/ExtractionType.md) + - [FILEUPLOADAuthConfig](docs/FILEUPLOADAuthConfig.md) + - [FIRECRAWLAuthConfig](docs/FIRECRAWLAuthConfig.md) + - [FIRECRAWLConfig](docs/FIRECRAWLConfig.md) + - [FIREFLIESAuthConfig](docs/FIREFLIESAuthConfig.md) + - [FIREFLIESConfig](docs/FIREFLIESConfig.md) + - [FileUpload](docs/FileUpload.md) + - [FileUpload1](docs/FileUpload1.md) + - [FileUpload2](docs/FileUpload2.md) + - [Firecrawl](docs/Firecrawl.md) + - [Firecrawl1](docs/Firecrawl1.md) + - [Firecrawl2](docs/Firecrawl2.md) + - [Fireflies](docs/Fireflies.md) + - [Fireflies1](docs/Fireflies1.md) + - [Fireflies2](docs/Fireflies2.md) + - [GCSAuthConfig](docs/GCSAuthConfig.md) + - [GCSConfig](docs/GCSConfig.md) + - [GITHUBAuthConfig](docs/GITHUBAuthConfig.md) + - [GITHUBConfig](docs/GITHUBConfig.md) + - [GOOGLEDRIVEAuthConfig](docs/GOOGLEDRIVEAuthConfig.md) + - [GOOGLEDRIVEConfig](docs/GOOGLEDRIVEConfig.md) + - [GOOGLEDRIVEOAUTHAuthConfig](docs/GOOGLEDRIVEOAUTHAuthConfig.md) + - [GOOGLEDRIVEOAUTHConfig](docs/GOOGLEDRIVEOAUTHConfig.md) + - [GOOGLEDRIVEOAUTHMULTIAuthConfig](docs/GOOGLEDRIVEOAUTHMULTIAuthConfig.md) + - [GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig](docs/GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig.md) + - [GOOGLEDRIVEOAUTHMULTICUSTOMConfig](docs/GOOGLEDRIVEOAUTHMULTICUSTOMConfig.md) + - [GOOGLEDRIVEOAUTHMULTIConfig](docs/GOOGLEDRIVEOAUTHMULTIConfig.md) + - [GetAIPlatformConnectors200Response](docs/GetAIPlatformConnectors200Response.md) + - [GetDeepResearchResponse](docs/GetDeepResearchResponse.md) + - [GetDestinationConnectors200Response](docs/GetDestinationConnectors200Response.md) + - [GetPipelineEventsResponse](docs/GetPipelineEventsResponse.md) + - 
[GetPipelineMetricsResponse](docs/GetPipelineMetricsResponse.md) + - [GetPipelineResponse](docs/GetPipelineResponse.md) + - [GetPipelines400Response](docs/GetPipelines400Response.md) + - [GetPipelinesResponse](docs/GetPipelinesResponse.md) + - [GetSourceConnectors200Response](docs/GetSourceConnectors200Response.md) + - [GetUploadFilesResponse](docs/GetUploadFilesResponse.md) + - [Github](docs/Github.md) + - [Github1](docs/Github1.md) + - [Github2](docs/Github2.md) + - [GoogleCloudStorage](docs/GoogleCloudStorage.md) + - [GoogleCloudStorage1](docs/GoogleCloudStorage1.md) + - [GoogleCloudStorage2](docs/GoogleCloudStorage2.md) + - [GoogleDrive](docs/GoogleDrive.md) + - [GoogleDrive1](docs/GoogleDrive1.md) + - [GoogleDrive2](docs/GoogleDrive2.md) + - [GoogleDriveOAuth](docs/GoogleDriveOAuth.md) + - [GoogleDriveOAuth1](docs/GoogleDriveOAuth1.md) + - [GoogleDriveOAuth2](docs/GoogleDriveOAuth2.md) + - [GoogleDriveOauthMulti](docs/GoogleDriveOauthMulti.md) + - [GoogleDriveOauthMulti1](docs/GoogleDriveOauthMulti1.md) + - [GoogleDriveOauthMulti2](docs/GoogleDriveOauthMulti2.md) + - [GoogleDriveOauthMultiCustom](docs/GoogleDriveOauthMultiCustom.md) + - [GoogleDriveOauthMultiCustom1](docs/GoogleDriveOauthMultiCustom1.md) + - [GoogleDriveOauthMultiCustom2](docs/GoogleDriveOauthMultiCustom2.md) + - [INTERCOMAuthConfig](docs/INTERCOMAuthConfig.md) + - [INTERCOMConfig](docs/INTERCOMConfig.md) + - [Intercom](docs/Intercom.md) + - [Intercom1](docs/Intercom1.md) + - [Intercom2](docs/Intercom2.md) + - [MILVUSAuthConfig](docs/MILVUSAuthConfig.md) + - [MILVUSConfig](docs/MILVUSConfig.md) + - [MetadataExtractionStrategy](docs/MetadataExtractionStrategy.md) + - [MetadataExtractionStrategySchema](docs/MetadataExtractionStrategySchema.md) + - [Milvus](docs/Milvus.md) + - [Milvus1](docs/Milvus1.md) + - [Milvus2](docs/Milvus2.md) + - [N8NConfig](docs/N8NConfig.md) + - [NOTIONAuthConfig](docs/NOTIONAuthConfig.md) + - [NOTIONConfig](docs/NOTIONConfig.md) + - 
[NOTIONOAUTHMULTIAuthConfig](docs/NOTIONOAUTHMULTIAuthConfig.md) + - [NOTIONOAUTHMULTICUSTOMAuthConfig](docs/NOTIONOAUTHMULTICUSTOMAuthConfig.md) + - [Notion](docs/Notion.md) + - [Notion1](docs/Notion1.md) + - [Notion2](docs/Notion2.md) + - [NotionOauthMulti](docs/NotionOauthMulti.md) + - [NotionOauthMulti1](docs/NotionOauthMulti1.md) + - [NotionOauthMulti2](docs/NotionOauthMulti2.md) + - [NotionOauthMultiCustom](docs/NotionOauthMultiCustom.md) + - [NotionOauthMultiCustom1](docs/NotionOauthMultiCustom1.md) + - [NotionOauthMultiCustom2](docs/NotionOauthMultiCustom2.md) + - [ONEDRIVEAuthConfig](docs/ONEDRIVEAuthConfig.md) + - [ONEDRIVEConfig](docs/ONEDRIVEConfig.md) + - [OPENAIAuthConfig](docs/OPENAIAuthConfig.md) + - [OneDrive](docs/OneDrive.md) + - [OneDrive1](docs/OneDrive1.md) + - [OneDrive2](docs/OneDrive2.md) + - [Openai](docs/Openai.md) + - [Openai1](docs/Openai1.md) + - [Openai2](docs/Openai2.md) + - [PINECONEAuthConfig](docs/PINECONEAuthConfig.md) + - [PINECONEConfig](docs/PINECONEConfig.md) + - [POSTGRESQLAuthConfig](docs/POSTGRESQLAuthConfig.md) + - [POSTGRESQLConfig](docs/POSTGRESQLConfig.md) + - [Pinecone](docs/Pinecone.md) + - [Pinecone1](docs/Pinecone1.md) + - [Pinecone2](docs/Pinecone2.md) + - [PipelineAIPlatformRequestInner](docs/PipelineAIPlatformRequestInner.md) + - [PipelineConfigurationSchema](docs/PipelineConfigurationSchema.md) + - [PipelineDestinationConnectorRequestInner](docs/PipelineDestinationConnectorRequestInner.md) + - [PipelineEvents](docs/PipelineEvents.md) + - [PipelineListSummary](docs/PipelineListSummary.md) + - [PipelineMetrics](docs/PipelineMetrics.md) + - [PipelineSourceConnectorRequestInner](docs/PipelineSourceConnectorRequestInner.md) + - [PipelineSummary](docs/PipelineSummary.md) + - [Postgresql](docs/Postgresql.md) + - [Postgresql1](docs/Postgresql1.md) + - [Postgresql2](docs/Postgresql2.md) + - [QDRANTAuthConfig](docs/QDRANTAuthConfig.md) + - [QDRANTConfig](docs/QDRANTConfig.md) + - [Qdrant](docs/Qdrant.md) + - 
[Qdrant1](docs/Qdrant1.md) + - [Qdrant2](docs/Qdrant2.md) + - [RemoveUserFromSourceConnectorRequest](docs/RemoveUserFromSourceConnectorRequest.md) + - [RemoveUserFromSourceConnectorResponse](docs/RemoveUserFromSourceConnectorResponse.md) + - [RetrieveContext](docs/RetrieveContext.md) + - [RetrieveContextMessage](docs/RetrieveContextMessage.md) + - [RetrieveDocumentsRequest](docs/RetrieveDocumentsRequest.md) + - [RetrieveDocumentsResponse](docs/RetrieveDocumentsResponse.md) + - [SHAREPOINTAuthConfig](docs/SHAREPOINTAuthConfig.md) + - [SHAREPOINTConfig](docs/SHAREPOINTConfig.md) + - [SINGLESTOREAuthConfig](docs/SINGLESTOREAuthConfig.md) + - [SINGLESTOREConfig](docs/SINGLESTOREConfig.md) + - [SUPABASEAuthConfig](docs/SUPABASEAuthConfig.md) + - [SUPABASEConfig](docs/SUPABASEConfig.md) + - [ScheduleSchema](docs/ScheduleSchema.md) + - [ScheduleSchemaType](docs/ScheduleSchemaType.md) + - [Sharepoint](docs/Sharepoint.md) + - [Sharepoint1](docs/Sharepoint1.md) + - [Sharepoint2](docs/Sharepoint2.md) + - [Singlestore](docs/Singlestore.md) + - [Singlestore1](docs/Singlestore1.md) + - [Singlestore2](docs/Singlestore2.md) + - [SourceConnector](docs/SourceConnector.md) + - [SourceConnectorInput](docs/SourceConnectorInput.md) + - [SourceConnectorInputConfig](docs/SourceConnectorInputConfig.md) + - [SourceConnectorSchema](docs/SourceConnectorSchema.md) + - [SourceConnectorType](docs/SourceConnectorType.md) + - [StartDeepResearchRequest](docs/StartDeepResearchRequest.md) + - [StartDeepResearchResponse](docs/StartDeepResearchResponse.md) + - [StartExtractionRequest](docs/StartExtractionRequest.md) + - [StartExtractionResponse](docs/StartExtractionResponse.md) + - [StartFileUploadRequest](docs/StartFileUploadRequest.md) + - [StartFileUploadResponse](docs/StartFileUploadResponse.md) + - [StartFileUploadToConnectorRequest](docs/StartFileUploadToConnectorRequest.md) + - [StartFileUploadToConnectorResponse](docs/StartFileUploadToConnectorResponse.md) + - 
[StartPipelineResponse](docs/StartPipelineResponse.md) + - [StopPipelineResponse](docs/StopPipelineResponse.md) + - [Supabase](docs/Supabase.md) + - [Supabase1](docs/Supabase1.md) + - [Supabase2](docs/Supabase2.md) + - [TURBOPUFFERAuthConfig](docs/TURBOPUFFERAuthConfig.md) + - [TURBOPUFFERConfig](docs/TURBOPUFFERConfig.md) + - [Turbopuffer](docs/Turbopuffer.md) + - [Turbopuffer1](docs/Turbopuffer1.md) + - [Turbopuffer2](docs/Turbopuffer2.md) + - [UpdateAIPlatformConnectorRequest](docs/UpdateAIPlatformConnectorRequest.md) + - [UpdateAIPlatformConnectorResponse](docs/UpdateAIPlatformConnectorResponse.md) + - [UpdateAiplatformConnectorRequest](docs/UpdateAiplatformConnectorRequest.md) + - [UpdateDestinationConnectorRequest](docs/UpdateDestinationConnectorRequest.md) + - [UpdateDestinationConnectorResponse](docs/UpdateDestinationConnectorResponse.md) + - [UpdateSourceConnectorRequest](docs/UpdateSourceConnectorRequest.md) + - [UpdateSourceConnectorResponse](docs/UpdateSourceConnectorResponse.md) + - [UpdateSourceConnectorResponseData](docs/UpdateSourceConnectorResponseData.md) + - [UpdateUserInSourceConnectorRequest](docs/UpdateUserInSourceConnectorRequest.md) + - [UpdateUserInSourceConnectorResponse](docs/UpdateUserInSourceConnectorResponse.md) + - [UpdatedAIPlatformConnectorData](docs/UpdatedAIPlatformConnectorData.md) + - [UpdatedDestinationConnectorData](docs/UpdatedDestinationConnectorData.md) + - [UploadFile](docs/UploadFile.md) + - [VERTEXAuthConfig](docs/VERTEXAuthConfig.md) + - [VOYAGEAuthConfig](docs/VOYAGEAuthConfig.md) + - [Vertex](docs/Vertex.md) + - [Vertex1](docs/Vertex1.md) + - [Vertex2](docs/Vertex2.md) + - [Voyage](docs/Voyage.md) + - [Voyage1](docs/Voyage1.md) + - [Voyage2](docs/Voyage2.md) + - [WEAVIATEAuthConfig](docs/WEAVIATEAuthConfig.md) + - [WEAVIATEConfig](docs/WEAVIATEConfig.md) + - [WEBCRAWLERAuthConfig](docs/WEBCRAWLERAuthConfig.md) + - [WEBCRAWLERConfig](docs/WEBCRAWLERConfig.md) + - [Weaviate](docs/Weaviate.md) + - 
[Weaviate1](docs/Weaviate1.md) + - [Weaviate2](docs/Weaviate2.md) + - [WebCrawler](docs/WebCrawler.md) + - [WebCrawler1](docs/WebCrawler1.md) + - [WebCrawler2](docs/WebCrawler2.md) + + + +## Documentation For Authorization + + +Authentication schemes defined for the API: + +### bearerAuth + +- **Type**: Bearer authentication (JWT) + + +## Author + + diff --git a/docs/AIPlatform.md b/docs/AIPlatform.md new file mode 100644 index 0000000..96388e6 --- /dev/null +++ b/docs/AIPlatform.md @@ -0,0 +1,39 @@ +# AIPlatform + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**type** | **str** | | +**name** | **str** | | +**config_doc** | **Dict[str, Optional[object]]** | | [optional] +**created_at** | **str** | | [optional] +**created_by_id** | **str** | | [optional] +**last_updated_by_id** | **str** | | [optional] +**created_by_email** | **str** | | [optional] +**last_updated_by_email** | **str** | | [optional] +**error_message** | **str** | | [optional] +**verification_status** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.ai_platform import AIPlatform + +# TODO update the JSON string below +json = "{}" +# create an instance of AIPlatform from a JSON string +ai_platform_instance = AIPlatform.from_json(json) +# print the JSON string representation of the object +print(AIPlatform.to_json()) + +# convert the object into a dict +ai_platform_dict = ai_platform_instance.to_dict() +# create an instance of AIPlatform from a dict +ai_platform_from_dict = AIPlatform.from_dict(ai_platform_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AIPlatformConfigSchema.md b/docs/AIPlatformConfigSchema.md new file mode 100644 index 0000000..6291b72 --- /dev/null +++ b/docs/AIPlatformConfigSchema.md @@ -0,0 +1,34 @@ +# 
AIPlatformConfigSchema + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**embedding_model** | **str** | | [optional] +**chunking_strategy** | **str** | | [optional] +**chunk_size** | **int** | | [optional] +**chunk_overlap** | **int** | | [optional] +**dimensions** | **int** | | [optional] +**extraction_strategy** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.ai_platform_config_schema import AIPlatformConfigSchema + +# TODO update the JSON string below +json = "{}" +# create an instance of AIPlatformConfigSchema from a JSON string +ai_platform_config_schema_instance = AIPlatformConfigSchema.from_json(json) +# print the JSON string representation of the object +print(AIPlatformConfigSchema.to_json()) + +# convert the object into a dict +ai_platform_config_schema_dict = ai_platform_config_schema_instance.to_dict() +# create an instance of AIPlatformConfigSchema from a dict +ai_platform_config_schema_from_dict = AIPlatformConfigSchema.from_dict(ai_platform_config_schema_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AIPlatformInput.md b/docs/AIPlatformInput.md new file mode 100644 index 0000000..6579a83 --- /dev/null +++ b/docs/AIPlatformInput.md @@ -0,0 +1,32 @@ +# AIPlatformInput + +AI platform configuration + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the AI platform | +**type** | **str** | Type of AI platform | +**config** | **object** | Configuration specific to the AI platform | + +## Example + +```python +from vectorize_client.models.ai_platform_input import AIPlatformInput + +# TODO update the JSON string below +json = "{}" +# create an instance of AIPlatformInput from a JSON string 
+ai_platform_input_instance = AIPlatformInput.from_json(json) +# print the JSON string representation of the object +print(AIPlatformInput.to_json()) + +# convert the object into a dict +ai_platform_input_dict = ai_platform_input_instance.to_dict() +# create an instance of AIPlatformInput from a dict +ai_platform_input_from_dict = AIPlatformInput.from_dict(ai_platform_input_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AIPlatformSchema.md b/docs/AIPlatformSchema.md new file mode 100644 index 0000000..995b683 --- /dev/null +++ b/docs/AIPlatformSchema.md @@ -0,0 +1,31 @@ +# AIPlatformSchema + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**type** | [**AIPlatformType**](AIPlatformType.md) | | +**config** | [**AIPlatformConfigSchema**](AIPlatformConfigSchema.md) | | + +## Example + +```python +from vectorize_client.models.ai_platform_schema import AIPlatformSchema + +# TODO update the JSON string below +json = "{}" +# create an instance of AIPlatformSchema from a JSON string +ai_platform_schema_instance = AIPlatformSchema.from_json(json) +# print the JSON string representation of the object +print(AIPlatformSchema.to_json()) + +# convert the object into a dict +ai_platform_schema_dict = ai_platform_schema_instance.to_dict() +# create an instance of AIPlatformSchema from a dict +ai_platform_schema_from_dict = AIPlatformSchema.from_dict(ai_platform_schema_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AIPlatformType.md b/docs/AIPlatformType.md new file mode 100644 index 0000000..e289a27 --- /dev/null +++ b/docs/AIPlatformType.md @@ -0,0 +1,16 @@ +# AIPlatformType + + +## Enum + +* 
`BEDROCK` (value: `'BEDROCK'`) + +* `VERTEX` (value: `'VERTEX'`) + +* `OPENAI` (value: `'OPENAI'`) + +* `VOYAGE` (value: `'VOYAGE'`) + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AWSS3AuthConfig.md b/docs/AWSS3AuthConfig.md new file mode 100644 index 0000000..abf8eeb --- /dev/null +++ b/docs/AWSS3AuthConfig.md @@ -0,0 +1,36 @@ +# AWSS3AuthConfig + +Authentication configuration for Amazon S3 + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**access_key** | **str** | Access Key. Example: Enter Access Key | +**secret_key** | **str** | Secret Key. Example: Enter Secret Key | +**bucket_name** | **str** | Bucket Name. Example: Enter your S3 Bucket Name | +**endpoint** | **str** | Endpoint. Example: Enter Endpoint URL | [optional] +**region** | **str** | Region. 
Example: Region Name | [optional] +**archiver** | **bool** | Allow as archive destination | [default to False] + +## Example + +```python +from vectorize_client.models.awss3_auth_config import AWSS3AuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of AWSS3AuthConfig from a JSON string +awss3_auth_config_instance = AWSS3AuthConfig.from_json(json) +# print the JSON string representation of the object +print(awss3_auth_config_instance.to_json()) + +# convert the object into a dict +awss3_auth_config_dict = awss3_auth_config_instance.to_dict() +# create an instance of AWSS3AuthConfig from a dict +awss3_auth_config_from_dict = AWSS3AuthConfig.from_dict(awss3_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AWSS3Config.md b/docs/AWSS3Config.md new file mode 100644 index 0000000..f0e8fb9 --- /dev/null +++ b/docs/AWSS3Config.md @@ -0,0 +1,35 @@ +# AWSS3Config + +Configuration for Amazon S3 connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_extensions** | **List[str]** | File Extensions | +**idle_time** | **float** | Check for updates every (seconds) | [default to 5] +**recursive** | **bool** | Recursively scan all folders in the bucket | [optional] +**path_prefix** | **str** | Path Prefix | [optional] +**path_metadata_regex** | **str** | Path Metadata Regex | [optional] +**path_regex_group_names** | **str** | Path Regex Group Names. 
Example: Enter Group Name | [optional] + +## Example + +```python +from vectorize_client.models.awss3_config import AWSS3Config + +# TODO update the JSON string below +json = "{}" +# create an instance of AWSS3Config from a JSON string +awss3_config_instance = AWSS3Config.from_json(json) +# print the JSON string representation of the object +print(AWSS3Config.to_json()) + +# convert the object into a dict +awss3_config_dict = awss3_config_instance.to_dict() +# create an instance of AWSS3Config from a dict +awss3_config_from_dict = AWSS3Config.from_dict(awss3_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AZUREAISEARCHAuthConfig.md b/docs/AZUREAISEARCHAuthConfig.md new file mode 100644 index 0000000..548a08f --- /dev/null +++ b/docs/AZUREAISEARCHAuthConfig.md @@ -0,0 +1,32 @@ +# AZUREAISEARCHAuthConfig + +Authentication configuration for Azure AI Search + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Azure AI Search integration | +**service_name** | **str** | Azure AI Search Service Name. Example: Enter your Azure AI Search service name | +**api_key** | **str** | API Key. 
Example: Enter your API key | + +## Example + +```python +from vectorize_client.models.azureaisearch_auth_config import AZUREAISEARCHAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of AZUREAISEARCHAuthConfig from a JSON string +azureaisearch_auth_config_instance = AZUREAISEARCHAuthConfig.from_json(json) +# print the JSON string representation of the object +print(azureaisearch_auth_config_instance.to_json()) + +# convert the object into a dict +azureaisearch_auth_config_dict = azureaisearch_auth_config_instance.to_dict() +# create an instance of AZUREAISEARCHAuthConfig from a dict +azureaisearch_auth_config_from_dict = AZUREAISEARCHAuthConfig.from_dict(azureaisearch_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AZUREAISEARCHConfig.md b/docs/AZUREAISEARCHConfig.md new file mode 100644 index 0000000..9fc02f5 --- /dev/null +++ b/docs/AZUREAISEARCHConfig.md @@ -0,0 +1,30 @@ +# AZUREAISEARCHConfig + +Configuration for Azure AI Search connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**index** | **str** | Index Name. 
Example: Enter index name | + +## Example + +```python +from vectorize_client.models.azureaisearch_config import AZUREAISEARCHConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of AZUREAISEARCHConfig from a JSON string +azureaisearch_config_instance = AZUREAISEARCHConfig.from_json(json) +# print the JSON string representation of the object +print(azureaisearch_config_instance.to_json()) + +# convert the object into a dict +azureaisearch_config_dict = azureaisearch_config_instance.to_dict() +# create an instance of AZUREAISEARCHConfig from a dict +azureaisearch_config_from_dict = AZUREAISEARCHConfig.from_dict(azureaisearch_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AZUREBLOBAuthConfig.md b/docs/AZUREBLOBAuthConfig.md new file mode 100644 index 0000000..089628d --- /dev/null +++ b/docs/AZUREBLOBAuthConfig.md @@ -0,0 +1,34 @@ +# AZUREBLOBAuthConfig + +Authentication configuration for Azure Blob Storage + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**storage_account_name** | **str** | Storage Account Name. Example: Enter Storage Account Name | +**storage_account_key** | **str** | Storage Account Key. Example: Enter Storage Account Key | +**container** | **str** | Container. Example: Enter Container Name | +**endpoint** | **str** | Endpoint. 
Example: Enter Endpoint URL | [optional] + +## Example + +```python +from vectorize_client.models.azureblob_auth_config import AZUREBLOBAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of AZUREBLOBAuthConfig from a JSON string +azureblob_auth_config_instance = AZUREBLOBAuthConfig.from_json(json) +# print the JSON string representation of the object +print(AZUREBLOBAuthConfig.to_json()) + +# convert the object into a dict +azureblob_auth_config_dict = azureblob_auth_config_instance.to_dict() +# create an instance of AZUREBLOBAuthConfig from a dict +azureblob_auth_config_from_dict = AZUREBLOBAuthConfig.from_dict(azureblob_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AZUREBLOBConfig.md b/docs/AZUREBLOBConfig.md new file mode 100644 index 0000000..e98e6aa --- /dev/null +++ b/docs/AZUREBLOBConfig.md @@ -0,0 +1,35 @@ +# AZUREBLOBConfig + +Configuration for Azure Blob Storage connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_extensions** | **List[str]** | File Extensions | +**idle_time** | **float** | Polling Interval (seconds) | [default to 5] +**recursive** | **bool** | Recursively scan all folders in the bucket | [optional] +**path_prefix** | **str** | Path Prefix | [optional] +**path_metadata_regex** | **str** | Path Metadata Regex | [optional] +**path_regex_group_names** | **str** | Path Regex Group Names. 
Example: Enter Group Name | [optional] + +## Example + +```python +from vectorize_client.models.azureblob_config import AZUREBLOBConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of AZUREBLOBConfig from a JSON string +azureblob_config_instance = AZUREBLOBConfig.from_json(json) +# print the JSON string representation of the object +print(AZUREBLOBConfig.to_json()) + +# convert the object into a dict +azureblob_config_dict = azureblob_config_instance.to_dict() +# create an instance of AZUREBLOBConfig from a dict +azureblob_config_from_dict = AZUREBLOBConfig.from_dict(azureblob_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AddUserFromSourceConnectorResponse.md b/docs/AddUserFromSourceConnectorResponse.md new file mode 100644 index 0000000..839f8df --- /dev/null +++ b/docs/AddUserFromSourceConnectorResponse.md @@ -0,0 +1,29 @@ +# AddUserFromSourceConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | + +## Example + +```python +from vectorize_client.models.add_user_from_source_connector_response import AddUserFromSourceConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of AddUserFromSourceConnectorResponse from a JSON string +add_user_from_source_connector_response_instance = AddUserFromSourceConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(AddUserFromSourceConnectorResponse.to_json()) + +# convert the object into a dict +add_user_from_source_connector_response_dict = add_user_from_source_connector_response_instance.to_dict() +# create an instance of AddUserFromSourceConnectorResponse from a dict +add_user_from_source_connector_response_from_dict = 
AddUserFromSourceConnectorResponse.from_dict(add_user_from_source_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AddUserToSourceConnectorRequest.md b/docs/AddUserToSourceConnectorRequest.md new file mode 100644 index 0000000..3768a06 --- /dev/null +++ b/docs/AddUserToSourceConnectorRequest.md @@ -0,0 +1,32 @@ +# AddUserToSourceConnectorRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**user_id** | **str** | | +**selected_files** | [**AddUserToSourceConnectorRequestSelectedFiles**](AddUserToSourceConnectorRequestSelectedFiles.md) | | +**refresh_token** | **str** | | [optional] +**access_token** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.add_user_to_source_connector_request import AddUserToSourceConnectorRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of AddUserToSourceConnectorRequest from a JSON string +add_user_to_source_connector_request_instance = AddUserToSourceConnectorRequest.from_json(json) +# print the JSON string representation of the object +print(AddUserToSourceConnectorRequest.to_json()) + +# convert the object into a dict +add_user_to_source_connector_request_dict = add_user_to_source_connector_request_instance.to_dict() +# create an instance of AddUserToSourceConnectorRequest from a dict +add_user_to_source_connector_request_from_dict = AddUserToSourceConnectorRequest.from_dict(add_user_to_source_connector_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AddUserToSourceConnectorRequestSelectedFiles.md b/docs/AddUserToSourceConnectorRequestSelectedFiles.md new file mode 100644 index 
0000000..9526870 --- /dev/null +++ b/docs/AddUserToSourceConnectorRequestSelectedFiles.md @@ -0,0 +1,30 @@ +# AddUserToSourceConnectorRequestSelectedFiles + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**page_ids** | **List[str]** | | [optional] +**database_ids** | **List[str]** | | [optional] + +## Example + +```python +from vectorize_client.models.add_user_to_source_connector_request_selected_files import AddUserToSourceConnectorRequestSelectedFiles + +# TODO update the JSON string below +json = "{}" +# create an instance of AddUserToSourceConnectorRequestSelectedFiles from a JSON string +add_user_to_source_connector_request_selected_files_instance = AddUserToSourceConnectorRequestSelectedFiles.from_json(json) +# print the JSON string representation of the object +print(AddUserToSourceConnectorRequestSelectedFiles.to_json()) + +# convert the object into a dict +add_user_to_source_connector_request_selected_files_dict = add_user_to_source_connector_request_selected_files_instance.to_dict() +# create an instance of AddUserToSourceConnectorRequestSelectedFiles from a dict +add_user_to_source_connector_request_selected_files_from_dict = AddUserToSourceConnectorRequestSelectedFiles.from_dict(add_user_to_source_connector_request_selected_files_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AddUserToSourceConnectorRequestSelectedFilesAnyOf.md b/docs/AddUserToSourceConnectorRequestSelectedFilesAnyOf.md new file mode 100644 index 0000000..d5531c9 --- /dev/null +++ b/docs/AddUserToSourceConnectorRequestSelectedFilesAnyOf.md @@ -0,0 +1,30 @@ +# AddUserToSourceConnectorRequestSelectedFilesAnyOf + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**page_ids** | **List[str]** | | [optional] 
+**database_ids** | **List[str]** | | [optional] + +## Example + +```python +from vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of import AddUserToSourceConnectorRequestSelectedFilesAnyOf + +# TODO update the JSON string below +json = "{}" +# create an instance of AddUserToSourceConnectorRequestSelectedFilesAnyOf from a JSON string +add_user_to_source_connector_request_selected_files_any_of_instance = AddUserToSourceConnectorRequestSelectedFilesAnyOf.from_json(json) +# print the JSON string representation of the object +print(AddUserToSourceConnectorRequestSelectedFilesAnyOf.to_json()) + +# convert the object into a dict +add_user_to_source_connector_request_selected_files_any_of_dict = add_user_to_source_connector_request_selected_files_any_of_instance.to_dict() +# create an instance of AddUserToSourceConnectorRequestSelectedFilesAnyOf from a dict +add_user_to_source_connector_request_selected_files_any_of_from_dict = AddUserToSourceConnectorRequestSelectedFilesAnyOf.from_dict(add_user_to_source_connector_request_selected_files_any_of_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AddUserToSourceConnectorRequestSelectedFilesAnyOfValue.md b/docs/AddUserToSourceConnectorRequestSelectedFilesAnyOfValue.md new file mode 100644 index 0000000..a0ad6e8 --- /dev/null +++ b/docs/AddUserToSourceConnectorRequestSelectedFilesAnyOfValue.md @@ -0,0 +1,30 @@ +# AddUserToSourceConnectorRequestSelectedFilesAnyOfValue + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**mime_type** | **str** | | + +## Example + +```python +from vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of_value import AddUserToSourceConnectorRequestSelectedFilesAnyOfValue + +# TODO update the JSON string 
below +json = "{}" +# create an instance of AddUserToSourceConnectorRequestSelectedFilesAnyOfValue from a JSON string +add_user_to_source_connector_request_selected_files_any_of_value_instance = AddUserToSourceConnectorRequestSelectedFilesAnyOfValue.from_json(json) +# print the JSON string representation of the object +print(AddUserToSourceConnectorRequestSelectedFilesAnyOfValue.to_json()) + +# convert the object into a dict +add_user_to_source_connector_request_selected_files_any_of_value_dict = add_user_to_source_connector_request_selected_files_any_of_value_instance.to_dict() +# create an instance of AddUserToSourceConnectorRequestSelectedFilesAnyOfValue from a dict +add_user_to_source_connector_request_selected_files_any_of_value_from_dict = AddUserToSourceConnectorRequestSelectedFilesAnyOfValue.from_dict(add_user_to_source_connector_request_selected_files_any_of_value_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AdvancedQuery.md b/docs/AdvancedQuery.md new file mode 100644 index 0000000..4c0c6d4 --- /dev/null +++ b/docs/AdvancedQuery.md @@ -0,0 +1,34 @@ +# AdvancedQuery + +Advanced query parameters for enhanced search capabilities. + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**mode** | **str** | Search mode: 'text', 'vector', or 'hybrid'. Defaults to 'vector' if not specified. | [optional] +**text_fields** | **List[str]** | Fields to perform text search on. | [optional] +**match_type** | **str** | Type of text match to perform. | [optional] +**text_boost** | **float** | Multiplier for text search scores. | [optional] +**filters** | **object** | Elasticsearch-compatible filter object. 
| [optional] + +## Example + +```python +from vectorize_client.models.advanced_query import AdvancedQuery + +# TODO update the JSON string below +json = "{}" +# create an instance of AdvancedQuery from a JSON string +advanced_query_instance = AdvancedQuery.from_json(json) +# print the JSON string representation of the object +print(AdvancedQuery.to_json()) + +# convert the object into a dict +advanced_query_dict = advanced_query_instance.to_dict() +# create an instance of AdvancedQuery from a dict +advanced_query_from_dict = AdvancedQuery.from_dict(advanced_query_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AmazonS3.md b/docs/AmazonS3.md new file mode 100644 index 0000000..7f752fd --- /dev/null +++ b/docs/AmazonS3.md @@ -0,0 +1,31 @@ +# AmazonS3 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"AWS_S3\") | +**config** | [**AWSS3Config**](AWSS3Config.md) | | + +## Example + +```python +from vectorize_client.models.amazon_s3 import AmazonS3 + +# TODO update the JSON string below +json = "{}" +# create an instance of AmazonS3 from a JSON string +amazon_s3_instance = AmazonS3.from_json(json) +# print the JSON string representation of the object +print(AmazonS3.to_json()) + +# convert the object into a dict +amazon_s3_dict = amazon_s3_instance.to_dict() +# create an instance of AmazonS3 from a dict +amazon_s3_from_dict = AmazonS3.from_dict(amazon_s3_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AmazonS31.md b/docs/AmazonS31.md new file mode 100644 index 0000000..a688a8a --- /dev/null +++ b/docs/AmazonS31.md @@ -0,0 
+1,29 @@ +# AmazonS31 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**AWSS3Config**](AWSS3Config.md) | | [optional] + +## Example + +```python +from vectorize_client.models.amazon_s31 import AmazonS31 + +# TODO update the JSON string below +json = "{}" +# create an instance of AmazonS31 from a JSON string +amazon_s31_instance = AmazonS31.from_json(json) +# print the JSON string representation of the object +print(AmazonS31.to_json()) + +# convert the object into a dict +amazon_s31_dict = amazon_s31_instance.to_dict() +# create an instance of AmazonS31 from a dict +amazon_s31_from_dict = AmazonS31.from_dict(amazon_s31_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AmazonS32.md b/docs/AmazonS32.md new file mode 100644 index 0000000..29c792b --- /dev/null +++ b/docs/AmazonS32.md @@ -0,0 +1,30 @@ +# AmazonS32 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"AWS_S3\") | + +## Example + +```python +from vectorize_client.models.amazon_s32 import AmazonS32 + +# TODO update the JSON string below +json = "{}" +# create an instance of AmazonS32 from a JSON string +amazon_s32_instance = AmazonS32.from_json(json) +# print the JSON string representation of the object +print(AmazonS32.to_json()) + +# convert the object into a dict +amazon_s32_dict = amazon_s32_instance.to_dict() +# create an instance of AmazonS32 from a dict +amazon_s32_from_dict = AmazonS32.from_dict(amazon_s32_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git 
a/docs/AzureBlobStorage.md b/docs/AzureBlobStorage.md new file mode 100644 index 0000000..4d7c3ee --- /dev/null +++ b/docs/AzureBlobStorage.md @@ -0,0 +1,31 @@ +# AzureBlobStorage + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"AZURE_BLOB\") | +**config** | [**AZUREBLOBConfig**](AZUREBLOBConfig.md) | | + +## Example + +```python +from vectorize_client.models.azure_blob_storage import AzureBlobStorage + +# TODO update the JSON string below +json = "{}" +# create an instance of AzureBlobStorage from a JSON string +azure_blob_storage_instance = AzureBlobStorage.from_json(json) +# print the JSON string representation of the object +print(AzureBlobStorage.to_json()) + +# convert the object into a dict +azure_blob_storage_dict = azure_blob_storage_instance.to_dict() +# create an instance of AzureBlobStorage from a dict +azure_blob_storage_from_dict = AzureBlobStorage.from_dict(azure_blob_storage_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AzureBlobStorage1.md b/docs/AzureBlobStorage1.md new file mode 100644 index 0000000..f6bd884 --- /dev/null +++ b/docs/AzureBlobStorage1.md @@ -0,0 +1,29 @@ +# AzureBlobStorage1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**AZUREBLOBConfig**](AZUREBLOBConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.azure_blob_storage1 import AzureBlobStorage1 + +# TODO update the JSON string below +json = "{}" +# create an instance of AzureBlobStorage1 from a JSON string +azure_blob_storage1_instance = AzureBlobStorage1.from_json(json) +# print the JSON string representation of the object 
+print(AzureBlobStorage1.to_json()) + +# convert the object into a dict +azure_blob_storage1_dict = azure_blob_storage1_instance.to_dict() +# create an instance of AzureBlobStorage1 from a dict +azure_blob_storage1_from_dict = AzureBlobStorage1.from_dict(azure_blob_storage1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/AzureBlobStorage2.md b/docs/AzureBlobStorage2.md new file mode 100644 index 0000000..874c5fc --- /dev/null +++ b/docs/AzureBlobStorage2.md @@ -0,0 +1,30 @@ +# AzureBlobStorage2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"AZURE_BLOB\") | + +## Example + +```python +from vectorize_client.models.azure_blob_storage2 import AzureBlobStorage2 + +# TODO update the JSON string below +json = "{}" +# create an instance of AzureBlobStorage2 from a JSON string +azure_blob_storage2_instance = AzureBlobStorage2.from_json(json) +# print the JSON string representation of the object +print(AzureBlobStorage2.to_json()) + +# convert the object into a dict +azure_blob_storage2_dict = azure_blob_storage2_instance.to_dict() +# create an instance of AzureBlobStorage2 from a dict +azure_blob_storage2_from_dict = AzureBlobStorage2.from_dict(azure_blob_storage2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Azureaisearch.md b/docs/Azureaisearch.md new file mode 100644 index 0000000..a94540d --- /dev/null +++ b/docs/Azureaisearch.md @@ -0,0 +1,31 @@ +# Azureaisearch + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | 
Name of the connector | +**type** | **str** | Connector type (must be \"AZUREAISEARCH\") | +**config** | [**AZUREAISEARCHConfig**](AZUREAISEARCHConfig.md) | | + +## Example + +```python +from vectorize_client.models.azureaisearch import Azureaisearch + +# TODO update the JSON string below +json = "{}" +# create an instance of Azureaisearch from a JSON string +azureaisearch_instance = Azureaisearch.from_json(json) +# print the JSON string representation of the object +print(Azureaisearch.to_json()) + +# convert the object into a dict +azureaisearch_dict = azureaisearch_instance.to_dict() +# create an instance of Azureaisearch from a dict +azureaisearch_from_dict = Azureaisearch.from_dict(azureaisearch_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Azureaisearch1.md b/docs/Azureaisearch1.md new file mode 100644 index 0000000..52eb086 --- /dev/null +++ b/docs/Azureaisearch1.md @@ -0,0 +1,29 @@ +# Azureaisearch1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**AZUREAISEARCHConfig**](AZUREAISEARCHConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.azureaisearch1 import Azureaisearch1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Azureaisearch1 from a JSON string +azureaisearch1_instance = Azureaisearch1.from_json(json) +# print the JSON string representation of the object +print(Azureaisearch1.to_json()) + +# convert the object into a dict +azureaisearch1_dict = azureaisearch1_instance.to_dict() +# create an instance of Azureaisearch1 from a dict +azureaisearch1_from_dict = Azureaisearch1.from_dict(azureaisearch1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) + + diff --git a/docs/Azureaisearch2.md b/docs/Azureaisearch2.md new file mode 100644 index 0000000..12799ab --- /dev/null +++ b/docs/Azureaisearch2.md @@ -0,0 +1,30 @@ +# Azureaisearch2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"AZUREAISEARCH\") | + +## Example + +```python +from vectorize_client.models.azureaisearch2 import Azureaisearch2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Azureaisearch2 from a JSON string +azureaisearch2_instance = Azureaisearch2.from_json(json) +# print the JSON string representation of the object +print(Azureaisearch2.to_json()) + +# convert the object into a dict +azureaisearch2_dict = azureaisearch2_instance.to_dict() +# create an instance of Azureaisearch2 from a dict +azureaisearch2_from_dict = Azureaisearch2.from_dict(azureaisearch2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/BEDROCKAuthConfig.md b/docs/BEDROCKAuthConfig.md new file mode 100644 index 0000000..bf13fe6 --- /dev/null +++ b/docs/BEDROCKAuthConfig.md @@ -0,0 +1,33 @@ +# BEDROCKAuthConfig + +Authentication configuration for Amazon Bedrock + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Amazon Bedrock integration | +**access_key** | **str** | Access Key. Example: Enter your Amazon Bedrock Access Key | +**key** | **str** | Secret Key. Example: Enter your Amazon Bedrock Secret Key | +**region** | **str** | Region. 
Example: Region Name | + +## Example + +```python +from vectorize_client.models.bedrock_auth_config import BEDROCKAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of BEDROCKAuthConfig from a JSON string +bedrock_auth_config_instance = BEDROCKAuthConfig.from_json(json) +# print the JSON string representation of the object +print(BEDROCKAuthConfig.to_json()) + +# convert the object into a dict +bedrock_auth_config_dict = bedrock_auth_config_instance.to_dict() +# create an instance of BEDROCKAuthConfig from a dict +bedrock_auth_config_from_dict = BEDROCKAuthConfig.from_dict(bedrock_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Bedrock.md b/docs/Bedrock.md new file mode 100644 index 0000000..ef2d829 --- /dev/null +++ b/docs/Bedrock.md @@ -0,0 +1,31 @@ +# Bedrock + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"BEDROCK\") | +**config** | [**BEDROCKAuthConfig**](BEDROCKAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.bedrock import Bedrock + +# TODO update the JSON string below +json = "{}" +# create an instance of Bedrock from a JSON string +bedrock_instance = Bedrock.from_json(json) +# print the JSON string representation of the object +print(Bedrock.to_json()) + +# convert the object into a dict +bedrock_dict = bedrock_instance.to_dict() +# create an instance of Bedrock from a dict +bedrock_from_dict = Bedrock.from_dict(bedrock_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Bedrock1.md b/docs/Bedrock1.md new file mode 100644 index 
0000000..69878c7 --- /dev/null +++ b/docs/Bedrock1.md @@ -0,0 +1,29 @@ +# Bedrock1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**BEDROCKAuthConfig**](BEDROCKAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.bedrock1 import Bedrock1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Bedrock1 from a JSON string +bedrock1_instance = Bedrock1.from_json(json) +# print the JSON string representation of the object +print(Bedrock1.to_json()) + +# convert the object into a dict +bedrock1_dict = bedrock1_instance.to_dict() +# create an instance of Bedrock1 from a dict +bedrock1_from_dict = Bedrock1.from_dict(bedrock1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Bedrock2.md b/docs/Bedrock2.md new file mode 100644 index 0000000..31ea5bf --- /dev/null +++ b/docs/Bedrock2.md @@ -0,0 +1,30 @@ +# Bedrock2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"BEDROCK\") | + +## Example + +```python +from vectorize_client.models.bedrock2 import Bedrock2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Bedrock2 from a JSON string +bedrock2_instance = Bedrock2.from_json(json) +# print the JSON string representation of the object +print(Bedrock2.to_json()) + +# convert the object into a dict +bedrock2_dict = bedrock2_instance.to_dict() +# create an instance of Bedrock2 from a dict +bedrock2_from_dict = Bedrock2.from_dict(bedrock2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) + + diff --git a/docs/CAPELLAAuthConfig.md b/docs/CAPELLAAuthConfig.md new file mode 100644 index 0000000..e6ace1f --- /dev/null +++ b/docs/CAPELLAAuthConfig.md @@ -0,0 +1,33 @@ +# CAPELLAAuthConfig + +Authentication configuration for Couchbase Capella + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Capella integration | +**username** | **str** | Cluster Access Name. Example: Enter your cluster access name | +**password** | **str** | Cluster Access Password. Example: Enter your cluster access password | +**connection_string** | **str** | Connection String. Example: Enter your connection string | + +## Example + +```python +from vectorize_client.models.capella_auth_config import CAPELLAAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of CAPELLAAuthConfig from a JSON string +capella_auth_config_instance = CAPELLAAuthConfig.from_json(json) +# print the JSON string representation of the object +print(CAPELLAAuthConfig.to_json()) + +# convert the object into a dict +capella_auth_config_dict = capella_auth_config_instance.to_dict() +# create an instance of CAPELLAAuthConfig from a dict +capella_auth_config_from_dict = CAPELLAAuthConfig.from_dict(capella_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CAPELLAConfig.md b/docs/CAPELLAConfig.md new file mode 100644 index 0000000..d3d2f04 --- /dev/null +++ b/docs/CAPELLAConfig.md @@ -0,0 +1,33 @@ +# CAPELLAConfig + +Configuration for Couchbase Capella connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**bucket** | **str** | Bucket Name. Example: Enter bucket name | +**scope** | **str** | Scope Name. 
Example: Enter scope name | +**collection** | **str** | Collection Name. Example: Enter collection name | +**index** | **str** | Search Index Name. Example: Enter search index name | + +## Example + +```python +from vectorize_client.models.capella_config import CAPELLAConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of CAPELLAConfig from a JSON string +capella_config_instance = CAPELLAConfig.from_json(json) +# print the JSON string representation of the object +print(CAPELLAConfig.to_json()) + +# convert the object into a dict +capella_config_dict = capella_config_instance.to_dict() +# create an instance of CAPELLAConfig from a dict +capella_config_from_dict = CAPELLAConfig.from_dict(capella_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CONFLUENCEAuthConfig.md b/docs/CONFLUENCEAuthConfig.md new file mode 100644 index 0000000..85d29f1 --- /dev/null +++ b/docs/CONFLUENCEAuthConfig.md @@ -0,0 +1,33 @@ +# CONFLUENCEAuthConfig + +Authentication configuration for Confluence + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**username** | **str** | Username. Example: Enter your Confluence username | +**api_token** | **str** | API Token. Example: Enter your Confluence API token | +**domain** | **str** | Domain. Example: Enter your Confluence domain (e.g. 
my-domain.atlassian.net or confluence.<my-company>.com) | + +## Example + +```python +from vectorize_client.models.confluence_auth_config import CONFLUENCEAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of CONFLUENCEAuthConfig from a JSON string +confluence_auth_config_instance = CONFLUENCEAuthConfig.from_json(json) +# print the JSON string representation of the object +print(CONFLUENCEAuthConfig.to_json()) + +# convert the object into a dict +confluence_auth_config_dict = confluence_auth_config_instance.to_dict() +# create an instance of CONFLUENCEAuthConfig from a dict +confluence_auth_config_from_dict = CONFLUENCEAuthConfig.from_dict(confluence_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CONFLUENCEConfig.md b/docs/CONFLUENCEConfig.md new file mode 100644 index 0000000..22f7f97 --- /dev/null +++ b/docs/CONFLUENCEConfig.md @@ -0,0 +1,31 @@ +# CONFLUENCEConfig + +Configuration for Confluence connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**spaces** | **str** | Spaces. Example: Spaces to include (name, key or id) | +**root_parents** | **str** | Root Parents. 
Example: Enter root parent pages | [optional] + +## Example + +```python +from vectorize_client.models.confluence_config import CONFLUENCEConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of CONFLUENCEConfig from a JSON string +confluence_config_instance = CONFLUENCEConfig.from_json(json) +# print the JSON string representation of the object +print(CONFLUENCEConfig.to_json()) + +# convert the object into a dict +confluence_config_dict = confluence_config_instance.to_dict() +# create an instance of CONFLUENCEConfig from a dict +confluence_config_from_dict = CONFLUENCEConfig.from_dict(confluence_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Capella.md b/docs/Capella.md new file mode 100644 index 0000000..c038eb7 --- /dev/null +++ b/docs/Capella.md @@ -0,0 +1,31 @@ +# Capella + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"CAPELLA\") | +**config** | [**CAPELLAConfig**](CAPELLAConfig.md) | | + +## Example + +```python +from vectorize_client.models.capella import Capella + +# TODO update the JSON string below +json = "{}" +# create an instance of Capella from a JSON string +capella_instance = Capella.from_json(json) +# print the JSON string representation of the object +print(Capella.to_json()) + +# convert the object into a dict +capella_dict = capella_instance.to_dict() +# create an instance of Capella from a dict +capella_from_dict = Capella.from_dict(capella_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Capella1.md b/docs/Capella1.md new file mode 100644 index 
0000000..32ca7ae --- /dev/null +++ b/docs/Capella1.md @@ -0,0 +1,29 @@ +# Capella1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**CAPELLAConfig**](CAPELLAConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.capella1 import Capella1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Capella1 from a JSON string +capella1_instance = Capella1.from_json(json) +# print the JSON string representation of the object +print(Capella1.to_json()) + +# convert the object into a dict +capella1_dict = capella1_instance.to_dict() +# create an instance of Capella1 from a dict +capella1_from_dict = Capella1.from_dict(capella1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Capella2.md b/docs/Capella2.md new file mode 100644 index 0000000..b06c81d --- /dev/null +++ b/docs/Capella2.md @@ -0,0 +1,30 @@ +# Capella2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"CAPELLA\") | + +## Example + +```python +from vectorize_client.models.capella2 import Capella2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Capella2 from a JSON string +capella2_instance = Capella2.from_json(json) +# print the JSON string representation of the object +print(Capella2.to_json()) + +# convert the object into a dict +capella2_dict = capella2_instance.to_dict() +# create an instance of Capella2 from a dict +capella2_from_dict = Capella2.from_dict(capella2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + 
+ diff --git a/docs/Confluence.md b/docs/Confluence.md new file mode 100644 index 0000000..f202da5 --- /dev/null +++ b/docs/Confluence.md @@ -0,0 +1,31 @@ +# Confluence + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"CONFLUENCE\") | +**config** | [**CONFLUENCEConfig**](CONFLUENCEConfig.md) | | + +## Example + +```python +from vectorize_client.models.confluence import Confluence + +# TODO update the JSON string below +json = "{}" +# create an instance of Confluence from a JSON string +confluence_instance = Confluence.from_json(json) +# print the JSON string representation of the object +print(Confluence.to_json()) + +# convert the object into a dict +confluence_dict = confluence_instance.to_dict() +# create an instance of Confluence from a dict +confluence_from_dict = Confluence.from_dict(confluence_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Confluence1.md b/docs/Confluence1.md new file mode 100644 index 0000000..43cce9f --- /dev/null +++ b/docs/Confluence1.md @@ -0,0 +1,29 @@ +# Confluence1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**CONFLUENCEConfig**](CONFLUENCEConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.confluence1 import Confluence1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Confluence1 from a JSON string +confluence1_instance = Confluence1.from_json(json) +# print the JSON string representation of the object +print(Confluence1.to_json()) + +# convert the object into a dict +confluence1_dict = confluence1_instance.to_dict() +# create an instance of Confluence1 from a dict 
+confluence1_from_dict = Confluence1.from_dict(confluence1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Confluence2.md b/docs/Confluence2.md new file mode 100644 index 0000000..66afc8f --- /dev/null +++ b/docs/Confluence2.md @@ -0,0 +1,30 @@ +# Confluence2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"CONFLUENCE\") | + +## Example + +```python +from vectorize_client.models.confluence2 import Confluence2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Confluence2 from a JSON string +confluence2_instance = Confluence2.from_json(json) +# print the JSON string representation of the object +print(Confluence2.to_json()) + +# convert the object into a dict +confluence2_dict = confluence2_instance.to_dict() +# create an instance of Confluence2 from a dict +confluence2_from_dict = Confluence2.from_dict(confluence2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/ConnectorsAIPlatformsApi.md b/docs/ConnectorsAIPlatformsApi.md new file mode 100644 index 0000000..437ef7a --- /dev/null +++ b/docs/ConnectorsAIPlatformsApi.md @@ -0,0 +1,432 @@ +# vectorize_client.ConnectorsAIPlatformsApi + +All URIs are relative to *https://api.vectorize.io/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**create_ai_platform_connector**](ConnectorsAIPlatformsApi.md#create_ai_platform_connector) | **POST** /org/{organizationId}/connectors/aiplatforms | Create a new AI platform connector +[**delete_ai_platform**](ConnectorsAIPlatformsApi.md#delete_ai_platform) 
| **DELETE** /org/{organizationId}/connectors/aiplatforms/{aiplatformId} | Delete an AI platform connector +[**get_ai_platform_connector**](ConnectorsAIPlatformsApi.md#get_ai_platform_connector) | **GET** /org/{organizationId}/connectors/aiplatforms/{aiplatformId} | Get an AI platform connector +[**get_ai_platform_connectors**](ConnectorsAIPlatformsApi.md#get_ai_platform_connectors) | **GET** /org/{organizationId}/connectors/aiplatforms | Get all existing AI Platform connectors +[**update_ai_platform_connector**](ConnectorsAIPlatformsApi.md#update_ai_platform_connector) | **PATCH** /org/{organizationId}/connectors/aiplatforms/{aiplatformId} | Update an AI Platform connector + + +# **create_ai_platform_connector** +> CreateAIPlatformConnectorResponse create_ai_platform_connector(organization_id, create_ai_platform_connector_request_inner) + +Create a new AI platform connector + +Creates a new AI platform connector for embeddings and processing. The specific configuration fields required depend on the platform type selected. + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.create_ai_platform_connector_request_inner import CreateAIPlatformConnectorRequestInner +from vectorize_client.models.create_ai_platform_connector_response import CreateAIPlatformConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsAIPlatformsApi(api_client) + organization_id = 'organization_id_example' # str | + create_ai_platform_connector_request_inner = [{"name":"My CreateAIPlatformConnectorRequest","type":"BEDROCK","config":{"name":"My BEDROCKAuthConfig","access-key":"AKIAIOSFODNN7EXAMPLE","key":"key_example_123456","region":"us-east-1"}}] # List[CreateAIPlatformConnectorRequestInner] | + + try: + # Create a new AI platform connector + api_response = api_instance.create_ai_platform_connector(organization_id, create_ai_platform_connector_request_inner) + print("The response of ConnectorsAIPlatformsApi->create_ai_platform_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsAIPlatformsApi->create_ai_platform_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **create_ai_platform_connector_request_inner** | [**List[CreateAIPlatformConnectorRequestInner]**](CreateAIPlatformConnectorRequestInner.md)| | + +### Return type + +[**CreateAIPlatformConnectorResponse**](CreateAIPlatformConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Connector successfully created | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** 
| Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_ai_platform** +> DeleteAIPlatformConnectorResponse delete_ai_platform(organization, aiplatform_id) + +Delete an AI platform connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.delete_ai_platform_connector_response import DeleteAIPlatformConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsAIPlatformsApi(api_client) + organization = 'organization_example' # str | + aiplatform_id = 'aiplatform_id_example' # str | + + try: + # Delete an AI platform connector + api_response = api_instance.delete_ai_platform(organization, aiplatform_id) + print("The response of ConnectorsAIPlatformsApi->delete_ai_platform:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsAIPlatformsApi->delete_ai_platform: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **aiplatform_id** | **str**| | + +### Return type + +[**DeleteAIPlatformConnectorResponse**](DeleteAIPlatformConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | AI Platform connector successfully deleted | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_ai_platform_connector** +> AIPlatform get_ai_platform_connector(organization, aiplatform_id) + +Get an AI platform connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + 
+```python +import vectorize_client +from vectorize_client.models.ai_platform import AIPlatform +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsAIPlatformsApi(api_client) + organization = 'organization_example' # str | + aiplatform_id = 'aiplatform_id_example' # str | + + try: + # Get an AI platform connector + api_response = api_instance.get_ai_platform_connector(organization, aiplatform_id) + print("The response of ConnectorsAIPlatformsApi->get_ai_platform_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsAIPlatformsApi->get_ai_platform_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **aiplatform_id** | **str**| | + +### Return type + +[**AIPlatform**](AIPlatform.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | 
+|-------------|-------------|------------------| +**200** | Get an AI platform connector | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_ai_platform_connectors** +> GetAIPlatformConnectors200Response get_ai_platform_connectors(organization_id) + +Get all existing AI Platform connectors + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.get_ai_platform_connectors200_response import GetAIPlatformConnectors200Response +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsAIPlatformsApi(api_client) + organization_id = 'organization_id_example' # str | + + try: + # Get all existing AI Platform connectors + api_response = api_instance.get_ai_platform_connectors(organization_id) + print("The response of ConnectorsAIPlatformsApi->get_ai_platform_connectors:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsAIPlatformsApi->get_ai_platform_connectors: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + +### Return type + +[**GetAIPlatformConnectors200Response**](GetAIPlatformConnectors200Response.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Get all existing AI Platform connectors | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_ai_platform_connector** +> UpdateAIPlatformConnectorResponse update_ai_platform_connector(organization, aiplatform_id, update_aiplatform_connector_request) + +Update an AI Platform connector + +### Example + +* Bearer (JWT) Authentication 
(bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.update_ai_platform_connector_response import UpdateAIPlatformConnectorResponse +from vectorize_client.models.update_aiplatform_connector_request import UpdateAiplatformConnectorRequest +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsAIPlatformsApi(api_client) + organization = 'organization_example' # str | + aiplatform_id = 'aiplatform_id_example' # str | + update_aiplatform_connector_request = vectorize_client.UpdateAiplatformConnectorRequest() # UpdateAiplatformConnectorRequest | + + try: + # Update an AI Platform connector + api_response = api_instance.update_ai_platform_connector(organization, aiplatform_id, update_aiplatform_connector_request) + print("The response of ConnectorsAIPlatformsApi->update_ai_platform_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsAIPlatformsApi->update_ai_platform_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | 
+ **aiplatform_id** | **str**| | + **update_aiplatform_connector_request** | [**UpdateAiplatformConnectorRequest**](UpdateAiplatformConnectorRequest.md)| | + +### Return type + +[**UpdateAIPlatformConnectorResponse**](UpdateAIPlatformConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | AI Platform connector successfully updated | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/docs/ConnectorsDestinationConnectorsApi.md b/docs/ConnectorsDestinationConnectorsApi.md new file mode 100644 index 0000000..da227fa --- /dev/null +++ b/docs/ConnectorsDestinationConnectorsApi.md @@ -0,0 +1,432 @@ +# vectorize_client.ConnectorsDestinationConnectorsApi + +All URIs are relative to *https://api.vectorize.io/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**create_destination_connector**](ConnectorsDestinationConnectorsApi.md#create_destination_connector) | **POST** /org/{organizationId}/connectors/destinations | Create a new destination connector +[**delete_destination_connector**](ConnectorsDestinationConnectorsApi.md#delete_destination_connector) | **DELETE** /org/{organizationId}/connectors/destinations/{destinationConnectorId} | Delete a destination connector +[**get_destination_connector**](ConnectorsDestinationConnectorsApi.md#get_destination_connector) | **GET** /org/{organizationId}/connectors/destinations/{destinationConnectorId} | Get a destination 
connector +[**get_destination_connectors**](ConnectorsDestinationConnectorsApi.md#get_destination_connectors) | **GET** /org/{organizationId}/connectors/destinations | Get all existing destination connectors +[**update_destination_connector**](ConnectorsDestinationConnectorsApi.md#update_destination_connector) | **PATCH** /org/{organizationId}/connectors/destinations/{destinationConnectorId} | Update a destination connector + + +# **create_destination_connector** +> CreateDestinationConnectorResponse create_destination_connector(organization_id, create_destination_connector_request_inner) + +Create a new destination connector + +Creates a new destination connector for data storage. The specific configuration fields required depend on the connector type selected. + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.create_destination_connector_request_inner import CreateDestinationConnectorRequestInner +from vectorize_client.models.create_destination_connector_response import CreateDestinationConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsDestinationConnectorsApi(api_client) + organization_id = 'organization_id_example' # str | + create_destination_connector_request_inner = [{"name":"My CreateDestinationConnectorRequest","type":"CAPELLA","config":{"bucket":"example-bucket","scope":"example-scope","collection":"example-collection","index":"example-index"}}] # List[CreateDestinationConnectorRequestInner] | + + try: + # Create a new destination connector + api_response = api_instance.create_destination_connector(organization_id, create_destination_connector_request_inner) + print("The response of ConnectorsDestinationConnectorsApi->create_destination_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsDestinationConnectorsApi->create_destination_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **create_destination_connector_request_inner** | [**List[CreateDestinationConnectorRequestInner]**](CreateDestinationConnectorRequestInner.md)| | + +### Return type + +[**CreateDestinationConnectorResponse**](CreateDestinationConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Connector successfully created | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | 
+**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_destination_connector** +> DeleteDestinationConnectorResponse delete_destination_connector(organization, destination_connector_id) + +Delete a destination connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.delete_destination_connector_response import DeleteDestinationConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsDestinationConnectorsApi(api_client) + organization = 'organization_example' # str | + destination_connector_id = 'destination_connector_id_example' # str | + + try: + # Delete a destination connector + api_response = api_instance.delete_destination_connector(organization, destination_connector_id) + print("The response of ConnectorsDestinationConnectorsApi->delete_destination_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsDestinationConnectorsApi->delete_destination_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **destination_connector_id** | **str**| | + +### Return type + +[**DeleteDestinationConnectorResponse**](DeleteDestinationConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Destination connector successfully deleted | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_destination_connector** +> DestinationConnector 
get_destination_connector(organization, destination_connector_id) + +Get a destination connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.destination_connector import DestinationConnector +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsDestinationConnectorsApi(api_client) + organization = 'organization_example' # str | + destination_connector_id = 'destination_connector_id_example' # str | + + try: + # Get a destination connector + api_response = api_instance.get_destination_connector(organization, destination_connector_id) + print("The response of ConnectorsDestinationConnectorsApi->get_destination_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsDestinationConnectorsApi->get_destination_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **destination_connector_id** | **str**| | + +### Return type + 
+[**DestinationConnector**](DestinationConnector.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Get a destination connector | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_destination_connectors** +> GetDestinationConnectors200Response get_destination_connectors(organization_id) + +Get all existing destination connectors + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.get_destination_connectors200_response import GetDestinationConnectors200Response +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsDestinationConnectorsApi(api_client) + organization_id = 'organization_id_example' # str | + + try: + # Get all existing destination connectors + api_response = api_instance.get_destination_connectors(organization_id) + print("The response of ConnectorsDestinationConnectorsApi->get_destination_connectors:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsDestinationConnectorsApi->get_destination_connectors: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + +### Return type + +[**GetDestinationConnectors200Response**](GetDestinationConnectors200Response.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Get all destination connectors | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_destination_connector** +> UpdateDestinationConnectorResponse update_destination_connector(organization, destination_connector_id, update_destination_connector_request) + +Update a destination connector + +### Example + 
+* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.update_destination_connector_request import UpdateDestinationConnectorRequest +from vectorize_client.models.update_destination_connector_response import UpdateDestinationConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsDestinationConnectorsApi(api_client) + organization = 'organization_example' # str | + destination_connector_id = 'destination_connector_id_example' # str | + update_destination_connector_request = vectorize_client.UpdateDestinationConnectorRequest() # UpdateDestinationConnectorRequest | + + try: + # Update a destination connector + api_response = api_instance.update_destination_connector(organization, destination_connector_id, update_destination_connector_request) + print("The response of ConnectorsDestinationConnectorsApi->update_destination_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsDestinationConnectorsApi->update_destination_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | 
Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **destination_connector_id** | **str**| | + **update_destination_connector_request** | [**UpdateDestinationConnectorRequest**](UpdateDestinationConnectorRequest.md)| | + +### Return type + +[**UpdateDestinationConnectorResponse**](UpdateDestinationConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Destination connector successfully updated | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/docs/ConnectorsSourceConnectorsApi.md b/docs/ConnectorsSourceConnectorsApi.md new file mode 100644 index 0000000..2358517 --- /dev/null +++ b/docs/ConnectorsSourceConnectorsApi.md @@ -0,0 +1,693 @@ +# vectorize_client.ConnectorsSourceConnectorsApi + +All URIs are relative to *https://api.vectorize.io/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_user_to_source_connector**](ConnectorsSourceConnectorsApi.md#add_user_to_source_connector) | **POST** /org/{organizationId}/connectors/sources/{sourceConnectorId}/users | Add a user to a source connector +[**create_source_connector**](ConnectorsSourceConnectorsApi.md#create_source_connector) | **POST** /org/{organizationId}/connectors/sources | Create a new source connector +[**delete_source_connector**](ConnectorsSourceConnectorsApi.md#delete_source_connector) | **DELETE** 
/org/{organizationId}/connectors/sources/{sourceConnectorId} | Delete a source connector +[**delete_user_from_source_connector**](ConnectorsSourceConnectorsApi.md#delete_user_from_source_connector) | **DELETE** /org/{organizationId}/connectors/sources/{sourceConnectorId}/users | Delete a source connector user +[**get_source_connector**](ConnectorsSourceConnectorsApi.md#get_source_connector) | **GET** /org/{organizationId}/connectors/sources/{sourceConnectorId} | Get a source connector +[**get_source_connectors**](ConnectorsSourceConnectorsApi.md#get_source_connectors) | **GET** /org/{organizationId}/connectors/sources | Get all existing source connectors +[**update_source_connector**](ConnectorsSourceConnectorsApi.md#update_source_connector) | **PATCH** /org/{organizationId}/connectors/sources/{sourceConnectorId} | Update a source connector +[**update_user_in_source_connector**](ConnectorsSourceConnectorsApi.md#update_user_in_source_connector) | **PATCH** /org/{organizationId}/connectors/sources/{sourceConnectorId}/users | Update a source connector user + + +# **add_user_to_source_connector** +> AddUserFromSourceConnectorResponse add_user_to_source_connector(organization, source_connector_id, add_user_to_source_connector_request) + +Add a user to a source connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.add_user_from_source_connector_response import AddUserFromSourceConnectorResponse +from vectorize_client.models.add_user_to_source_connector_request import AddUserToSourceConnectorRequest +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. 
+configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsSourceConnectorsApi(api_client) + organization = 'organization_example' # str | + source_connector_id = 'source_connector_id_example' # str | + add_user_to_source_connector_request = {"userId":"29cc613c-dcb8-429e-88fe-be19dbd8b312","selectedFiles":{},"refreshToken":"refresh_token_example_123456","accessToken":"access_token_example_123456"} # AddUserToSourceConnectorRequest | + + try: + # Add a user to a source connector + api_response = api_instance.add_user_to_source_connector(organization, source_connector_id, add_user_to_source_connector_request) + print("The response of ConnectorsSourceConnectorsApi->add_user_to_source_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsSourceConnectorsApi->add_user_to_source_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **source_connector_id** | **str**| | + **add_user_to_source_connector_request** | [**AddUserToSourceConnectorRequest**](AddUserToSourceConnectorRequest.md)| | + +### Return type + +[**AddUserFromSourceConnectorResponse**](AddUserFromSourceConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + 
- **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | User successfully added to the source connector | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_source_connector** +> CreateSourceConnectorResponse create_source_connector(organization_id, create_source_connector_request_inner) + +Create a new source connector + +Creates a new source connector for data ingestion. The specific configuration fields required depend on the connector type selected. + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.create_source_connector_request_inner import CreateSourceConnectorRequestInner +from vectorize_client.models.create_source_connector_response import CreateSourceConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+
+# Configure Bearer authorization (JWT): bearerAuth
+configuration = vectorize_client.Configuration(
+    access_token = os.environ["BEARER_TOKEN"]  # requires `import os` alongside the imports above
+)
+
+# Enter a context with an instance of the API client
+with vectorize_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = vectorize_client.ConnectorsSourceConnectorsApi(api_client)
+    organization_id = 'organization_id_example' # str | 
+    create_source_connector_request_inner = [{"name":"My CreateSourceConnectorRequest","type":"AWS_S3","config":{"file-extensions":"pdf","idle-time":300,"recursive":True,"path-prefix":"/example/path","path-metadata-regex":"/example/path","path-regex-group-names":"/example/path"}}] # List[CreateSourceConnectorRequestInner] | 
+
+    try:
+        # Create a new source connector
+        api_response = api_instance.create_source_connector(organization_id, create_source_connector_request_inner)
+        print("The response of ConnectorsSourceConnectorsApi->create_source_connector:\n")
+        pprint(api_response)
+    except Exception as e:
+        print("Exception when calling ConnectorsSourceConnectorsApi->create_source_connector: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **organization_id** | **str**| | 
+ **create_source_connector_request_inner** | [**List[CreateSourceConnectorRequestInner]**](CreateSourceConnectorRequestInner.md)| | 
+
+### Return type
+
+[**CreateSourceConnectorResponse**](CreateSourceConnectorResponse.md)
+
+### Authorization
+
+[bearerAuth](../README.md#bearerAuth)
+
+### HTTP request headers
+
+ - **Content-Type**: application/json
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Connector successfully created | - |
+**400** | Invalid request | - |
+**401** | Unauthorized | - |
+**403** | Forbidden | - |
+**404** | Not found | - |
+**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_source_connector** +> DeleteSourceConnectorResponse delete_source_connector(organization, source_connector_id) + +Delete a source connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.delete_source_connector_response import DeleteSourceConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsSourceConnectorsApi(api_client) + organization = 'organization_example' # str | + source_connector_id = 'source_connector_id_example' # str | + + try: + # Delete a source connector + api_response = api_instance.delete_source_connector(organization, source_connector_id) + print("The response of ConnectorsSourceConnectorsApi->delete_source_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsSourceConnectorsApi->delete_source_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **source_connector_id** | **str**| | + +### Return type + +[**DeleteSourceConnectorResponse**](DeleteSourceConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Source connector successfully deleted | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_user_from_source_connector** +> RemoveUserFromSourceConnectorResponse delete_user_from_source_connector(organization, source_connector_id, 
remove_user_from_source_connector_request) + +Delete a source connector user + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.remove_user_from_source_connector_request import RemoveUserFromSourceConnectorRequest +from vectorize_client.models.remove_user_from_source_connector_response import RemoveUserFromSourceConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsSourceConnectorsApi(api_client) + organization = 'organization_example' # str | + source_connector_id = 'source_connector_id_example' # str | + remove_user_from_source_connector_request = {"userId":"a3703b11-2eba-45e3-87cd-7e5e7c076e3a"} # RemoveUserFromSourceConnectorRequest | + + try: + # Delete a source connector user + api_response = api_instance.delete_user_from_source_connector(organization, source_connector_id, remove_user_from_source_connector_request) + print("The response of ConnectorsSourceConnectorsApi->delete_user_from_source_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling 
ConnectorsSourceConnectorsApi->delete_user_from_source_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **source_connector_id** | **str**| | + **remove_user_from_source_connector_request** | [**RemoveUserFromSourceConnectorRequest**](RemoveUserFromSourceConnectorRequest.md)| | + +### Return type + +[**RemoveUserFromSourceConnectorResponse**](RemoveUserFromSourceConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | User successfully removed from the source connector | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_source_connector** +> SourceConnector get_source_connector(organization, source_connector_id) + +Get a source connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.source_connector import SourceConnector +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. 
+# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsSourceConnectorsApi(api_client) + organization = 'organization_example' # str | + source_connector_id = 'source_connector_id_example' # str | + + try: + # Get a source connector + api_response = api_instance.get_source_connector(organization, source_connector_id) + print("The response of ConnectorsSourceConnectorsApi->get_source_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsSourceConnectorsApi->get_source_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **source_connector_id** | **str**| | + +### Return type + +[**SourceConnector**](SourceConnector.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Get a source connector | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_source_connectors** +> GetSourceConnectors200Response get_source_connectors(organization_id) + +Get all 
existing source connectors + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.get_source_connectors200_response import GetSourceConnectors200Response +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsSourceConnectorsApi(api_client) + organization_id = 'organization_id_example' # str | + + try: + # Get all existing source connectors + api_response = api_instance.get_source_connectors(organization_id) + print("The response of ConnectorsSourceConnectorsApi->get_source_connectors:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsSourceConnectorsApi->get_source_connectors: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + +### Return type + +[**GetSourceConnectors200Response**](GetSourceConnectors200Response.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### 
HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Get all source connectors | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_source_connector** +> UpdateSourceConnectorResponse update_source_connector(organization, source_connector_id, update_source_connector_request) + +Update a source connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.update_source_connector_request import UpdateSourceConnectorRequest +from vectorize_client.models.update_source_connector_response import UpdateSourceConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsSourceConnectorsApi(api_client) + organization = 'organization_example' # str | + source_connector_id = 'source_connector_id_example' # str | + update_source_connector_request = vectorize_client.UpdateSourceConnectorRequest() # UpdateSourceConnectorRequest | + + try: + # Update a source connector + api_response = api_instance.update_source_connector(organization, source_connector_id, update_source_connector_request) + print("The response of ConnectorsSourceConnectorsApi->update_source_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsSourceConnectorsApi->update_source_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **source_connector_id** | **str**| | + **update_source_connector_request** | [**UpdateSourceConnectorRequest**](UpdateSourceConnectorRequest.md)| | + +### Return type + +[**UpdateSourceConnectorResponse**](UpdateSourceConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Source connector successfully updated | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_user_in_source_connector** +> UpdateUserInSourceConnectorResponse update_user_in_source_connector(organization, source_connector_id, update_user_in_source_connector_request) + +Update a source connector user + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.update_user_in_source_connector_request import UpdateUserInSourceConnectorRequest +from vectorize_client.models.update_user_in_source_connector_response import UpdateUserInSourceConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ConnectorsSourceConnectorsApi(api_client) + organization = 'organization_example' # str | + source_connector_id = 'source_connector_id_example' # str | + update_user_in_source_connector_request = {"userId":"1dda2405-5b9d-403a-bdf7-01a78cb796da","selectedFiles":{},"refreshToken":"refresh_token_example_123456","accessToken":"access_token_example_123456"} # UpdateUserInSourceConnectorRequest | + + try: + # Update a source connector user + api_response = api_instance.update_user_in_source_connector(organization, source_connector_id, update_user_in_source_connector_request) + print("The response of ConnectorsSourceConnectorsApi->update_user_in_source_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ConnectorsSourceConnectorsApi->update_user_in_source_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **source_connector_id** | **str**| | + **update_user_in_source_connector_request** | [**UpdateUserInSourceConnectorRequest**](UpdateUserInSourceConnectorRequest.md)| | + +### Return type + +[**UpdateUserInSourceConnectorResponse**](UpdateUserInSourceConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | User successfully updated in the source connector | - | +**400** | Invalid request | - | 
+**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/docs/CreateAIPlatformConnector.md b/docs/CreateAIPlatformConnector.md new file mode 100644 index 0000000..aa2b666 --- /dev/null +++ b/docs/CreateAIPlatformConnector.md @@ -0,0 +1,31 @@ +# CreateAIPlatformConnector + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**type** | [**AIPlatformType**](AIPlatformType.md) | | +**config** | **Dict[str, Optional[object]]** | | [optional] + +## Example + +```python +from vectorize_client.models.create_ai_platform_connector import CreateAIPlatformConnector + +# TODO update the JSON string below +json = "{}" +# create an instance of CreateAIPlatformConnector from a JSON string +create_ai_platform_connector_instance = CreateAIPlatformConnector.from_json(json) +# print the JSON string representation of the object +print(CreateAIPlatformConnector.to_json()) + +# convert the object into a dict +create_ai_platform_connector_dict = create_ai_platform_connector_instance.to_dict() +# create an instance of CreateAIPlatformConnector from a dict +create_ai_platform_connector_from_dict = CreateAIPlatformConnector.from_dict(create_ai_platform_connector_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreateAIPlatformConnectorRequestInner.md b/docs/CreateAIPlatformConnectorRequestInner.md new file mode 100644 index 0000000..d10a800 --- /dev/null +++ b/docs/CreateAIPlatformConnectorRequestInner.md @@ -0,0 +1,31 @@ +# CreateAIPlatformConnectorRequestInner + + +## Properties + +Name | Type | 
Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"BEDROCK\") | +**config** | [**VOYAGEAuthConfig**](VOYAGEAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.create_ai_platform_connector_request_inner import CreateAIPlatformConnectorRequestInner + +# TODO update the JSON string below +json = "{}" +# create an instance of CreateAIPlatformConnectorRequestInner from a JSON string +create_ai_platform_connector_request_inner_instance = CreateAIPlatformConnectorRequestInner.from_json(json) +# print the JSON string representation of the object +print(CreateAIPlatformConnectorRequestInner.to_json()) + +# convert the object into a dict +create_ai_platform_connector_request_inner_dict = create_ai_platform_connector_request_inner_instance.to_dict() +# create an instance of CreateAIPlatformConnectorRequestInner from a dict +create_ai_platform_connector_request_inner_from_dict = CreateAIPlatformConnectorRequestInner.from_dict(create_ai_platform_connector_request_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreateAIPlatformConnectorResponse.md b/docs/CreateAIPlatformConnectorResponse.md new file mode 100644 index 0000000..7727f5e --- /dev/null +++ b/docs/CreateAIPlatformConnectorResponse.md @@ -0,0 +1,30 @@ +# CreateAIPlatformConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**connectors** | [**List[CreatedAIPlatformConnector]**](CreatedAIPlatformConnector.md) | | + +## Example + +```python +from vectorize_client.models.create_ai_platform_connector_response import CreateAIPlatformConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an 
instance of CreateAIPlatformConnectorResponse from a JSON string +create_ai_platform_connector_response_instance = CreateAIPlatformConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(CreateAIPlatformConnectorResponse.to_json()) + +# convert the object into a dict +create_ai_platform_connector_response_dict = create_ai_platform_connector_response_instance.to_dict() +# create an instance of CreateAIPlatformConnectorResponse from a dict +create_ai_platform_connector_response_from_dict = CreateAIPlatformConnectorResponse.from_dict(create_ai_platform_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreateDestinationConnector.md b/docs/CreateDestinationConnector.md new file mode 100644 index 0000000..0cf3211 --- /dev/null +++ b/docs/CreateDestinationConnector.md @@ -0,0 +1,31 @@ +# CreateDestinationConnector + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**type** | [**DestinationConnectorType**](DestinationConnectorType.md) | | +**config** | **Dict[str, Optional[object]]** | | [optional] + +## Example + +```python +from vectorize_client.models.create_destination_connector import CreateDestinationConnector + +# TODO update the JSON string below +json = "{}" +# create an instance of CreateDestinationConnector from a JSON string +create_destination_connector_instance = CreateDestinationConnector.from_json(json) +# print the JSON string representation of the object +print(CreateDestinationConnector.to_json()) + +# convert the object into a dict +create_destination_connector_dict = create_destination_connector_instance.to_dict() +# create an instance of CreateDestinationConnector from a dict +create_destination_connector_from_dict = 
CreateDestinationConnector.from_dict(create_destination_connector_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreateDestinationConnectorRequestInner.md b/docs/CreateDestinationConnectorRequestInner.md new file mode 100644 index 0000000..86590cb --- /dev/null +++ b/docs/CreateDestinationConnectorRequestInner.md @@ -0,0 +1,31 @@ +# CreateDestinationConnectorRequestInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"CAPELLA\") | +**config** | [**TURBOPUFFERConfig**](TURBOPUFFERConfig.md) | | + +## Example + +```python +from vectorize_client.models.create_destination_connector_request_inner import CreateDestinationConnectorRequestInner + +# TODO update the JSON string below +json = "{}" +# create an instance of CreateDestinationConnectorRequestInner from a JSON string +create_destination_connector_request_inner_instance = CreateDestinationConnectorRequestInner.from_json(json) +# print the JSON string representation of the object +print(CreateDestinationConnectorRequestInner.to_json()) + +# convert the object into a dict +create_destination_connector_request_inner_dict = create_destination_connector_request_inner_instance.to_dict() +# create an instance of CreateDestinationConnectorRequestInner from a dict +create_destination_connector_request_inner_from_dict = CreateDestinationConnectorRequestInner.from_dict(create_destination_connector_request_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreateDestinationConnectorResponse.md b/docs/CreateDestinationConnectorResponse.md new file mode 100644 index 
0000000..67140c9 --- /dev/null +++ b/docs/CreateDestinationConnectorResponse.md @@ -0,0 +1,30 @@ +# CreateDestinationConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**connectors** | [**List[CreatedDestinationConnector]**](CreatedDestinationConnector.md) | | + +## Example + +```python +from vectorize_client.models.create_destination_connector_response import CreateDestinationConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of CreateDestinationConnectorResponse from a JSON string +create_destination_connector_response_instance = CreateDestinationConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(CreateDestinationConnectorResponse.to_json()) + +# convert the object into a dict +create_destination_connector_response_dict = create_destination_connector_response_instance.to_dict() +# create an instance of CreateDestinationConnectorResponse from a dict +create_destination_connector_response_from_dict = CreateDestinationConnectorResponse.from_dict(create_destination_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreatePipelineResponse.md b/docs/CreatePipelineResponse.md new file mode 100644 index 0000000..752d37f --- /dev/null +++ b/docs/CreatePipelineResponse.md @@ -0,0 +1,30 @@ +# CreatePipelineResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**data** | [**CreatePipelineResponseData**](CreatePipelineResponseData.md) | | + +## Example + +```python +from vectorize_client.models.create_pipeline_response import CreatePipelineResponse + +# TODO update the JSON string below +json = "{}" +# create an instance 
of CreatePipelineResponse from a JSON string +create_pipeline_response_instance = CreatePipelineResponse.from_json(json) +# print the JSON string representation of the object +print(CreatePipelineResponse.to_json()) + +# convert the object into a dict +create_pipeline_response_dict = create_pipeline_response_instance.to_dict() +# create an instance of CreatePipelineResponse from a dict +create_pipeline_response_from_dict = CreatePipelineResponse.from_dict(create_pipeline_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreatePipelineResponseData.md b/docs/CreatePipelineResponseData.md new file mode 100644 index 0000000..7e86a7a --- /dev/null +++ b/docs/CreatePipelineResponseData.md @@ -0,0 +1,29 @@ +# CreatePipelineResponseData + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | + +## Example + +```python +from vectorize_client.models.create_pipeline_response_data import CreatePipelineResponseData + +# TODO update the JSON string below +json = "{}" +# create an instance of CreatePipelineResponseData from a JSON string +create_pipeline_response_data_instance = CreatePipelineResponseData.from_json(json) +# print the JSON string representation of the object +print(CreatePipelineResponseData.to_json()) + +# convert the object into a dict +create_pipeline_response_data_dict = create_pipeline_response_data_instance.to_dict() +# create an instance of CreatePipelineResponseData from a dict +create_pipeline_response_data_from_dict = CreatePipelineResponseData.from_dict(create_pipeline_response_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreateSourceConnector.md 
b/docs/CreateSourceConnector.md new file mode 100644 index 0000000..c8674f1 --- /dev/null +++ b/docs/CreateSourceConnector.md @@ -0,0 +1,31 @@ +# CreateSourceConnector + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**type** | [**SourceConnectorType**](SourceConnectorType.md) | | +**config** | **Dict[str, Optional[object]]** | | [optional] + +## Example + +```python +from vectorize_client.models.create_source_connector import CreateSourceConnector + +# TODO update the JSON string below +json = "{}" +# create an instance of CreateSourceConnector from a JSON string +create_source_connector_instance = CreateSourceConnector.from_json(json) +# print the JSON string representation of the object +print(CreateSourceConnector.to_json()) + +# convert the object into a dict +create_source_connector_dict = create_source_connector_instance.to_dict() +# create an instance of CreateSourceConnector from a dict +create_source_connector_from_dict = CreateSourceConnector.from_dict(create_source_connector_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreateSourceConnectorRequestInner.md b/docs/CreateSourceConnectorRequestInner.md new file mode 100644 index 0000000..dcea7d4 --- /dev/null +++ b/docs/CreateSourceConnectorRequestInner.md @@ -0,0 +1,31 @@ +# CreateSourceConnectorRequestInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"AWS_S3\") | +**config** | [**FIREFLIESConfig**](FIREFLIESConfig.md) | | + +## Example + +```python +from vectorize_client.models.create_source_connector_request_inner import CreateSourceConnectorRequestInner + +# TODO update the JSON string below +json = 
"{}" +# create an instance of CreateSourceConnectorRequestInner from a JSON string +create_source_connector_request_inner_instance = CreateSourceConnectorRequestInner.from_json(json) +# print the JSON string representation of the object +print(CreateSourceConnectorRequestInner.to_json()) + +# convert the object into a dict +create_source_connector_request_inner_dict = create_source_connector_request_inner_instance.to_dict() +# create an instance of CreateSourceConnectorRequestInner from a dict +create_source_connector_request_inner_from_dict = CreateSourceConnectorRequestInner.from_dict(create_source_connector_request_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreateSourceConnectorResponse.md b/docs/CreateSourceConnectorResponse.md new file mode 100644 index 0000000..7124435 --- /dev/null +++ b/docs/CreateSourceConnectorResponse.md @@ -0,0 +1,30 @@ +# CreateSourceConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**connectors** | [**List[CreatedSourceConnector]**](CreatedSourceConnector.md) | | + +## Example + +```python +from vectorize_client.models.create_source_connector_response import CreateSourceConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of CreateSourceConnectorResponse from a JSON string +create_source_connector_response_instance = CreateSourceConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(CreateSourceConnectorResponse.to_json()) + +# convert the object into a dict +create_source_connector_response_dict = create_source_connector_response_instance.to_dict() +# create an instance of CreateSourceConnectorResponse from a dict +create_source_connector_response_from_dict = 
CreateSourceConnectorResponse.from_dict(create_source_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreatedAIPlatformConnector.md b/docs/CreatedAIPlatformConnector.md new file mode 100644 index 0000000..c6131de --- /dev/null +++ b/docs/CreatedAIPlatformConnector.md @@ -0,0 +1,30 @@ +# CreatedAIPlatformConnector + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**id** | **str** | | + +## Example + +```python +from vectorize_client.models.created_ai_platform_connector import CreatedAIPlatformConnector + +# TODO update the JSON string below +json = "{}" +# create an instance of CreatedAIPlatformConnector from a JSON string +created_ai_platform_connector_instance = CreatedAIPlatformConnector.from_json(json) +# print the JSON string representation of the object +print(CreatedAIPlatformConnector.to_json()) + +# convert the object into a dict +created_ai_platform_connector_dict = created_ai_platform_connector_instance.to_dict() +# create an instance of CreatedAIPlatformConnector from a dict +created_ai_platform_connector_from_dict = CreatedAIPlatformConnector.from_dict(created_ai_platform_connector_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreatedDestinationConnector.md b/docs/CreatedDestinationConnector.md new file mode 100644 index 0000000..236a87a --- /dev/null +++ b/docs/CreatedDestinationConnector.md @@ -0,0 +1,30 @@ +# CreatedDestinationConnector + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**id** | **str** | | + +## Example + +```python +from 
vectorize_client.models.created_destination_connector import CreatedDestinationConnector + +# TODO update the JSON string below +json = "{}" +# create an instance of CreatedDestinationConnector from a JSON string +created_destination_connector_instance = CreatedDestinationConnector.from_json(json) +# print the JSON string representation of the object +print(CreatedDestinationConnector.to_json()) + +# convert the object into a dict +created_destination_connector_dict = created_destination_connector_instance.to_dict() +# create an instance of CreatedDestinationConnector from a dict +created_destination_connector_from_dict = CreatedDestinationConnector.from_dict(created_destination_connector_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/CreatedSourceConnector.md b/docs/CreatedSourceConnector.md new file mode 100644 index 0000000..471753b --- /dev/null +++ b/docs/CreatedSourceConnector.md @@ -0,0 +1,30 @@ +# CreatedSourceConnector + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**id** | **str** | | + +## Example + +```python +from vectorize_client.models.created_source_connector import CreatedSourceConnector + +# TODO update the JSON string below +json = "{}" +# create an instance of CreatedSourceConnector from a JSON string +created_source_connector_instance = CreatedSourceConnector.from_json(json) +# print the JSON string representation of the object +print(CreatedSourceConnector.to_json()) + +# convert the object into a dict +created_source_connector_dict = created_source_connector_instance.to_dict() +# create an instance of CreatedSourceConnector from a dict +created_source_connector_from_dict = CreatedSourceConnector.from_dict(created_source_connector_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) 
[[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DATASTAXAuthConfig.md b/docs/DATASTAXAuthConfig.md new file mode 100644 index 0000000..8dcd8e3 --- /dev/null +++ b/docs/DATASTAXAuthConfig.md @@ -0,0 +1,32 @@ +# DATASTAXAuthConfig + +Authentication configuration for DataStax Astra + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your DataStax integration | +**endpoint_secret** | **str** | API Endpoint. Example: Enter your API endpoint | +**token** | **str** | Application Token. Example: Enter your application token | + +## Example + +```python +from vectorize_client.models.datastax_auth_config import DATASTAXAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DATASTAXAuthConfig from a JSON string +datastax_auth_config_instance = DATASTAXAuthConfig.from_json(json) +# print the JSON string representation of the object +print(DATASTAXAuthConfig.to_json()) + +# convert the object into a dict +datastax_auth_config_dict = datastax_auth_config_instance.to_dict() +# create an instance of DATASTAXAuthConfig from a dict +datastax_auth_config_from_dict = DATASTAXAuthConfig.from_dict(datastax_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DATASTAXConfig.md b/docs/DATASTAXConfig.md new file mode 100644 index 0000000..d6c8a6f --- /dev/null +++ b/docs/DATASTAXConfig.md @@ -0,0 +1,30 @@ +# DATASTAXConfig + +Configuration for DataStax Astra connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**collection** | **str** | Collection Name. 
Example: Enter collection name | + +## Example + +```python +from vectorize_client.models.datastax_config import DATASTAXConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DATASTAXConfig from a JSON string +datastax_config_instance = DATASTAXConfig.from_json(json) +# print the JSON string representation of the object +print(DATASTAXConfig.to_json()) + +# convert the object into a dict +datastax_config_dict = datastax_config_instance.to_dict() +# create an instance of DATASTAXConfig from a dict +datastax_config_from_dict = DATASTAXConfig.from_dict(datastax_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DISCORDAuthConfig.md b/docs/DISCORDAuthConfig.md new file mode 100644 index 0000000..b41aea0 --- /dev/null +++ b/docs/DISCORDAuthConfig.md @@ -0,0 +1,33 @@ +# DISCORDAuthConfig + +Authentication configuration for Discord + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**server_id** | **str** | Server ID. Example: Enter Server ID | +**bot_token** | **str** | Bot token. Example: Enter Token | +**channel_ids** | **str** | Channel ID. 
Example: Enter channel ID | + +## Example + +```python +from vectorize_client.models.discord_auth_config import DISCORDAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DISCORDAuthConfig from a JSON string +discord_auth_config_instance = DISCORDAuthConfig.from_json(json) +# print the JSON string representation of the object +print(DISCORDAuthConfig.to_json()) + +# convert the object into a dict +discord_auth_config_dict = discord_auth_config_instance.to_dict() +# create an instance of DISCORDAuthConfig from a dict +discord_auth_config_from_dict = DISCORDAuthConfig.from_dict(discord_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DISCORDConfig.md b/docs/DISCORDConfig.md new file mode 100644 index 0000000..6692eb3 --- /dev/null +++ b/docs/DISCORDConfig.md @@ -0,0 +1,36 @@ +# DISCORDConfig + +Configuration for Discord connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**emoji** | **str** | Emoji Filter. Example: Enter custom emoji filter name | [optional] +**author** | **str** | Author Filter. Example: Enter author name | [optional] +**ignore_author** | **str** | Ignore Author Filter. Example: Enter ignore author name | [optional] +**limit** | **float** | Limit. 
Example: Enter limit | [optional] [default to 10000] +**thread_message_inclusion** | **str** | Thread Message Inclusion | [optional] [default to 'ALL'] +**filter_logic** | **str** | Filter Logic | [optional] [default to 'AND'] +**thread_message_mode** | **str** | Thread Message Mode | [optional] [default to 'CONCATENATE'] + +## Example + +```python +from vectorize_client.models.discord_config import DISCORDConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DISCORDConfig from a JSON string +discord_config_instance = DISCORDConfig.from_json(json) +# print the JSON string representation of the object +print(DISCORDConfig.to_json()) + +# convert the object into a dict +discord_config_dict = discord_config_instance.to_dict() +# create an instance of DISCORDConfig from a dict +discord_config_from_dict = DISCORDConfig.from_dict(discord_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DROPBOXAuthConfig.md b/docs/DROPBOXAuthConfig.md new file mode 100644 index 0000000..cc916d9 --- /dev/null +++ b/docs/DROPBOXAuthConfig.md @@ -0,0 +1,31 @@ +# DROPBOXAuthConfig + +Authentication configuration for Dropbox (Legacy) + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**refresh_token** | **str** | Connect Dropbox to Vectorize. 
Example: Authorize | + +## Example + +```python +from vectorize_client.models.dropbox_auth_config import DROPBOXAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DROPBOXAuthConfig from a JSON string +dropbox_auth_config_instance = DROPBOXAuthConfig.from_json(json) +# print the JSON string representation of the object +print(DROPBOXAuthConfig.to_json()) + +# convert the object into a dict +dropbox_auth_config_dict = dropbox_auth_config_instance.to_dict() +# create an instance of DROPBOXAuthConfig from a dict +dropbox_auth_config_from_dict = DROPBOXAuthConfig.from_dict(dropbox_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DROPBOXConfig.md b/docs/DROPBOXConfig.md new file mode 100644 index 0000000..2994d5a --- /dev/null +++ b/docs/DROPBOXConfig.md @@ -0,0 +1,30 @@ +# DROPBOXConfig + +Configuration for Dropbox (Legacy) connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**path_prefix** | **str** | Read from these folders (optional). 
Example: Enter Path: /exampleFolder/subFolder | [optional] + +## Example + +```python +from vectorize_client.models.dropbox_config import DROPBOXConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DROPBOXConfig from a JSON string +dropbox_config_instance = DROPBOXConfig.from_json(json) +# print the JSON string representation of the object +print(DROPBOXConfig.to_json()) + +# convert the object into a dict +dropbox_config_dict = dropbox_config_instance.to_dict() +# create an instance of DROPBOXConfig from a dict +dropbox_config_from_dict = DROPBOXConfig.from_dict(dropbox_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DROPBOXOAUTHAuthConfig.md b/docs/DROPBOXOAUTHAuthConfig.md new file mode 100644 index 0000000..a18c009 --- /dev/null +++ b/docs/DROPBOXOAUTHAuthConfig.md @@ -0,0 +1,34 @@ +# DROPBOXOAUTHAuthConfig + +Authentication configuration for Dropbox OAuth + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**authorized_user** | **str** | Authorized User | [optional] +**selection_details** | **str** | Connect Dropbox to Vectorize. 
Example: Authorize | +**edited_users** | **str** | | [optional] +**reconnect_users** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.dropboxoauth_auth_config import DROPBOXOAUTHAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DROPBOXOAUTHAuthConfig from a JSON string +dropboxoauth_auth_config_instance = DROPBOXOAUTHAuthConfig.from_json(json) +# print the JSON string representation of the object +print(DROPBOXOAUTHAuthConfig.to_json()) + +# convert the object into a dict +dropboxoauth_auth_config_dict = dropboxoauth_auth_config_instance.to_dict() +# create an instance of DROPBOXOAUTHAuthConfig from a dict +dropboxoauth_auth_config_from_dict = DROPBOXOAUTHAuthConfig.from_dict(dropboxoauth_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DROPBOXOAUTHMULTIAuthConfig.md b/docs/DROPBOXOAUTHMULTIAuthConfig.md new file mode 100644 index 0000000..caa1dfb --- /dev/null +++ b/docs/DROPBOXOAUTHMULTIAuthConfig.md @@ -0,0 +1,33 @@ +# DROPBOXOAUTHMULTIAuthConfig + +Authentication configuration for Dropbox Multi-User (Vectorize) + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. 
Example: Enter a descriptive name | +**authorized_users** | **str** | Authorized Users | [optional] +**edited_users** | **str** | | [optional] +**deleted_users** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.dropboxoauthmulti_auth_config import DROPBOXOAUTHMULTIAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DROPBOXOAUTHMULTIAuthConfig from a JSON string +dropboxoauthmulti_auth_config_instance = DROPBOXOAUTHMULTIAuthConfig.from_json(json) +# print the JSON string representation of the object +print(DROPBOXOAUTHMULTIAuthConfig.to_json()) + +# convert the object into a dict +dropboxoauthmulti_auth_config_dict = dropboxoauthmulti_auth_config_instance.to_dict() +# create an instance of DROPBOXOAUTHMULTIAuthConfig from a dict +dropboxoauthmulti_auth_config_from_dict = DROPBOXOAUTHMULTIAuthConfig.from_dict(dropboxoauthmulti_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DROPBOXOAUTHMULTICUSTOMAuthConfig.md b/docs/DROPBOXOAUTHMULTICUSTOMAuthConfig.md new file mode 100644 index 0000000..ceef340 --- /dev/null +++ b/docs/DROPBOXOAUTHMULTICUSTOMAuthConfig.md @@ -0,0 +1,35 @@ +# DROPBOXOAUTHMULTICUSTOMAuthConfig + +Authentication configuration for Dropbox Multi-User (White Label) + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**app_key** | **str** | Dropbox App Key. Example: Enter App Key | +**app_secret** | **str** | Dropbox App Secret. 
Example: Enter App Secret | +**authorized_users** | **str** | Authorized Users | [optional] +**edited_users** | **str** | | [optional] +**deleted_users** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.dropboxoauthmulticustom_auth_config import DROPBOXOAUTHMULTICUSTOMAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DROPBOXOAUTHMULTICUSTOMAuthConfig from a JSON string +dropboxoauthmulticustom_auth_config_instance = DROPBOXOAUTHMULTICUSTOMAuthConfig.from_json(json) +# print the JSON string representation of the object +print(DROPBOXOAUTHMULTICUSTOMAuthConfig.to_json()) + +# convert the object into a dict +dropboxoauthmulticustom_auth_config_dict = dropboxoauthmulticustom_auth_config_instance.to_dict() +# create an instance of DROPBOXOAUTHMULTICUSTOMAuthConfig from a dict +dropboxoauthmulticustom_auth_config_from_dict = DROPBOXOAUTHMULTICUSTOMAuthConfig.from_dict(dropboxoauthmulticustom_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Datastax.md b/docs/Datastax.md new file mode 100644 index 0000000..c0d5791 --- /dev/null +++ b/docs/Datastax.md @@ -0,0 +1,31 @@ +# Datastax + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"DATASTAX\") | +**config** | [**DATASTAXConfig**](DATASTAXConfig.md) | | + +## Example + +```python +from vectorize_client.models.datastax import Datastax + +# TODO update the JSON string below +json = "{}" +# create an instance of Datastax from a JSON string +datastax_instance = Datastax.from_json(json) +# print the JSON string representation of the object +print(Datastax.to_json()) + +# convert the object into a dict +datastax_dict = datastax_instance.to_dict() 
+# create an instance of Datastax from a dict +datastax_from_dict = Datastax.from_dict(datastax_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Datastax1.md b/docs/Datastax1.md new file mode 100644 index 0000000..3c23f1f --- /dev/null +++ b/docs/Datastax1.md @@ -0,0 +1,29 @@ +# Datastax1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**DATASTAXConfig**](DATASTAXConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.datastax1 import Datastax1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Datastax1 from a JSON string +datastax1_instance = Datastax1.from_json(json) +# print the JSON string representation of the object +print(Datastax1.to_json()) + +# convert the object into a dict +datastax1_dict = datastax1_instance.to_dict() +# create an instance of Datastax1 from a dict +datastax1_from_dict = Datastax1.from_dict(datastax1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Datastax2.md b/docs/Datastax2.md new file mode 100644 index 0000000..a309855 --- /dev/null +++ b/docs/Datastax2.md @@ -0,0 +1,30 @@ +# Datastax2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"DATASTAX\") | + +## Example + +```python +from vectorize_client.models.datastax2 import Datastax2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Datastax2 from a JSON string +datastax2_instance = Datastax2.from_json(json) +# print the JSON string representation of the object 
+print(datastax2_instance.to_json()) + +# convert the object into a dict +datastax2_dict = datastax2_instance.to_dict() +# create an instance of Datastax2 from a dict +datastax2_from_dict = Datastax2.from_dict(datastax2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DeepResearchResult.md b/docs/DeepResearchResult.md new file mode 100644 index 0000000..860c339 --- /dev/null +++ b/docs/DeepResearchResult.md @@ -0,0 +1,32 @@ +# DeepResearchResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**success** | **bool** | | +**events** | **List[str]** | | [optional] +**markdown** | **str** | | [optional] +**error** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.deep_research_result import DeepResearchResult + +# TODO update the JSON string below +json = "{}" +# create an instance of DeepResearchResult from a JSON string +deep_research_result_instance = DeepResearchResult.from_json(json) +# print the JSON string representation of the object +print(deep_research_result_instance.to_json()) + +# convert the object into a dict +deep_research_result_dict = deep_research_result_instance.to_dict() +# create an instance of DeepResearchResult from a dict +deep_research_result_from_dict = DeepResearchResult.from_dict(deep_research_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DeleteAIPlatformConnectorResponse.md b/docs/DeleteAIPlatformConnectorResponse.md new file mode 100644 index 0000000..106ddaf --- /dev/null +++ b/docs/DeleteAIPlatformConnectorResponse.md @@ -0,0 +1,29 @@ +# DeleteAIPlatformConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | 
------------- | ------------- +**message** | **str** | | + +## Example + +```python +from vectorize_client.models.delete_ai_platform_connector_response import DeleteAIPlatformConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of DeleteAIPlatformConnectorResponse from a JSON string +delete_ai_platform_connector_response_instance = DeleteAIPlatformConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(delete_ai_platform_connector_response_instance.to_json()) + +# convert the object into a dict +delete_ai_platform_connector_response_dict = delete_ai_platform_connector_response_instance.to_dict() +# create an instance of DeleteAIPlatformConnectorResponse from a dict +delete_ai_platform_connector_response_from_dict = DeleteAIPlatformConnectorResponse.from_dict(delete_ai_platform_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DeleteDestinationConnectorResponse.md b/docs/DeleteDestinationConnectorResponse.md new file mode 100644 index 0000000..4a80311 --- /dev/null +++ b/docs/DeleteDestinationConnectorResponse.md @@ -0,0 +1,29 @@ +# DeleteDestinationConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | + +## Example + +```python +from vectorize_client.models.delete_destination_connector_response import DeleteDestinationConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of DeleteDestinationConnectorResponse from a JSON string +delete_destination_connector_response_instance = DeleteDestinationConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(delete_destination_connector_response_instance.to_json()) + +# convert the object into a dict 
+delete_destination_connector_response_dict = delete_destination_connector_response_instance.to_dict() +# create an instance of DeleteDestinationConnectorResponse from a dict +delete_destination_connector_response_from_dict = DeleteDestinationConnectorResponse.from_dict(delete_destination_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DeleteFileResponse.md b/docs/DeleteFileResponse.md new file mode 100644 index 0000000..7a3df62 --- /dev/null +++ b/docs/DeleteFileResponse.md @@ -0,0 +1,30 @@ +# DeleteFileResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**file_name** | **str** | | + +## Example + +```python +from vectorize_client.models.delete_file_response import DeleteFileResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of DeleteFileResponse from a JSON string +delete_file_response_instance = DeleteFileResponse.from_json(json) +# print the JSON string representation of the object +print(delete_file_response_instance.to_json()) + +# convert the object into a dict +delete_file_response_dict = delete_file_response_instance.to_dict() +# create an instance of DeleteFileResponse from a dict +delete_file_response_from_dict = DeleteFileResponse.from_dict(delete_file_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DeletePipelineResponse.md b/docs/DeletePipelineResponse.md new file mode 100644 index 0000000..ab7c475 --- /dev/null +++ b/docs/DeletePipelineResponse.md @@ -0,0 +1,29 @@ +# DeletePipelineResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- 
+**message** | **str** | | + +## Example + +```python +from vectorize_client.models.delete_pipeline_response import DeletePipelineResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of DeletePipelineResponse from a JSON string +delete_pipeline_response_instance = DeletePipelineResponse.from_json(json) +# print the JSON string representation of the object +print(delete_pipeline_response_instance.to_json()) + +# convert the object into a dict +delete_pipeline_response_dict = delete_pipeline_response_instance.to_dict() +# create an instance of DeletePipelineResponse from a dict +delete_pipeline_response_from_dict = DeletePipelineResponse.from_dict(delete_pipeline_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DeleteSourceConnectorResponse.md b/docs/DeleteSourceConnectorResponse.md new file mode 100644 index 0000000..c8e57e2 --- /dev/null +++ b/docs/DeleteSourceConnectorResponse.md @@ -0,0 +1,29 @@ +# DeleteSourceConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | + +## Example + +```python +from vectorize_client.models.delete_source_connector_response import DeleteSourceConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of DeleteSourceConnectorResponse from a JSON string +delete_source_connector_response_instance = DeleteSourceConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(delete_source_connector_response_instance.to_json()) + +# convert the object into a dict +delete_source_connector_response_dict = delete_source_connector_response_instance.to_dict() +# create an instance of DeleteSourceConnectorResponse from a dict +delete_source_connector_response_from_dict = 
DeleteSourceConnectorResponse.from_dict(delete_source_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DestinationConnector.md b/docs/DestinationConnector.md new file mode 100644 index 0000000..62b1b7d --- /dev/null +++ b/docs/DestinationConnector.md @@ -0,0 +1,39 @@ +# DestinationConnector + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**type** | **str** | | +**name** | **str** | | +**config_doc** | **Dict[str, Optional[object]]** | | [optional] +**created_at** | **str** | | [optional] +**created_by_id** | **str** | | [optional] +**last_updated_by_id** | **str** | | [optional] +**created_by_email** | **str** | | [optional] +**last_updated_by_email** | **str** | | [optional] +**error_message** | **str** | | [optional] +**verification_status** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.destination_connector import DestinationConnector + +# TODO update the JSON string below +json = "{}" +# create an instance of DestinationConnector from a JSON string +destination_connector_instance = DestinationConnector.from_json(json) +# print the JSON string representation of the object +print(destination_connector_instance.to_json()) + +# convert the object into a dict +destination_connector_dict = destination_connector_instance.to_dict() +# create an instance of DestinationConnector from a dict +destination_connector_from_dict = DestinationConnector.from_dict(destination_connector_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DestinationConnectorInput.md b/docs/DestinationConnectorInput.md new file mode 100644 index 0000000..fa0450c --- /dev/null 
+++ b/docs/DestinationConnectorInput.md @@ -0,0 +1,32 @@ +# DestinationConnectorInput + +Destination connector configuration + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the destination connector | +**type** | **str** | Type of destination connector | +**config** | [**DestinationConnectorInputConfig**](DestinationConnectorInputConfig.md) | | + +## Example + +```python +from vectorize_client.models.destination_connector_input import DestinationConnectorInput + +# TODO update the JSON string below +json = "{}" +# create an instance of DestinationConnectorInput from a JSON string +destination_connector_input_instance = DestinationConnectorInput.from_json(json) +# print the JSON string representation of the object +print(DestinationConnectorInput.to_json()) + +# convert the object into a dict +destination_connector_input_dict = destination_connector_input_instance.to_dict() +# create an instance of DestinationConnectorInput from a dict +destination_connector_input_from_dict = DestinationConnectorInput.from_dict(destination_connector_input_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DestinationConnectorInputConfig.md b/docs/DestinationConnectorInputConfig.md new file mode 100644 index 0000000..e60b3cd --- /dev/null +++ b/docs/DestinationConnectorInputConfig.md @@ -0,0 +1,35 @@ +# DestinationConnectorInputConfig + +Configuration specific to the connector type + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**bucket** | **str** | Bucket Name. Example: Enter bucket name | +**scope** | **str** | Scope Name. Example: Enter scope name | +**collection** | **str** | Collection Name. Example: Enter collection name | +**index** | **str** | Index Name. 
Example: Enter index name | +**namespace** | **str** | Namespace. Example: Enter namespace name | +**table** | **str** | Table Name. Example: Enter <table name> or <schema>.<table name> | + +## Example + +```python +from vectorize_client.models.destination_connector_input_config import DestinationConnectorInputConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DestinationConnectorInputConfig from a JSON string +destination_connector_input_config_instance = DestinationConnectorInputConfig.from_json(json) +# print the JSON string representation of the object +print(DestinationConnectorInputConfig.to_json()) + +# convert the object into a dict +destination_connector_input_config_dict = destination_connector_input_config_instance.to_dict() +# create an instance of DestinationConnectorInputConfig from a dict +destination_connector_input_config_from_dict = DestinationConnectorInputConfig.from_dict(destination_connector_input_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DestinationConnectorSchema.md b/docs/DestinationConnectorSchema.md new file mode 100644 index 0000000..0c4cc7e --- /dev/null +++ b/docs/DestinationConnectorSchema.md @@ -0,0 +1,31 @@ +# DestinationConnectorSchema + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**type** | [**DestinationConnectorType**](DestinationConnectorType.md) | | +**config** | **Dict[str, Optional[object]]** | | [optional] + +## Example + +```python +from vectorize_client.models.destination_connector_schema import DestinationConnectorSchema + +# TODO update the JSON string below +json = "{}" +# create an instance of DestinationConnectorSchema from a JSON string +destination_connector_schema_instance = DestinationConnectorSchema.from_json(json) +# print the 
JSON string representation of the object +print(DestinationConnectorSchema.to_json()) + +# convert the object into a dict +destination_connector_schema_dict = destination_connector_schema_instance.to_dict() +# create an instance of DestinationConnectorSchema from a dict +destination_connector_schema_from_dict = DestinationConnectorSchema.from_dict(destination_connector_schema_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DestinationConnectorType.md b/docs/DestinationConnectorType.md new file mode 100644 index 0000000..82e80fa --- /dev/null +++ b/docs/DestinationConnectorType.md @@ -0,0 +1,32 @@ +# DestinationConnectorType + + +## Enum + +* `CAPELLA` (value: `'CAPELLA'`) + +* `DATASTAX` (value: `'DATASTAX'`) + +* `ELASTIC` (value: `'ELASTIC'`) + +* `PINECONE` (value: `'PINECONE'`) + +* `SINGLESTORE` (value: `'SINGLESTORE'`) + +* `MILVUS` (value: `'MILVUS'`) + +* `POSTGRESQL` (value: `'POSTGRESQL'`) + +* `QDRANT` (value: `'QDRANT'`) + +* `SUPABASE` (value: `'SUPABASE'`) + +* `WEAVIATE` (value: `'WEAVIATE'`) + +* `AZUREAISEARCH` (value: `'AZUREAISEARCH'`) + +* `TURBOPUFFER` (value: `'TURBOPUFFER'`) + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Discord.md b/docs/Discord.md new file mode 100644 index 0000000..5eb6fef --- /dev/null +++ b/docs/Discord.md @@ -0,0 +1,31 @@ +# Discord + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"DISCORD\") | +**config** | [**DISCORDConfig**](DISCORDConfig.md) | | + +## Example + +```python +from vectorize_client.models.discord import Discord + +# TODO update the JSON string below +json = "{}" 
+# create an instance of Discord from a JSON string +discord_instance = Discord.from_json(json) +# print the JSON string representation of the object +print(Discord.to_json()) + +# convert the object into a dict +discord_dict = discord_instance.to_dict() +# create an instance of Discord from a dict +discord_from_dict = Discord.from_dict(discord_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Discord1.md b/docs/Discord1.md new file mode 100644 index 0000000..48fc98b --- /dev/null +++ b/docs/Discord1.md @@ -0,0 +1,29 @@ +# Discord1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**DISCORDConfig**](DISCORDConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.discord1 import Discord1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Discord1 from a JSON string +discord1_instance = Discord1.from_json(json) +# print the JSON string representation of the object +print(Discord1.to_json()) + +# convert the object into a dict +discord1_dict = discord1_instance.to_dict() +# create an instance of Discord1 from a dict +discord1_from_dict = Discord1.from_dict(discord1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Discord2.md b/docs/Discord2.md new file mode 100644 index 0000000..da87862 --- /dev/null +++ b/docs/Discord2.md @@ -0,0 +1,30 @@ +# Discord2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"DISCORD\") | + +## Example + +```python +from vectorize_client.models.discord2 
import Discord2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Discord2 from a JSON string +discord2_instance = Discord2.from_json(json) +# print the JSON string representation of the object +print(Discord2.to_json()) + +# convert the object into a dict +discord2_dict = discord2_instance.to_dict() +# create an instance of Discord2 from a dict +discord2_from_dict = Discord2.from_dict(discord2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Document.md b/docs/Document.md new file mode 100644 index 0000000..da775fe --- /dev/null +++ b/docs/Document.md @@ -0,0 +1,41 @@ +# Document + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**relevancy** | **float** | | +**id** | **str** | | +**text** | **str** | | +**chunk_id** | **str** | | +**total_chunks** | **str** | | +**origin** | **str** | | +**origin_id** | **str** | | +**similarity** | **float** | | +**source** | **str** | | +**unique_source** | **str** | | +**source_display_name** | **str** | | +**pipeline_id** | **str** | | [optional] +**org_id** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.document import Document + +# TODO update the JSON string below +json = "{}" +# create an instance of Document from a JSON string +document_instance = Document.from_json(json) +# print the JSON string representation of the object +print(Document.to_json()) + +# convert the object into a dict +document_dict = document_instance.to_dict() +# create an instance of Document from a dict +document_from_dict = Document.from_dict(document_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Dropbox.md b/docs/Dropbox.md 
new file mode 100644 index 0000000..0e40063 --- /dev/null +++ b/docs/Dropbox.md @@ -0,0 +1,31 @@ +# Dropbox + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"DROPBOX\") | +**config** | [**DROPBOXConfig**](DROPBOXConfig.md) | | + +## Example + +```python +from vectorize_client.models.dropbox import Dropbox + +# TODO update the JSON string below +json = "{}" +# create an instance of Dropbox from a JSON string +dropbox_instance = Dropbox.from_json(json) +# print the JSON string representation of the object +print(Dropbox.to_json()) + +# convert the object into a dict +dropbox_dict = dropbox_instance.to_dict() +# create an instance of Dropbox from a dict +dropbox_from_dict = Dropbox.from_dict(dropbox_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Dropbox1.md b/docs/Dropbox1.md new file mode 100644 index 0000000..762609e --- /dev/null +++ b/docs/Dropbox1.md @@ -0,0 +1,29 @@ +# Dropbox1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**DROPBOXConfig**](DROPBOXConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.dropbox1 import Dropbox1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Dropbox1 from a JSON string +dropbox1_instance = Dropbox1.from_json(json) +# print the JSON string representation of the object +print(Dropbox1.to_json()) + +# convert the object into a dict +dropbox1_dict = dropbox1_instance.to_dict() +# create an instance of Dropbox1 from a dict +dropbox1_from_dict = Dropbox1.from_dict(dropbox1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Dropbox2.md b/docs/Dropbox2.md new file mode 100644 index 0000000..2019ff7 --- /dev/null +++ b/docs/Dropbox2.md @@ -0,0 +1,30 @@ +# Dropbox2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"DROPBOX\") | + +## Example + +```python +from vectorize_client.models.dropbox2 import Dropbox2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Dropbox2 from a JSON string +dropbox2_instance = Dropbox2.from_json(json) +# print the JSON string representation of the object +print(Dropbox2.to_json()) + +# convert the object into a dict +dropbox2_dict = dropbox2_instance.to_dict() +# create an instance of Dropbox2 from a dict +dropbox2_from_dict = Dropbox2.from_dict(dropbox2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DropboxOauth.md b/docs/DropboxOauth.md new file mode 100644 index 0000000..78d2fa2 --- /dev/null +++ b/docs/DropboxOauth.md @@ -0,0 +1,31 @@ +# DropboxOauth + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"DROPBOX_OAUTH\") | +**config** | [**DROPBOXOAUTHAuthConfig**](DROPBOXOAUTHAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.dropbox_oauth import DropboxOauth + +# TODO update the JSON string below +json = "{}" +# create an instance of DropboxOauth from a JSON string +dropbox_oauth_instance = DropboxOauth.from_json(json) +# print the JSON string representation of the object +print(DropboxOauth.to_json()) + +# convert the object 
into a dict +dropbox_oauth_dict = dropbox_oauth_instance.to_dict() +# create an instance of DropboxOauth from a dict +dropbox_oauth_from_dict = DropboxOauth.from_dict(dropbox_oauth_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DropboxOauth1.md b/docs/DropboxOauth1.md new file mode 100644 index 0000000..89e5f49 --- /dev/null +++ b/docs/DropboxOauth1.md @@ -0,0 +1,29 @@ +# DropboxOauth1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**DROPBOXOAUTHAuthConfig**](DROPBOXOAUTHAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.dropbox_oauth1 import DropboxOauth1 + +# TODO update the JSON string below +json = "{}" +# create an instance of DropboxOauth1 from a JSON string +dropbox_oauth1_instance = DropboxOauth1.from_json(json) +# print the JSON string representation of the object +print(DropboxOauth1.to_json()) + +# convert the object into a dict +dropbox_oauth1_dict = dropbox_oauth1_instance.to_dict() +# create an instance of DropboxOauth1 from a dict +dropbox_oauth1_from_dict = DropboxOauth1.from_dict(dropbox_oauth1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DropboxOauth2.md b/docs/DropboxOauth2.md new file mode 100644 index 0000000..ae6a141 --- /dev/null +++ b/docs/DropboxOauth2.md @@ -0,0 +1,30 @@ +# DropboxOauth2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"DROPBOX_OAUTH\") | + +## Example + +```python +from vectorize_client.models.dropbox_oauth2 import DropboxOauth2 + +# 
TODO update the JSON string below +json = "{}" +# create an instance of DropboxOauth2 from a JSON string +dropbox_oauth2_instance = DropboxOauth2.from_json(json) +# print the JSON string representation of the object +print(DropboxOauth2.to_json()) + +# convert the object into a dict +dropbox_oauth2_dict = dropbox_oauth2_instance.to_dict() +# create an instance of DropboxOauth2 from a dict +dropbox_oauth2_from_dict = DropboxOauth2.from_dict(dropbox_oauth2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DropboxOauthMulti.md b/docs/DropboxOauthMulti.md new file mode 100644 index 0000000..dc590e7 --- /dev/null +++ b/docs/DropboxOauthMulti.md @@ -0,0 +1,31 @@ +# DropboxOauthMulti + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"DROPBOX_OAUTH_MULTI\") | +**config** | [**DROPBOXOAUTHMULTIAuthConfig**](DROPBOXOAUTHMULTIAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.dropbox_oauth_multi import DropboxOauthMulti + +# TODO update the JSON string below +json = "{}" +# create an instance of DropboxOauthMulti from a JSON string +dropbox_oauth_multi_instance = DropboxOauthMulti.from_json(json) +# print the JSON string representation of the object +print(DropboxOauthMulti.to_json()) + +# convert the object into a dict +dropbox_oauth_multi_dict = dropbox_oauth_multi_instance.to_dict() +# create an instance of DropboxOauthMulti from a dict +dropbox_oauth_multi_from_dict = DropboxOauthMulti.from_dict(dropbox_oauth_multi_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DropboxOauthMulti1.md 
b/docs/DropboxOauthMulti1.md new file mode 100644 index 0000000..bd53c6f --- /dev/null +++ b/docs/DropboxOauthMulti1.md @@ -0,0 +1,29 @@ +# DropboxOauthMulti1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**DROPBOXOAUTHMULTIAuthConfig**](DROPBOXOAUTHMULTIAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.dropbox_oauth_multi1 import DropboxOauthMulti1 + +# TODO update the JSON string below +json = "{}" +# create an instance of DropboxOauthMulti1 from a JSON string +dropbox_oauth_multi1_instance = DropboxOauthMulti1.from_json(json) +# print the JSON string representation of the object +print(DropboxOauthMulti1.to_json()) + +# convert the object into a dict +dropbox_oauth_multi1_dict = dropbox_oauth_multi1_instance.to_dict() +# create an instance of DropboxOauthMulti1 from a dict +dropbox_oauth_multi1_from_dict = DropboxOauthMulti1.from_dict(dropbox_oauth_multi1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DropboxOauthMulti2.md b/docs/DropboxOauthMulti2.md new file mode 100644 index 0000000..7f52cc9 --- /dev/null +++ b/docs/DropboxOauthMulti2.md @@ -0,0 +1,30 @@ +# DropboxOauthMulti2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"DROPBOX_OAUTH_MULTI\") | + +## Example + +```python +from vectorize_client.models.dropbox_oauth_multi2 import DropboxOauthMulti2 + +# TODO update the JSON string below +json = "{}" +# create an instance of DropboxOauthMulti2 from a JSON string +dropbox_oauth_multi2_instance = DropboxOauthMulti2.from_json(json) +# print the JSON string representation of the object +print(DropboxOauthMulti2.to_json()) 
+ +# convert the object into a dict +dropbox_oauth_multi2_dict = dropbox_oauth_multi2_instance.to_dict() +# create an instance of DropboxOauthMulti2 from a dict +dropbox_oauth_multi2_from_dict = DropboxOauthMulti2.from_dict(dropbox_oauth_multi2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DropboxOauthMultiCustom.md b/docs/DropboxOauthMultiCustom.md new file mode 100644 index 0000000..7ad3444 --- /dev/null +++ b/docs/DropboxOauthMultiCustom.md @@ -0,0 +1,31 @@ +# DropboxOauthMultiCustom + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"DROPBOX_OAUTH_MULTI_CUSTOM\") | +**config** | [**DROPBOXOAUTHMULTICUSTOMAuthConfig**](DROPBOXOAUTHMULTICUSTOMAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.dropbox_oauth_multi_custom import DropboxOauthMultiCustom + +# TODO update the JSON string below +json = "{}" +# create an instance of DropboxOauthMultiCustom from a JSON string +dropbox_oauth_multi_custom_instance = DropboxOauthMultiCustom.from_json(json) +# print the JSON string representation of the object +print(DropboxOauthMultiCustom.to_json()) + +# convert the object into a dict +dropbox_oauth_multi_custom_dict = dropbox_oauth_multi_custom_instance.to_dict() +# create an instance of DropboxOauthMultiCustom from a dict +dropbox_oauth_multi_custom_from_dict = DropboxOauthMultiCustom.from_dict(dropbox_oauth_multi_custom_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DropboxOauthMultiCustom1.md b/docs/DropboxOauthMultiCustom1.md new file mode 100644 index 0000000..ed825be --- /dev/null +++ 
b/docs/DropboxOauthMultiCustom1.md @@ -0,0 +1,29 @@ +# DropboxOauthMultiCustom1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**DROPBOXOAUTHMULTICUSTOMAuthConfig**](DROPBOXOAUTHMULTICUSTOMAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.dropbox_oauth_multi_custom1 import DropboxOauthMultiCustom1 + +# TODO update the JSON string below +json = "{}" +# create an instance of DropboxOauthMultiCustom1 from a JSON string +dropbox_oauth_multi_custom1_instance = DropboxOauthMultiCustom1.from_json(json) +# print the JSON string representation of the object +print(DropboxOauthMultiCustom1.to_json()) + +# convert the object into a dict +dropbox_oauth_multi_custom1_dict = dropbox_oauth_multi_custom1_instance.to_dict() +# create an instance of DropboxOauthMultiCustom1 from a dict +dropbox_oauth_multi_custom1_from_dict = DropboxOauthMultiCustom1.from_dict(dropbox_oauth_multi_custom1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/DropboxOauthMultiCustom2.md b/docs/DropboxOauthMultiCustom2.md new file mode 100644 index 0000000..ef7a0e1 --- /dev/null +++ b/docs/DropboxOauthMultiCustom2.md @@ -0,0 +1,30 @@ +# DropboxOauthMultiCustom2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"DROPBOX_OAUTH_MULTI_CUSTOM\") | + +## Example + +```python +from vectorize_client.models.dropbox_oauth_multi_custom2 import DropboxOauthMultiCustom2 + +# TODO update the JSON string below +json = "{}" +# create an instance of DropboxOauthMultiCustom2 from a JSON string +dropbox_oauth_multi_custom2_instance = DropboxOauthMultiCustom2.from_json(json) +# print the JSON 
string representation of the object +print(DropboxOauthMultiCustom2.to_json()) + +# convert the object into a dict +dropbox_oauth_multi_custom2_dict = dropbox_oauth_multi_custom2_instance.to_dict() +# create an instance of DropboxOauthMultiCustom2 from a dict +dropbox_oauth_multi_custom2_from_dict = DropboxOauthMultiCustom2.from_dict(dropbox_oauth_multi_custom2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/ELASTICAuthConfig.md b/docs/ELASTICAuthConfig.md new file mode 100644 index 0000000..d6197e1 --- /dev/null +++ b/docs/ELASTICAuthConfig.md @@ -0,0 +1,33 @@ +# ELASTICAuthConfig + +Authentication configuration for Elasticsearch + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Elastic integration | +**host** | **str** | Host. Example: Enter your host | +**port** | **str** | Port. Example: Enter your port | +**api_key** | **str** | API Key. 
Example: Enter your API key | + +## Example + +```python +from vectorize_client.models.elastic_auth_config import ELASTICAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of ELASTICAuthConfig from a JSON string +elastic_auth_config_instance = ELASTICAuthConfig.from_json(json) +# print the JSON string representation of the object +print(ELASTICAuthConfig.to_json()) + +# convert the object into a dict +elastic_auth_config_dict = elastic_auth_config_instance.to_dict() +# create an instance of ELASTICAuthConfig from a dict +elastic_auth_config_from_dict = ELASTICAuthConfig.from_dict(elastic_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/ELASTICConfig.md b/docs/ELASTICConfig.md new file mode 100644 index 0000000..410d44d --- /dev/null +++ b/docs/ELASTICConfig.md @@ -0,0 +1,30 @@ +# ELASTICConfig + +Configuration for Elasticsearch connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**index** | **str** | Index Name. 
Example: Enter index name | + +## Example + +```python +from vectorize_client.models.elastic_config import ELASTICConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of ELASTICConfig from a JSON string +elastic_config_instance = ELASTICConfig.from_json(json) +# print the JSON string representation of the object +print(ELASTICConfig.to_json()) + +# convert the object into a dict +elastic_config_dict = elastic_config_instance.to_dict() +# create an instance of ELASTICConfig from a dict +elastic_config_from_dict = ELASTICConfig.from_dict(elastic_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Elastic.md b/docs/Elastic.md new file mode 100644 index 0000000..dc04310 --- /dev/null +++ b/docs/Elastic.md @@ -0,0 +1,31 @@ +# Elastic + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"ELASTIC\") | +**config** | [**ELASTICConfig**](ELASTICConfig.md) | | + +## Example + +```python +from vectorize_client.models.elastic import Elastic + +# TODO update the JSON string below +json = "{}" +# create an instance of Elastic from a JSON string +elastic_instance = Elastic.from_json(json) +# print the JSON string representation of the object +print(Elastic.to_json()) + +# convert the object into a dict +elastic_dict = elastic_instance.to_dict() +# create an instance of Elastic from a dict +elastic_from_dict = Elastic.from_dict(elastic_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Elastic1.md b/docs/Elastic1.md new file mode 100644 index 0000000..515a0a0 --- /dev/null +++ b/docs/Elastic1.md @@ -0,0 +1,29 
@@ +# Elastic1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**ELASTICConfig**](ELASTICConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.elastic1 import Elastic1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Elastic1 from a JSON string +elastic1_instance = Elastic1.from_json(json) +# print the JSON string representation of the object +print(Elastic1.to_json()) + +# convert the object into a dict +elastic1_dict = elastic1_instance.to_dict() +# create an instance of Elastic1 from a dict +elastic1_from_dict = Elastic1.from_dict(elastic1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Elastic2.md b/docs/Elastic2.md new file mode 100644 index 0000000..ef94cf1 --- /dev/null +++ b/docs/Elastic2.md @@ -0,0 +1,30 @@ +# Elastic2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"ELASTIC\") | + +## Example + +```python +from vectorize_client.models.elastic2 import Elastic2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Elastic2 from a JSON string +elastic2_instance = Elastic2.from_json(json) +# print the JSON string representation of the object +print(Elastic2.to_json()) + +# convert the object into a dict +elastic2_dict = elastic2_instance.to_dict() +# create an instance of Elastic2 from a dict +elastic2_from_dict = Elastic2.from_dict(elastic2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/ExtractionApi.md b/docs/ExtractionApi.md new 
file mode 100644 index 0000000..6fea47f --- /dev/null +++ b/docs/ExtractionApi.md @@ -0,0 +1,177 @@ +# vectorize_client.ExtractionApi + +All URIs are relative to *https://api.vectorize.io/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**get_extraction_result**](ExtractionApi.md#get_extraction_result) | **GET** /org/{organizationId}/extraction/{extractionId} | Get extraction result +[**start_extraction**](ExtractionApi.md#start_extraction) | **POST** /org/{organizationId}/extraction | Start content extraction from a file + + +# **get_extraction_result** +> ExtractionResultResponse get_extraction_result(organization, extraction_id) + +Get extraction result + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.extraction_result_response import ExtractionResultResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ExtractionApi(api_client) + organization = 'organization_example' # str | + extraction_id = 'extraction_id_example' # str | + + try: + # Get extraction result + api_response = api_instance.get_extraction_result(organization, extraction_id) + print("The response of ExtractionApi->get_extraction_result:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ExtractionApi->get_extraction_result: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **extraction_id** | **str**| | + +### Return type + +[**ExtractionResultResponse**](ExtractionResultResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Extraction result retrieved successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **start_extraction** +> StartExtractionResponse start_extraction(organization_id, start_extraction_request) + +Start content extraction from a file + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from 
vectorize_client.models.start_extraction_request import StartExtractionRequest +from vectorize_client.models.start_extraction_response import StartExtractionResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.ExtractionApi(api_client) + organization_id = 'organization_id_example' # str | + start_extraction_request = {"fileId":"2a53d7fa-748a-4b7f-a35b-e5f73944f444","type":"iris","chunkingStrategy":"markdown","chunkSize":20,"metadata":{"schemas":[],"inferSchema":true}} # StartExtractionRequest | + + try: + # Start content extraction from a file + api_response = api_instance.start_extraction(organization_id, start_extraction_request) + print("The response of ExtractionApi->start_extraction:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ExtractionApi->start_extraction: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **start_extraction_request** | [**StartExtractionRequest**](StartExtractionRequest.md)| | + +### Return type + 
+[**StartExtractionResponse**](StartExtractionResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Extraction started successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/docs/ExtractionChunkingStrategy.md b/docs/ExtractionChunkingStrategy.md new file mode 100644 index 0000000..7eace7c --- /dev/null +++ b/docs/ExtractionChunkingStrategy.md @@ -0,0 +1,10 @@ +# ExtractionChunkingStrategy + + +## Enum + +* `MARKDOWN` (value: `'markdown'`) + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/ExtractionResult.md b/docs/ExtractionResult.md new file mode 100644 index 0000000..74fa48c --- /dev/null +++ b/docs/ExtractionResult.md @@ -0,0 +1,36 @@ +# ExtractionResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**success** | **bool** | | +**chunks** | **List[str]** | | [optional] +**text** | **str** | | [optional] +**metadata** | **str** | | [optional] +**metadata_schema** | **str** | | [optional] +**chunks_metadata** | **List[str]** | | [optional] +**chunks_schema** | **List[str]** | | [optional] +**error** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.extraction_result import ExtractionResult + +# TODO update the JSON string below +json = "{}" +# create an 
instance of ExtractionResult from a JSON string +extraction_result_instance = ExtractionResult.from_json(json) +# print the JSON string representation of the object +print(ExtractionResult.to_json()) + +# convert the object into a dict +extraction_result_dict = extraction_result_instance.to_dict() +# create an instance of ExtractionResult from a dict +extraction_result_from_dict = ExtractionResult.from_dict(extraction_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/ExtractionResultResponse.md b/docs/ExtractionResultResponse.md new file mode 100644 index 0000000..6fecd57 --- /dev/null +++ b/docs/ExtractionResultResponse.md @@ -0,0 +1,30 @@ +# ExtractionResultResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ready** | **bool** | | +**data** | [**ExtractionResult**](ExtractionResult.md) | | [optional] + +## Example + +```python +from vectorize_client.models.extraction_result_response import ExtractionResultResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of ExtractionResultResponse from a JSON string +extraction_result_response_instance = ExtractionResultResponse.from_json(json) +# print the JSON string representation of the object +print(ExtractionResultResponse.to_json()) + +# convert the object into a dict +extraction_result_response_dict = extraction_result_response_instance.to_dict() +# create an instance of ExtractionResultResponse from a dict +extraction_result_response_from_dict = ExtractionResultResponse.from_dict(extraction_result_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/ExtractionType.md b/docs/ExtractionType.md new file mode 100644 
index 0000000..56b654e --- /dev/null +++ b/docs/ExtractionType.md @@ -0,0 +1,10 @@ +# ExtractionType + + +## Enum + +* `IRIS` (value: `'iris'`) + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/FILEUPLOADAuthConfig.md b/docs/FILEUPLOADAuthConfig.md new file mode 100644 index 0000000..52b68db --- /dev/null +++ b/docs/FILEUPLOADAuthConfig.md @@ -0,0 +1,32 @@ +# FILEUPLOADAuthConfig + +Authentication configuration for File Upload + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for this connector | +**path_prefix** | **str** | Path Prefix | [optional] +**files** | **List[str]** | Choose files. Files uploaded to this connector can be used in pipelines to vectorize their contents. Note: files with the same name will be overwritten. 
| [optional] + +## Example + +```python +from vectorize_client.models.fileupload_auth_config import FILEUPLOADAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of FILEUPLOADAuthConfig from a JSON string +fileupload_auth_config_instance = FILEUPLOADAuthConfig.from_json(json) +# print the JSON string representation of the object +print(FILEUPLOADAuthConfig.to_json()) + +# convert the object into a dict +fileupload_auth_config_dict = fileupload_auth_config_instance.to_dict() +# create an instance of FILEUPLOADAuthConfig from a dict +fileupload_auth_config_from_dict = FILEUPLOADAuthConfig.from_dict(fileupload_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/FIRECRAWLAuthConfig.md b/docs/FIRECRAWLAuthConfig.md new file mode 100644 index 0000000..1a680e6 --- /dev/null +++ b/docs/FIRECRAWLAuthConfig.md @@ -0,0 +1,31 @@ +# FIRECRAWLAuthConfig + +Authentication configuration for Firecrawl + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**api_key** | **str** | API Key. 
Example: Enter your Firecrawl API Key | + +## Example + +```python +from vectorize_client.models.firecrawl_auth_config import FIRECRAWLAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of FIRECRAWLAuthConfig from a JSON string +firecrawl_auth_config_instance = FIRECRAWLAuthConfig.from_json(json) +# print the JSON string representation of the object +print(FIRECRAWLAuthConfig.to_json()) + +# convert the object into a dict +firecrawl_auth_config_dict = firecrawl_auth_config_instance.to_dict() +# create an instance of FIRECRAWLAuthConfig from a dict +firecrawl_auth_config_from_dict = FIRECRAWLAuthConfig.from_dict(firecrawl_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/FIRECRAWLConfig.md b/docs/FIRECRAWLConfig.md new file mode 100644 index 0000000..aa4ad0a --- /dev/null +++ b/docs/FIRECRAWLConfig.md @@ -0,0 +1,31 @@ +# FIRECRAWLConfig + +Configuration for Firecrawl connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**endpoint** | **str** | Endpoint. Example: Choose which api endpoint to use | [default to 'Crawl'] +**request** | **object** | Request Body. Example: JSON config for firecrawl's /crawl or /scrape endpoint. 
| + +## Example + +```python +from vectorize_client.models.firecrawl_config import FIRECRAWLConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of FIRECRAWLConfig from a JSON string +firecrawl_config_instance = FIRECRAWLConfig.from_json(json) +# print the JSON string representation of the object +print(FIRECRAWLConfig.to_json()) + +# convert the object into a dict +firecrawl_config_dict = firecrawl_config_instance.to_dict() +# create an instance of FIRECRAWLConfig from a dict +firecrawl_config_from_dict = FIRECRAWLConfig.from_dict(firecrawl_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/FIREFLIESAuthConfig.md b/docs/FIREFLIESAuthConfig.md new file mode 100644 index 0000000..775c8ef --- /dev/null +++ b/docs/FIREFLIESAuthConfig.md @@ -0,0 +1,31 @@ +# FIREFLIESAuthConfig + +Authentication configuration for Fireflies.ai + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**api_key** | **str** | API Key. 
Example: Enter your Fireflies.ai API key | + +## Example + +```python +from vectorize_client.models.fireflies_auth_config import FIREFLIESAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of FIREFLIESAuthConfig from a JSON string +fireflies_auth_config_instance = FIREFLIESAuthConfig.from_json(json) +# print the JSON string representation of the object +print(FIREFLIESAuthConfig.to_json()) + +# convert the object into a dict +fireflies_auth_config_dict = fireflies_auth_config_instance.to_dict() +# create an instance of FIREFLIESAuthConfig from a dict +fireflies_auth_config_from_dict = FIREFLIESAuthConfig.from_dict(fireflies_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/FIREFLIESConfig.md b/docs/FIREFLIESConfig.md new file mode 100644 index 0000000..b36e4ba --- /dev/null +++ b/docs/FIREFLIESConfig.md @@ -0,0 +1,36 @@ +# FIREFLIESConfig + +Configuration for Fireflies.ai connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**start_date** | **date** | Start Date. Include meetings from this date forward. Example: Enter a date: Example 2023-12-31 | +**end_date** | **date** | End Date. Include meetings up to this date only. Example: Enter a date: Example 2023-12-31 | [optional] +**title_filter_type** | **str** | | [default to 'AND'] +**title_filter** | **str** | Title Filter. Only include meetings with this text in the title. Example: Enter meeting title | [optional] +**participant_filter_type** | **str** | | [default to 'AND'] +**participant_filter** | **str** | Participant's Email Filter. Include meetings where these participants were invited. Example: Enter participant email | [optional] +**max_meetings** | **float** | Max Meetings. Enter -1 for all available meetings, or specify a limit. 
Example: Enter maximum number of meetings to retrieve. (-1 for all) | [optional] [default to -1] + +## Example + +```python +from vectorize_client.models.fireflies_config import FIREFLIESConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of FIREFLIESConfig from a JSON string +fireflies_config_instance = FIREFLIESConfig.from_json(json) +# print the JSON string representation of the object +print(FIREFLIESConfig.to_json()) + +# convert the object into a dict +fireflies_config_dict = fireflies_config_instance.to_dict() +# create an instance of FIREFLIESConfig from a dict +fireflies_config_from_dict = FIREFLIESConfig.from_dict(fireflies_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/FileUpload.md b/docs/FileUpload.md new file mode 100644 index 0000000..184abf6 --- /dev/null +++ b/docs/FileUpload.md @@ -0,0 +1,31 @@ +# FileUpload + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"FILE_UPLOAD\") | +**config** | [**FILEUPLOADAuthConfig**](FILEUPLOADAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.file_upload import FileUpload + +# TODO update the JSON string below +json = "{}" +# create an instance of FileUpload from a JSON string +file_upload_instance = FileUpload.from_json(json) +# print the JSON string representation of the object +print(FileUpload.to_json()) + +# convert the object into a dict +file_upload_dict = file_upload_instance.to_dict() +# create an instance of FileUpload from a dict +file_upload_from_dict = FileUpload.from_dict(file_upload_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) + + diff --git a/docs/FileUpload1.md b/docs/FileUpload1.md new file mode 100644 index 0000000..32d0063 --- /dev/null +++ b/docs/FileUpload1.md @@ -0,0 +1,29 @@ +# FileUpload1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**FILEUPLOADAuthConfig**](FILEUPLOADAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.file_upload1 import FileUpload1 + +# TODO update the JSON string below +json = "{}" +# create an instance of FileUpload1 from a JSON string +file_upload1_instance = FileUpload1.from_json(json) +# print the JSON string representation of the object +print(FileUpload1.to_json()) + +# convert the object into a dict +file_upload1_dict = file_upload1_instance.to_dict() +# create an instance of FileUpload1 from a dict +file_upload1_from_dict = FileUpload1.from_dict(file_upload1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/FileUpload2.md b/docs/FileUpload2.md new file mode 100644 index 0000000..f3f53ec --- /dev/null +++ b/docs/FileUpload2.md @@ -0,0 +1,30 @@ +# FileUpload2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"FILE_UPLOAD\") | + +## Example + +```python +from vectorize_client.models.file_upload2 import FileUpload2 + +# TODO update the JSON string below +json = "{}" +# create an instance of FileUpload2 from a JSON string +file_upload2_instance = FileUpload2.from_json(json) +# print the JSON string representation of the object +print(FileUpload2.to_json()) + +# convert the object into a dict +file_upload2_dict = file_upload2_instance.to_dict() +# create an instance of FileUpload2 from a dict 
+file_upload2_from_dict = FileUpload2.from_dict(file_upload2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/FilesApi.md b/docs/FilesApi.md new file mode 100644 index 0000000..7b63d57 --- /dev/null +++ b/docs/FilesApi.md @@ -0,0 +1,93 @@ +# vectorize_client.FilesApi + +All URIs are relative to *https://api.vectorize.io/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**start_file_upload**](FilesApi.md#start_file_upload) | **POST** /org/{organizationId}/files | Upload a generic file to the platform + + +# **start_file_upload** +> StartFileUploadResponse start_file_upload(organization_id, start_file_upload_request) + +Upload a generic file to the platform + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.start_file_upload_request import StartFileUploadRequest +from vectorize_client.models.start_file_upload_response import StartFileUploadResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.FilesApi(api_client) + organization_id = 'organization_id_example' # str | + start_file_upload_request = {"name":"My StartFileUploadRequest","contentType":"document"} # StartFileUploadRequest | + + try: + # Upload a generic file to the platform + api_response = api_instance.start_file_upload(organization_id, start_file_upload_request) + print("The response of FilesApi->start_file_upload:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling FilesApi->start_file_upload: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **start_file_upload_request** | [**StartFileUploadRequest**](StartFileUploadRequest.md)| | + +### Return type + +[**StartFileUploadResponse**](StartFileUploadResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | File upload started successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/docs/Firecrawl.md b/docs/Firecrawl.md new file mode 100644 index 0000000..4763f05 --- /dev/null +++ 
b/docs/Firecrawl.md @@ -0,0 +1,31 @@ +# Firecrawl + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"FIRECRAWL\") | +**config** | [**FIRECRAWLConfig**](FIRECRAWLConfig.md) | | + +## Example + +```python +from vectorize_client.models.firecrawl import Firecrawl + +# TODO update the JSON string below +json = "{}" +# create an instance of Firecrawl from a JSON string +firecrawl_instance = Firecrawl.from_json(json) +# print the JSON string representation of the object +print(Firecrawl.to_json()) + +# convert the object into a dict +firecrawl_dict = firecrawl_instance.to_dict() +# create an instance of Firecrawl from a dict +firecrawl_from_dict = Firecrawl.from_dict(firecrawl_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Firecrawl1.md b/docs/Firecrawl1.md new file mode 100644 index 0000000..74267db --- /dev/null +++ b/docs/Firecrawl1.md @@ -0,0 +1,29 @@ +# Firecrawl1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**FIRECRAWLConfig**](FIRECRAWLConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.firecrawl1 import Firecrawl1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Firecrawl1 from a JSON string +firecrawl1_instance = Firecrawl1.from_json(json) +# print the JSON string representation of the object +print(Firecrawl1.to_json()) + +# convert the object into a dict +firecrawl1_dict = firecrawl1_instance.to_dict() +# create an instance of Firecrawl1 from a dict +firecrawl1_from_dict = Firecrawl1.from_dict(firecrawl1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Firecrawl2.md b/docs/Firecrawl2.md new file mode 100644 index 0000000..4c199c3 --- /dev/null +++ b/docs/Firecrawl2.md @@ -0,0 +1,30 @@ +# Firecrawl2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"FIRECRAWL\") | + +## Example + +```python +from vectorize_client.models.firecrawl2 import Firecrawl2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Firecrawl2 from a JSON string +firecrawl2_instance = Firecrawl2.from_json(json) +# print the JSON string representation of the object +print(Firecrawl2.to_json()) + +# convert the object into a dict +firecrawl2_dict = firecrawl2_instance.to_dict() +# create an instance of Firecrawl2 from a dict +firecrawl2_from_dict = Firecrawl2.from_dict(firecrawl2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Fireflies.md b/docs/Fireflies.md new file mode 100644 index 0000000..2b0ed57 --- /dev/null +++ b/docs/Fireflies.md @@ -0,0 +1,31 @@ +# Fireflies + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"FIREFLIES\") | +**config** | [**FIREFLIESConfig**](FIREFLIESConfig.md) | | + +## Example + +```python +from vectorize_client.models.fireflies import Fireflies + +# TODO update the JSON string below +json = "{}" +# create an instance of Fireflies from a JSON string +fireflies_instance = Fireflies.from_json(json) +# print the JSON string representation of the object +print(Fireflies.to_json()) + +# convert the object into a dict 
+fireflies_dict = fireflies_instance.to_dict() +# create an instance of Fireflies from a dict +fireflies_from_dict = Fireflies.from_dict(fireflies_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Fireflies1.md b/docs/Fireflies1.md new file mode 100644 index 0000000..7e6107c --- /dev/null +++ b/docs/Fireflies1.md @@ -0,0 +1,29 @@ +# Fireflies1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**FIREFLIESConfig**](FIREFLIESConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.fireflies1 import Fireflies1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Fireflies1 from a JSON string +fireflies1_instance = Fireflies1.from_json(json) +# print the JSON string representation of the object +print(Fireflies1.to_json()) + +# convert the object into a dict +fireflies1_dict = fireflies1_instance.to_dict() +# create an instance of Fireflies1 from a dict +fireflies1_from_dict = Fireflies1.from_dict(fireflies1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Fireflies2.md b/docs/Fireflies2.md new file mode 100644 index 0000000..0fe9bac --- /dev/null +++ b/docs/Fireflies2.md @@ -0,0 +1,30 @@ +# Fireflies2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"FIREFLIES\") | + +## Example + +```python +from vectorize_client.models.fireflies2 import Fireflies2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Fireflies2 from a JSON string +fireflies2_instance = 
Fireflies2.from_json(json) +# print the JSON string representation of the object +print(Fireflies2.to_json()) + +# convert the object into a dict +fireflies2_dict = fireflies2_instance.to_dict() +# create an instance of Fireflies2 from a dict +fireflies2_from_dict = Fireflies2.from_dict(fireflies2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GCSAuthConfig.md b/docs/GCSAuthConfig.md new file mode 100644 index 0000000..d3dedf2 --- /dev/null +++ b/docs/GCSAuthConfig.md @@ -0,0 +1,32 @@ +# GCSAuthConfig + +Authentication configuration for GCP Cloud Storage + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**service_account_json** | **str** | Service Account JSON. Example: Enter the JSON key file for the service account | +**bucket_name** | **str** | Bucket. 
Example: Enter bucket name | + +## Example + +```python +from vectorize_client.models.gcs_auth_config import GCSAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GCSAuthConfig from a JSON string +gcs_auth_config_instance = GCSAuthConfig.from_json(json) +# print the JSON string representation of the object +print(GCSAuthConfig.to_json()) + +# convert the object into a dict +gcs_auth_config_dict = gcs_auth_config_instance.to_dict() +# create an instance of GCSAuthConfig from a dict +gcs_auth_config_from_dict = GCSAuthConfig.from_dict(gcs_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GCSConfig.md b/docs/GCSConfig.md new file mode 100644 index 0000000..77031f5 --- /dev/null +++ b/docs/GCSConfig.md @@ -0,0 +1,35 @@ +# GCSConfig + +Configuration for GCP Cloud Storage connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_extensions** | **List[str]** | File Extensions | +**idle_time** | **float** | Check for updates every (seconds) | [default to 5] +**recursive** | **bool** | Recursively scan all folders in the bucket | [optional] +**path_prefix** | **str** | Path Prefix | [optional] +**path_metadata_regex** | **str** | Path Metadata Regex | [optional] +**path_regex_group_names** | **str** | Path Regex Group Names. 
Example: Enter Group Name | [optional] + +## Example + +```python +from vectorize_client.models.gcs_config import GCSConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GCSConfig from a JSON string +gcs_config_instance = GCSConfig.from_json(json) +# print the JSON string representation of the object +print(GCSConfig.to_json()) + +# convert the object into a dict +gcs_config_dict = gcs_config_instance.to_dict() +# create an instance of GCSConfig from a dict +gcs_config_from_dict = GCSConfig.from_dict(gcs_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GITHUBAuthConfig.md b/docs/GITHUBAuthConfig.md new file mode 100644 index 0000000..dbb525f --- /dev/null +++ b/docs/GITHUBAuthConfig.md @@ -0,0 +1,31 @@ +# GITHUBAuthConfig + +Authentication configuration for GitHub + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**oauth_token** | **str** | Personal Access Token. 
Example: Enter your GitHub personal access token | + +## Example + +```python +from vectorize_client.models.github_auth_config import GITHUBAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GITHUBAuthConfig from a JSON string +github_auth_config_instance = GITHUBAuthConfig.from_json(json) +# print the JSON string representation of the object +print(GITHUBAuthConfig.to_json()) + +# convert the object into a dict +github_auth_config_dict = github_auth_config_instance.to_dict() +# create an instance of GITHUBAuthConfig from a dict +github_auth_config_from_dict = GITHUBAuthConfig.from_dict(github_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GITHUBConfig.md b/docs/GITHUBConfig.md new file mode 100644 index 0000000..20ddaec --- /dev/null +++ b/docs/GITHUBConfig.md @@ -0,0 +1,38 @@ +# GITHUBConfig + +Configuration for GitHub connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**repositories** | **str** | Repositories. Example: Example: owner1/repo1 | +**include_pull_requests** | **bool** | Include Pull Requests | [default to True] +**pull_request_status** | **str** | Pull Request Status | [default to 'all'] +**pull_request_labels** | **str** | Pull Request Labels. Example: Optionally filter by label. E.g. fix | [optional] +**include_issues** | **bool** | Include Issues | [default to True] +**issue_status** | **str** | Issue Status | [default to 'all'] +**issue_labels** | **str** | Issue Labels. Example: Optionally filter by label. E.g. bug | [optional] +**max_items** | **float** | Max Items. Example: Enter maximum number of items to fetch | [default to 1000] +**created_after** | **date** | Created After. Filter for items created after this date. 
Example: Enter a date: Example 2012-12-31 | [optional] + +## Example + +```python +from vectorize_client.models.github_config import GITHUBConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GITHUBConfig from a JSON string +github_config_instance = GITHUBConfig.from_json(json) +# print the JSON string representation of the object +print(GITHUBConfig.to_json()) + +# convert the object into a dict +github_config_dict = github_config_instance.to_dict() +# create an instance of GITHUBConfig from a dict +github_config_from_dict = GITHUBConfig.from_dict(github_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GOOGLEDRIVEAuthConfig.md b/docs/GOOGLEDRIVEAuthConfig.md new file mode 100644 index 0000000..aec10ce --- /dev/null +++ b/docs/GOOGLEDRIVEAuthConfig.md @@ -0,0 +1,31 @@ +# GOOGLEDRIVEAuthConfig + +Authentication configuration for Google Drive (Service Account) + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**service_account_json** | **str** | Service Account JSON. 
Example: Enter the JSON key file for the service account | + +## Example + +```python +from vectorize_client.models.googledrive_auth_config import GOOGLEDRIVEAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GOOGLEDRIVEAuthConfig from a JSON string +googledrive_auth_config_instance = GOOGLEDRIVEAuthConfig.from_json(json) +# print the JSON string representation of the object +print(GOOGLEDRIVEAuthConfig.to_json()) + +# convert the object into a dict +googledrive_auth_config_dict = googledrive_auth_config_instance.to_dict() +# create an instance of GOOGLEDRIVEAuthConfig from a dict +googledrive_auth_config_from_dict = GOOGLEDRIVEAuthConfig.from_dict(googledrive_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GOOGLEDRIVEConfig.md b/docs/GOOGLEDRIVEConfig.md new file mode 100644 index 0000000..1b1572c --- /dev/null +++ b/docs/GOOGLEDRIVEConfig.md @@ -0,0 +1,32 @@ +# GOOGLEDRIVEConfig + +Configuration for Google Drive (Service Account) connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_extensions** | **List[str]** | File Extensions | +**root_parents** | **str** | Restrict ingest to these folder URLs (optional). Example: Enter Folder URLs. Example: https://drive.google.com/drive/folders/1234aBCd5678_eFgH9012iJKL3456opqr | [optional] +**idle_time** | **float** | Polling Interval (seconds). 
Example: Enter polling interval in seconds | [optional] [default to 5] + +## Example + +```python +from vectorize_client.models.googledrive_config import GOOGLEDRIVEConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GOOGLEDRIVEConfig from a JSON string +googledrive_config_instance = GOOGLEDRIVEConfig.from_json(json) +# print the JSON string representation of the object +print(GOOGLEDRIVEConfig.to_json()) + +# convert the object into a dict +googledrive_config_dict = googledrive_config_instance.to_dict() +# create an instance of GOOGLEDRIVEConfig from a dict +googledrive_config_from_dict = GOOGLEDRIVEConfig.from_dict(googledrive_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GOOGLEDRIVEOAUTHAuthConfig.md b/docs/GOOGLEDRIVEOAUTHAuthConfig.md new file mode 100644 index 0000000..b6dec7a --- /dev/null +++ b/docs/GOOGLEDRIVEOAUTHAuthConfig.md @@ -0,0 +1,34 @@ +# GOOGLEDRIVEOAUTHAuthConfig + +Authentication configuration for Google Drive OAuth + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**authorized_user** | **str** | Authorized User | [optional] +**selection_details** | **str** | Connect Google Drive to Vectorize. 
Example: Authorize | +**edited_users** | **str** | | [optional] +**reconnect_users** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.googledriveoauth_auth_config import GOOGLEDRIVEOAUTHAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GOOGLEDRIVEOAUTHAuthConfig from a JSON string +googledriveoauth_auth_config_instance = GOOGLEDRIVEOAUTHAuthConfig.from_json(json) +# print the JSON string representation of the object +print(GOOGLEDRIVEOAUTHAuthConfig.to_json()) + +# convert the object into a dict +googledriveoauth_auth_config_dict = googledriveoauth_auth_config_instance.to_dict() +# create an instance of GOOGLEDRIVEOAUTHAuthConfig from a dict +googledriveoauth_auth_config_from_dict = GOOGLEDRIVEOAUTHAuthConfig.from_dict(googledriveoauth_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GOOGLEDRIVEOAUTHConfig.md b/docs/GOOGLEDRIVEOAUTHConfig.md new file mode 100644 index 0000000..4c03f70 --- /dev/null +++ b/docs/GOOGLEDRIVEOAUTHConfig.md @@ -0,0 +1,31 @@ +# GOOGLEDRIVEOAUTHConfig + +Configuration for Google Drive OAuth connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_extensions** | **List[str]** | File Extensions | +**idle_time** | **float** | Polling Interval (seconds). 
Example: Enter polling interval in seconds | [optional] [default to 5] + +## Example + +```python +from vectorize_client.models.googledriveoauth_config import GOOGLEDRIVEOAUTHConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GOOGLEDRIVEOAUTHConfig from a JSON string +googledriveoauth_config_instance = GOOGLEDRIVEOAUTHConfig.from_json(json) +# print the JSON string representation of the object +print(GOOGLEDRIVEOAUTHConfig.to_json()) + +# convert the object into a dict +googledriveoauth_config_dict = googledriveoauth_config_instance.to_dict() +# create an instance of GOOGLEDRIVEOAUTHConfig from a dict +googledriveoauth_config_from_dict = GOOGLEDRIVEOAUTHConfig.from_dict(googledriveoauth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GOOGLEDRIVEOAUTHMULTIAuthConfig.md b/docs/GOOGLEDRIVEOAUTHMULTIAuthConfig.md new file mode 100644 index 0000000..b298062 --- /dev/null +++ b/docs/GOOGLEDRIVEOAUTHMULTIAuthConfig.md @@ -0,0 +1,33 @@ +# GOOGLEDRIVEOAUTHMULTIAuthConfig + +Authentication configuration for Google Drive Multi-User (Vectorize) + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. 
Example: Enter a descriptive name | +**authorized_users** | **str** | Authorized Users | [optional] +**edited_users** | **str** | | [optional] +**deleted_users** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.googledriveoauthmulti_auth_config import GOOGLEDRIVEOAUTHMULTIAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GOOGLEDRIVEOAUTHMULTIAuthConfig from a JSON string +googledriveoauthmulti_auth_config_instance = GOOGLEDRIVEOAUTHMULTIAuthConfig.from_json(json) +# print the JSON string representation of the object +print(GOOGLEDRIVEOAUTHMULTIAuthConfig.to_json()) + +# convert the object into a dict +googledriveoauthmulti_auth_config_dict = googledriveoauthmulti_auth_config_instance.to_dict() +# create an instance of GOOGLEDRIVEOAUTHMULTIAuthConfig from a dict +googledriveoauthmulti_auth_config_from_dict = GOOGLEDRIVEOAUTHMULTIAuthConfig.from_dict(googledriveoauthmulti_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig.md b/docs/GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig.md new file mode 100644 index 0000000..d227eeb --- /dev/null +++ b/docs/GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig.md @@ -0,0 +1,35 @@ +# GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig + +Authentication configuration for Google Drive Multi-User (White Label) + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**oauth2_client_id** | **str** | OAuth2 Client Id. Example: Enter Client Id | +**oauth2_client_secret** | **str** | OAuth2 Client Secret. 
Example: Enter Client Secret | +**authorized_users** | **str** | Authorized Users | [optional] +**edited_users** | **str** | | [optional] +**deleted_users** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.googledriveoauthmulticustom_auth_config import GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig from a JSON string +googledriveoauthmulticustom_auth_config_instance = GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig.from_json(json) +# print the JSON string representation of the object +print(GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig.to_json()) + +# convert the object into a dict +googledriveoauthmulticustom_auth_config_dict = googledriveoauthmulticustom_auth_config_instance.to_dict() +# create an instance of GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig from a dict +googledriveoauthmulticustom_auth_config_from_dict = GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig.from_dict(googledriveoauthmulticustom_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GOOGLEDRIVEOAUTHMULTICUSTOMConfig.md b/docs/GOOGLEDRIVEOAUTHMULTICUSTOMConfig.md new file mode 100644 index 0000000..e4155ec --- /dev/null +++ b/docs/GOOGLEDRIVEOAUTHMULTICUSTOMConfig.md @@ -0,0 +1,31 @@ +# GOOGLEDRIVEOAUTHMULTICUSTOMConfig + +Configuration for Google Drive Multi-User (White Label) connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_extensions** | **List[str]** | File Extensions | +**idle_time** | **float** | Polling Interval (seconds). 
Example: Enter polling interval in seconds | [optional] [default to 5] + +## Example + +```python +from vectorize_client.models.googledriveoauthmulticustom_config import GOOGLEDRIVEOAUTHMULTICUSTOMConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GOOGLEDRIVEOAUTHMULTICUSTOMConfig from a JSON string +googledriveoauthmulticustom_config_instance = GOOGLEDRIVEOAUTHMULTICUSTOMConfig.from_json(json) +# print the JSON string representation of the object +print(GOOGLEDRIVEOAUTHMULTICUSTOMConfig.to_json()) + +# convert the object into a dict +googledriveoauthmulticustom_config_dict = googledriveoauthmulticustom_config_instance.to_dict() +# create an instance of GOOGLEDRIVEOAUTHMULTICUSTOMConfig from a dict +googledriveoauthmulticustom_config_from_dict = GOOGLEDRIVEOAUTHMULTICUSTOMConfig.from_dict(googledriveoauthmulticustom_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GOOGLEDRIVEOAUTHMULTIConfig.md b/docs/GOOGLEDRIVEOAUTHMULTIConfig.md new file mode 100644 index 0000000..110628b --- /dev/null +++ b/docs/GOOGLEDRIVEOAUTHMULTIConfig.md @@ -0,0 +1,31 @@ +# GOOGLEDRIVEOAUTHMULTIConfig + +Configuration for Google Drive Multi-User (Vectorize) connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_extensions** | **List[str]** | File Extensions | +**idle_time** | **float** | Polling Interval (seconds). 
Example: Enter polling interval in seconds | [optional] [default to 5] + +## Example + +```python +from vectorize_client.models.googledriveoauthmulti_config import GOOGLEDRIVEOAUTHMULTIConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of GOOGLEDRIVEOAUTHMULTIConfig from a JSON string +googledriveoauthmulti_config_instance = GOOGLEDRIVEOAUTHMULTIConfig.from_json(json) +# print the JSON string representation of the object +print(GOOGLEDRIVEOAUTHMULTIConfig.to_json()) + +# convert the object into a dict +googledriveoauthmulti_config_dict = googledriveoauthmulti_config_instance.to_dict() +# create an instance of GOOGLEDRIVEOAUTHMULTIConfig from a dict +googledriveoauthmulti_config_from_dict = GOOGLEDRIVEOAUTHMULTIConfig.from_dict(googledriveoauthmulti_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GetAIPlatformConnectors200Response.md b/docs/GetAIPlatformConnectors200Response.md new file mode 100644 index 0000000..b4a923e --- /dev/null +++ b/docs/GetAIPlatformConnectors200Response.md @@ -0,0 +1,29 @@ +# GetAIPlatformConnectors200Response + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ai_platform_connectors** | [**List[AIPlatform]**](AIPlatform.md) | | + +## Example + +```python +from vectorize_client.models.get_ai_platform_connectors200_response import GetAIPlatformConnectors200Response + +# TODO update the JSON string below +json = "{}" +# create an instance of GetAIPlatformConnectors200Response from a JSON string +get_ai_platform_connectors200_response_instance = GetAIPlatformConnectors200Response.from_json(json) +# print the JSON string representation of the object +print(GetAIPlatformConnectors200Response.to_json()) + +# convert the object into a dict +get_ai_platform_connectors200_response_dict = 
get_ai_platform_connectors200_response_instance.to_dict() +# create an instance of GetAIPlatformConnectors200Response from a dict +get_ai_platform_connectors200_response_from_dict = GetAIPlatformConnectors200Response.from_dict(get_ai_platform_connectors200_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GetDeepResearchResponse.md b/docs/GetDeepResearchResponse.md new file mode 100644 index 0000000..6d22f6e --- /dev/null +++ b/docs/GetDeepResearchResponse.md @@ -0,0 +1,30 @@ +# GetDeepResearchResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ready** | **bool** | | +**data** | [**DeepResearchResult**](DeepResearchResult.md) | | [optional] + +## Example + +```python +from vectorize_client.models.get_deep_research_response import GetDeepResearchResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of GetDeepResearchResponse from a JSON string +get_deep_research_response_instance = GetDeepResearchResponse.from_json(json) +# print the JSON string representation of the object +print(GetDeepResearchResponse.to_json()) + +# convert the object into a dict +get_deep_research_response_dict = get_deep_research_response_instance.to_dict() +# create an instance of GetDeepResearchResponse from a dict +get_deep_research_response_from_dict = GetDeepResearchResponse.from_dict(get_deep_research_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GetDestinationConnectors200Response.md b/docs/GetDestinationConnectors200Response.md new file mode 100644 index 0000000..a02bf9f --- /dev/null +++ b/docs/GetDestinationConnectors200Response.md @@ -0,0 +1,29 @@ +# 
GetDestinationConnectors200Response + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**destination_connectors** | [**List[DestinationConnector]**](DestinationConnector.md) | | + +## Example + +```python +from vectorize_client.models.get_destination_connectors200_response import GetDestinationConnectors200Response + +# TODO update the JSON string below +json = "{}" +# create an instance of GetDestinationConnectors200Response from a JSON string +get_destination_connectors200_response_instance = GetDestinationConnectors200Response.from_json(json) +# print the JSON string representation of the object +print(GetDestinationConnectors200Response.to_json()) + +# convert the object into a dict +get_destination_connectors200_response_dict = get_destination_connectors200_response_instance.to_dict() +# create an instance of GetDestinationConnectors200Response from a dict +get_destination_connectors200_response_from_dict = GetDestinationConnectors200Response.from_dict(get_destination_connectors200_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GetPipelineEventsResponse.md b/docs/GetPipelineEventsResponse.md new file mode 100644 index 0000000..f738951 --- /dev/null +++ b/docs/GetPipelineEventsResponse.md @@ -0,0 +1,31 @@ +# GetPipelineEventsResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**next_token** | **str** | | [optional] +**data** | [**List[PipelineEvents]**](PipelineEvents.md) | | + +## Example + +```python +from vectorize_client.models.get_pipeline_events_response import GetPipelineEventsResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of GetPipelineEventsResponse from a JSON string 
+get_pipeline_events_response_instance = GetPipelineEventsResponse.from_json(json) +# print the JSON string representation of the object +print(GetPipelineEventsResponse.to_json()) + +# convert the object into a dict +get_pipeline_events_response_dict = get_pipeline_events_response_instance.to_dict() +# create an instance of GetPipelineEventsResponse from a dict +get_pipeline_events_response_from_dict = GetPipelineEventsResponse.from_dict(get_pipeline_events_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GetPipelineMetricsResponse.md b/docs/GetPipelineMetricsResponse.md new file mode 100644 index 0000000..663dd4a --- /dev/null +++ b/docs/GetPipelineMetricsResponse.md @@ -0,0 +1,30 @@ +# GetPipelineMetricsResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**data** | [**List[PipelineMetrics]**](PipelineMetrics.md) | | + +## Example + +```python +from vectorize_client.models.get_pipeline_metrics_response import GetPipelineMetricsResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of GetPipelineMetricsResponse from a JSON string +get_pipeline_metrics_response_instance = GetPipelineMetricsResponse.from_json(json) +# print the JSON string representation of the object +print(GetPipelineMetricsResponse.to_json()) + +# convert the object into a dict +get_pipeline_metrics_response_dict = get_pipeline_metrics_response_instance.to_dict() +# create an instance of GetPipelineMetricsResponse from a dict +get_pipeline_metrics_response_from_dict = GetPipelineMetricsResponse.from_dict(get_pipeline_metrics_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff 
--git a/docs/GetPipelineResponse.md b/docs/GetPipelineResponse.md new file mode 100644 index 0000000..f7bdbaa --- /dev/null +++ b/docs/GetPipelineResponse.md @@ -0,0 +1,30 @@ +# GetPipelineResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**data** | [**PipelineSummary**](PipelineSummary.md) | | + +## Example + +```python +from vectorize_client.models.get_pipeline_response import GetPipelineResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of GetPipelineResponse from a JSON string +get_pipeline_response_instance = GetPipelineResponse.from_json(json) +# print the JSON string representation of the object +print(GetPipelineResponse.to_json()) + +# convert the object into a dict +get_pipeline_response_dict = get_pipeline_response_instance.to_dict() +# create an instance of GetPipelineResponse from a dict +get_pipeline_response_from_dict = GetPipelineResponse.from_dict(get_pipeline_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GetPipelines400Response.md b/docs/GetPipelines400Response.md new file mode 100644 index 0000000..a6bf197 --- /dev/null +++ b/docs/GetPipelines400Response.md @@ -0,0 +1,32 @@ +# GetPipelines400Response + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**error** | **str** | | +**details** | **str** | | [optional] +**failed_updates** | **List[str]** | | [optional] +**successful_updates** | **List[str]** | | [optional] + +## Example + +```python +from vectorize_client.models.get_pipelines400_response import GetPipelines400Response + +# TODO update the JSON string below +json = "{}" +# create an instance of GetPipelines400Response from a JSON string +get_pipelines400_response_instance = 
GetPipelines400Response.from_json(json) +# print the JSON string representation of the object +print(GetPipelines400Response.to_json()) + +# convert the object into a dict +get_pipelines400_response_dict = get_pipelines400_response_instance.to_dict() +# create an instance of GetPipelines400Response from a dict +get_pipelines400_response_from_dict = GetPipelines400Response.from_dict(get_pipelines400_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GetPipelinesResponse.md b/docs/GetPipelinesResponse.md new file mode 100644 index 0000000..190abaf --- /dev/null +++ b/docs/GetPipelinesResponse.md @@ -0,0 +1,30 @@ +# GetPipelinesResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**data** | [**List[PipelineListSummary]**](PipelineListSummary.md) | | + +## Example + +```python +from vectorize_client.models.get_pipelines_response import GetPipelinesResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of GetPipelinesResponse from a JSON string +get_pipelines_response_instance = GetPipelinesResponse.from_json(json) +# print the JSON string representation of the object +print(GetPipelinesResponse.to_json()) + +# convert the object into a dict +get_pipelines_response_dict = get_pipelines_response_instance.to_dict() +# create an instance of GetPipelinesResponse from a dict +get_pipelines_response_from_dict = GetPipelinesResponse.from_dict(get_pipelines_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GetSourceConnectors200Response.md b/docs/GetSourceConnectors200Response.md new file mode 100644 index 0000000..485f7f2 --- /dev/null +++ 
b/docs/GetSourceConnectors200Response.md @@ -0,0 +1,29 @@ +# GetSourceConnectors200Response + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**source_connectors** | [**List[SourceConnector]**](SourceConnector.md) | | + +## Example + +```python +from vectorize_client.models.get_source_connectors200_response import GetSourceConnectors200Response + +# TODO update the JSON string below +json = "{}" +# create an instance of GetSourceConnectors200Response from a JSON string +get_source_connectors200_response_instance = GetSourceConnectors200Response.from_json(json) +# print the JSON string representation of the object +print(GetSourceConnectors200Response.to_json()) + +# convert the object into a dict +get_source_connectors200_response_dict = get_source_connectors200_response_instance.to_dict() +# create an instance of GetSourceConnectors200Response from a dict +get_source_connectors200_response_from_dict = GetSourceConnectors200Response.from_dict(get_source_connectors200_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GetUploadFilesResponse.md b/docs/GetUploadFilesResponse.md new file mode 100644 index 0000000..849dcab --- /dev/null +++ b/docs/GetUploadFilesResponse.md @@ -0,0 +1,30 @@ +# GetUploadFilesResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**files** | [**List[UploadFile]**](UploadFile.md) | | + +## Example + +```python +from vectorize_client.models.get_upload_files_response import GetUploadFilesResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of GetUploadFilesResponse from a JSON string +get_upload_files_response_instance = GetUploadFilesResponse.from_json(json) +# print the JSON string 
representation of the object +print(GetUploadFilesResponse.to_json()) + +# convert the object into a dict +get_upload_files_response_dict = get_upload_files_response_instance.to_dict() +# create an instance of GetUploadFilesResponse from a dict +get_upload_files_response_from_dict = GetUploadFilesResponse.from_dict(get_upload_files_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Github.md b/docs/Github.md new file mode 100644 index 0000000..fe77e2c --- /dev/null +++ b/docs/Github.md @@ -0,0 +1,31 @@ +# Github + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"GITHUB\") | +**config** | [**GITHUBConfig**](GITHUBConfig.md) | | + +## Example + +```python +from vectorize_client.models.github import Github + +# TODO update the JSON string below +json = "{}" +# create an instance of Github from a JSON string +github_instance = Github.from_json(json) +# print the JSON string representation of the object +print(Github.to_json()) + +# convert the object into a dict +github_dict = github_instance.to_dict() +# create an instance of Github from a dict +github_from_dict = Github.from_dict(github_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Github1.md b/docs/Github1.md new file mode 100644 index 0000000..4ea1dde --- /dev/null +++ b/docs/Github1.md @@ -0,0 +1,29 @@ +# Github1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**GITHUBConfig**](GITHUBConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.github1 
import Github1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Github1 from a JSON string +github1_instance = Github1.from_json(json) +# print the JSON string representation of the object +print(Github1.to_json()) + +# convert the object into a dict +github1_dict = github1_instance.to_dict() +# create an instance of Github1 from a dict +github1_from_dict = Github1.from_dict(github1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Github2.md b/docs/Github2.md new file mode 100644 index 0000000..2aa0363 --- /dev/null +++ b/docs/Github2.md @@ -0,0 +1,30 @@ +# Github2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"GITHUB\") | + +## Example + +```python +from vectorize_client.models.github2 import Github2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Github2 from a JSON string +github2_instance = Github2.from_json(json) +# print the JSON string representation of the object +print(Github2.to_json()) + +# convert the object into a dict +github2_dict = github2_instance.to_dict() +# create an instance of Github2 from a dict +github2_from_dict = Github2.from_dict(github2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleCloudStorage.md b/docs/GoogleCloudStorage.md new file mode 100644 index 0000000..b805f91 --- /dev/null +++ b/docs/GoogleCloudStorage.md @@ -0,0 +1,31 @@ +# GoogleCloudStorage + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the 
connector | +**type** | **str** | Connector type (must be \"GCS\") | +**config** | [**GCSConfig**](GCSConfig.md) | | + +## Example + +```python +from vectorize_client.models.google_cloud_storage import GoogleCloudStorage + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleCloudStorage from a JSON string +google_cloud_storage_instance = GoogleCloudStorage.from_json(json) +# print the JSON string representation of the object +print(GoogleCloudStorage.to_json()) + +# convert the object into a dict +google_cloud_storage_dict = google_cloud_storage_instance.to_dict() +# create an instance of GoogleCloudStorage from a dict +google_cloud_storage_from_dict = GoogleCloudStorage.from_dict(google_cloud_storage_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleCloudStorage1.md b/docs/GoogleCloudStorage1.md new file mode 100644 index 0000000..927c4d7 --- /dev/null +++ b/docs/GoogleCloudStorage1.md @@ -0,0 +1,29 @@ +# GoogleCloudStorage1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**GCSConfig**](GCSConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.google_cloud_storage1 import GoogleCloudStorage1 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleCloudStorage1 from a JSON string +google_cloud_storage1_instance = GoogleCloudStorage1.from_json(json) +# print the JSON string representation of the object +print(GoogleCloudStorage1.to_json()) + +# convert the object into a dict +google_cloud_storage1_dict = google_cloud_storage1_instance.to_dict() +# create an instance of GoogleCloudStorage1 from a dict +google_cloud_storage1_from_dict = GoogleCloudStorage1.from_dict(google_cloud_storage1_dict) +``` +[[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleCloudStorage2.md b/docs/GoogleCloudStorage2.md new file mode 100644 index 0000000..4d03bc7 --- /dev/null +++ b/docs/GoogleCloudStorage2.md @@ -0,0 +1,30 @@ +# GoogleCloudStorage2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"GCS\") | + +## Example + +```python +from vectorize_client.models.google_cloud_storage2 import GoogleCloudStorage2 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleCloudStorage2 from a JSON string +google_cloud_storage2_instance = GoogleCloudStorage2.from_json(json) +# print the JSON string representation of the object +print(GoogleCloudStorage2.to_json()) + +# convert the object into a dict +google_cloud_storage2_dict = google_cloud_storage2_instance.to_dict() +# create an instance of GoogleCloudStorage2 from a dict +google_cloud_storage2_from_dict = GoogleCloudStorage2.from_dict(google_cloud_storage2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDrive.md b/docs/GoogleDrive.md new file mode 100644 index 0000000..f5b71c1 --- /dev/null +++ b/docs/GoogleDrive.md @@ -0,0 +1,31 @@ +# GoogleDrive + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"GOOGLE_DRIVE\") | +**config** | [**GOOGLEDRIVEConfig**](GOOGLEDRIVEConfig.md) | | + +## Example + +```python +from vectorize_client.models.google_drive import GoogleDrive + +# TODO update the JSON string below +json = 
"{}" +# create an instance of GoogleDrive from a JSON string +google_drive_instance = GoogleDrive.from_json(json) +# print the JSON string representation of the object +print(GoogleDrive.to_json()) + +# convert the object into a dict +google_drive_dict = google_drive_instance.to_dict() +# create an instance of GoogleDrive from a dict +google_drive_from_dict = GoogleDrive.from_dict(google_drive_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDrive1.md b/docs/GoogleDrive1.md new file mode 100644 index 0000000..d615b52 --- /dev/null +++ b/docs/GoogleDrive1.md @@ -0,0 +1,29 @@ +# GoogleDrive1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**GOOGLEDRIVEConfig**](GOOGLEDRIVEConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.google_drive1 import GoogleDrive1 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDrive1 from a JSON string +google_drive1_instance = GoogleDrive1.from_json(json) +# print the JSON string representation of the object +print(GoogleDrive1.to_json()) + +# convert the object into a dict +google_drive1_dict = google_drive1_instance.to_dict() +# create an instance of GoogleDrive1 from a dict +google_drive1_from_dict = GoogleDrive1.from_dict(google_drive1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDrive2.md b/docs/GoogleDrive2.md new file mode 100644 index 0000000..be315cc --- /dev/null +++ b/docs/GoogleDrive2.md @@ -0,0 +1,30 @@ +# GoogleDrive2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier 
for the connector | +**type** | **str** | Connector type (must be \"GOOGLE_DRIVE\") | + +## Example + +```python +from vectorize_client.models.google_drive2 import GoogleDrive2 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDrive2 from a JSON string +google_drive2_instance = GoogleDrive2.from_json(json) +# print the JSON string representation of the object +print(GoogleDrive2.to_json()) + +# convert the object into a dict +google_drive2_dict = google_drive2_instance.to_dict() +# create an instance of GoogleDrive2 from a dict +google_drive2_from_dict = GoogleDrive2.from_dict(google_drive2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDriveOAuth.md b/docs/GoogleDriveOAuth.md new file mode 100644 index 0000000..1a0145f --- /dev/null +++ b/docs/GoogleDriveOAuth.md @@ -0,0 +1,31 @@ +# GoogleDriveOAuth + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"GOOGLE_DRIVE_OAUTH\") | +**config** | [**GOOGLEDRIVEOAUTHConfig**](GOOGLEDRIVEOAUTHConfig.md) | | + +## Example + +```python +from vectorize_client.models.google_drive_o_auth import GoogleDriveOAuth + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDriveOAuth from a JSON string +google_drive_o_auth_instance = GoogleDriveOAuth.from_json(json) +# print the JSON string representation of the object +print(GoogleDriveOAuth.to_json()) + +# convert the object into a dict +google_drive_o_auth_dict = google_drive_o_auth_instance.to_dict() +# create an instance of GoogleDriveOAuth from a dict +google_drive_o_auth_from_dict = GoogleDriveOAuth.from_dict(google_drive_o_auth_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to 
API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDriveOAuth1.md b/docs/GoogleDriveOAuth1.md new file mode 100644 index 0000000..1ffd744 --- /dev/null +++ b/docs/GoogleDriveOAuth1.md @@ -0,0 +1,29 @@ +# GoogleDriveOAuth1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**GOOGLEDRIVEOAUTHConfig**](GOOGLEDRIVEOAUTHConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.google_drive_o_auth1 import GoogleDriveOAuth1 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDriveOAuth1 from a JSON string +google_drive_o_auth1_instance = GoogleDriveOAuth1.from_json(json) +# print the JSON string representation of the object +print(GoogleDriveOAuth1.to_json()) + +# convert the object into a dict +google_drive_o_auth1_dict = google_drive_o_auth1_instance.to_dict() +# create an instance of GoogleDriveOAuth1 from a dict +google_drive_o_auth1_from_dict = GoogleDriveOAuth1.from_dict(google_drive_o_auth1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDriveOAuth2.md b/docs/GoogleDriveOAuth2.md new file mode 100644 index 0000000..d93e0e9 --- /dev/null +++ b/docs/GoogleDriveOAuth2.md @@ -0,0 +1,30 @@ +# GoogleDriveOAuth2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"GOOGLE_DRIVE_OAUTH\") | + +## Example + +```python +from vectorize_client.models.google_drive_o_auth2 import GoogleDriveOAuth2 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDriveOAuth2 from a JSON string +google_drive_o_auth2_instance = 
GoogleDriveOAuth2.from_json(json) +# print the JSON string representation of the object +print(GoogleDriveOAuth2.to_json()) + +# convert the object into a dict +google_drive_o_auth2_dict = google_drive_o_auth2_instance.to_dict() +# create an instance of GoogleDriveOAuth2 from a dict +google_drive_o_auth2_from_dict = GoogleDriveOAuth2.from_dict(google_drive_o_auth2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDriveOauthMulti.md b/docs/GoogleDriveOauthMulti.md new file mode 100644 index 0000000..03374b3 --- /dev/null +++ b/docs/GoogleDriveOauthMulti.md @@ -0,0 +1,31 @@ +# GoogleDriveOauthMulti + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI\") | +**config** | [**GOOGLEDRIVEOAUTHMULTIConfig**](GOOGLEDRIVEOAUTHMULTIConfig.md) | | + +## Example + +```python +from vectorize_client.models.google_drive_oauth_multi import GoogleDriveOauthMulti + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDriveOauthMulti from a JSON string +google_drive_oauth_multi_instance = GoogleDriveOauthMulti.from_json(json) +# print the JSON string representation of the object +print(GoogleDriveOauthMulti.to_json()) + +# convert the object into a dict +google_drive_oauth_multi_dict = google_drive_oauth_multi_instance.to_dict() +# create an instance of GoogleDriveOauthMulti from a dict +google_drive_oauth_multi_from_dict = GoogleDriveOauthMulti.from_dict(google_drive_oauth_multi_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDriveOauthMulti1.md 
b/docs/GoogleDriveOauthMulti1.md new file mode 100644 index 0000000..371b3db --- /dev/null +++ b/docs/GoogleDriveOauthMulti1.md @@ -0,0 +1,29 @@ +# GoogleDriveOauthMulti1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**GOOGLEDRIVEOAUTHMULTIConfig**](GOOGLEDRIVEOAUTHMULTIConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.google_drive_oauth_multi1 import GoogleDriveOauthMulti1 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDriveOauthMulti1 from a JSON string +google_drive_oauth_multi1_instance = GoogleDriveOauthMulti1.from_json(json) +# print the JSON string representation of the object +print(GoogleDriveOauthMulti1.to_json()) + +# convert the object into a dict +google_drive_oauth_multi1_dict = google_drive_oauth_multi1_instance.to_dict() +# create an instance of GoogleDriveOauthMulti1 from a dict +google_drive_oauth_multi1_from_dict = GoogleDriveOauthMulti1.from_dict(google_drive_oauth_multi1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDriveOauthMulti2.md b/docs/GoogleDriveOauthMulti2.md new file mode 100644 index 0000000..927bd94 --- /dev/null +++ b/docs/GoogleDriveOauthMulti2.md @@ -0,0 +1,30 @@ +# GoogleDriveOauthMulti2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI\") | + +## Example + +```python +from vectorize_client.models.google_drive_oauth_multi2 import GoogleDriveOauthMulti2 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDriveOauthMulti2 from a JSON string +google_drive_oauth_multi2_instance = 
GoogleDriveOauthMulti2.from_json(json) +# print the JSON string representation of the object +print(GoogleDriveOauthMulti2.to_json()) + +# convert the object into a dict +google_drive_oauth_multi2_dict = google_drive_oauth_multi2_instance.to_dict() +# create an instance of GoogleDriveOauthMulti2 from a dict +google_drive_oauth_multi2_from_dict = GoogleDriveOauthMulti2.from_dict(google_drive_oauth_multi2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDriveOauthMultiCustom.md b/docs/GoogleDriveOauthMultiCustom.md new file mode 100644 index 0000000..138ae3d --- /dev/null +++ b/docs/GoogleDriveOauthMultiCustom.md @@ -0,0 +1,31 @@ +# GoogleDriveOauthMultiCustom + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM\") | +**config** | [**GOOGLEDRIVEOAUTHMULTICUSTOMConfig**](GOOGLEDRIVEOAUTHMULTICUSTOMConfig.md) | | + +## Example + +```python +from vectorize_client.models.google_drive_oauth_multi_custom import GoogleDriveOauthMultiCustom + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDriveOauthMultiCustom from a JSON string +google_drive_oauth_multi_custom_instance = GoogleDriveOauthMultiCustom.from_json(json) +# print the JSON string representation of the object +print(GoogleDriveOauthMultiCustom.to_json()) + +# convert the object into a dict +google_drive_oauth_multi_custom_dict = google_drive_oauth_multi_custom_instance.to_dict() +# create an instance of GoogleDriveOauthMultiCustom from a dict +google_drive_oauth_multi_custom_from_dict = GoogleDriveOauthMultiCustom.from_dict(google_drive_oauth_multi_custom_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDriveOauthMultiCustom1.md b/docs/GoogleDriveOauthMultiCustom1.md new file mode 100644 index 0000000..e14a1b0 --- /dev/null +++ b/docs/GoogleDriveOauthMultiCustom1.md @@ -0,0 +1,29 @@ +# GoogleDriveOauthMultiCustom1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**GOOGLEDRIVEOAUTHMULTICUSTOMConfig**](GOOGLEDRIVEOAUTHMULTICUSTOMConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.google_drive_oauth_multi_custom1 import GoogleDriveOauthMultiCustom1 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDriveOauthMultiCustom1 from a JSON string +google_drive_oauth_multi_custom1_instance = GoogleDriveOauthMultiCustom1.from_json(json) +# print the JSON string representation of the object +print(GoogleDriveOauthMultiCustom1.to_json()) + +# convert the object into a dict +google_drive_oauth_multi_custom1_dict = google_drive_oauth_multi_custom1_instance.to_dict() +# create an instance of GoogleDriveOauthMultiCustom1 from a dict +google_drive_oauth_multi_custom1_from_dict = GoogleDriveOauthMultiCustom1.from_dict(google_drive_oauth_multi_custom1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/GoogleDriveOauthMultiCustom2.md b/docs/GoogleDriveOauthMultiCustom2.md new file mode 100644 index 0000000..8d20d4f --- /dev/null +++ b/docs/GoogleDriveOauthMultiCustom2.md @@ -0,0 +1,30 @@ +# GoogleDriveOauthMultiCustom2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM\") | + +## 
Example + +```python +from vectorize_client.models.google_drive_oauth_multi_custom2 import GoogleDriveOauthMultiCustom2 + +# TODO update the JSON string below +json = "{}" +# create an instance of GoogleDriveOauthMultiCustom2 from a JSON string +google_drive_oauth_multi_custom2_instance = GoogleDriveOauthMultiCustom2.from_json(json) +# print the JSON string representation of the object +print(GoogleDriveOauthMultiCustom2.to_json()) + +# convert the object into a dict +google_drive_oauth_multi_custom2_dict = google_drive_oauth_multi_custom2_instance.to_dict() +# create an instance of GoogleDriveOauthMultiCustom2 from a dict +google_drive_oauth_multi_custom2_from_dict = GoogleDriveOauthMultiCustom2.from_dict(google_drive_oauth_multi_custom2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/INTERCOMAuthConfig.md b/docs/INTERCOMAuthConfig.md new file mode 100644 index 0000000..d1292b1 --- /dev/null +++ b/docs/INTERCOMAuthConfig.md @@ -0,0 +1,31 @@ +# INTERCOMAuthConfig + +Authentication configuration for Intercom + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**token** | **str** | Access Token. 
Example: Authorize Intercom Access | + +## Example + +```python +from vectorize_client.models.intercom_auth_config import INTERCOMAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of INTERCOMAuthConfig from a JSON string +intercom_auth_config_instance = INTERCOMAuthConfig.from_json(json) +# print the JSON string representation of the object +print(INTERCOMAuthConfig.to_json()) + +# convert the object into a dict +intercom_auth_config_dict = intercom_auth_config_instance.to_dict() +# create an instance of INTERCOMAuthConfig from a dict +intercom_auth_config_from_dict = INTERCOMAuthConfig.from_dict(intercom_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/INTERCOMConfig.md b/docs/INTERCOMConfig.md new file mode 100644 index 0000000..9744de9 --- /dev/null +++ b/docs/INTERCOMConfig.md @@ -0,0 +1,32 @@ +# INTERCOMConfig + +Configuration for Intercom connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**created_at** | **date** | Created After. Filter for conversation created after this date. Example: Enter a date: Example 2012-12-31 | +**updated_at** | **date** | Updated After. Filter for conversation updated after this date. 
Example: Enter a date: Example 2012-12-31 | [optional] +**state** | **List[str]** | State | [optional] + +## Example + +```python +from vectorize_client.models.intercom_config import INTERCOMConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of INTERCOMConfig from a JSON string +intercom_config_instance = INTERCOMConfig.from_json(json) +# print the JSON string representation of the object +print(INTERCOMConfig.to_json()) + +# convert the object into a dict +intercom_config_dict = intercom_config_instance.to_dict() +# create an instance of INTERCOMConfig from a dict +intercom_config_from_dict = INTERCOMConfig.from_dict(intercom_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Intercom.md b/docs/Intercom.md new file mode 100644 index 0000000..89a4732 --- /dev/null +++ b/docs/Intercom.md @@ -0,0 +1,31 @@ +# Intercom + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"INTERCOM\") | +**config** | [**INTERCOMConfig**](INTERCOMConfig.md) | | + +## Example + +```python +from vectorize_client.models.intercom import Intercom + +# TODO update the JSON string below +json = "{}" +# create an instance of Intercom from a JSON string +intercom_instance = Intercom.from_json(json) +# print the JSON string representation of the object +print(Intercom.to_json()) + +# convert the object into a dict +intercom_dict = intercom_instance.to_dict() +# create an instance of Intercom from a dict +intercom_from_dict = Intercom.from_dict(intercom_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Intercom1.md 
b/docs/Intercom1.md new file mode 100644 index 0000000..0f97340 --- /dev/null +++ b/docs/Intercom1.md @@ -0,0 +1,29 @@ +# Intercom1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**INTERCOMConfig**](INTERCOMConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.intercom1 import Intercom1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Intercom1 from a JSON string +intercom1_instance = Intercom1.from_json(json) +# print the JSON string representation of the object +print(Intercom1.to_json()) + +# convert the object into a dict +intercom1_dict = intercom1_instance.to_dict() +# create an instance of Intercom1 from a dict +intercom1_from_dict = Intercom1.from_dict(intercom1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Intercom2.md b/docs/Intercom2.md new file mode 100644 index 0000000..f94ed02 --- /dev/null +++ b/docs/Intercom2.md @@ -0,0 +1,30 @@ +# Intercom2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"INTERCOM\") | + +## Example + +```python +from vectorize_client.models.intercom2 import Intercom2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Intercom2 from a JSON string +intercom2_instance = Intercom2.from_json(json) +# print the JSON string representation of the object +print(Intercom2.to_json()) + +# convert the object into a dict +intercom2_dict = intercom2_instance.to_dict() +# create an instance of Intercom2 from a dict +intercom2_from_dict = Intercom2.from_dict(intercom2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/MILVUSAuthConfig.md b/docs/MILVUSAuthConfig.md new file mode 100644 index 0000000..1975d62 --- /dev/null +++ b/docs/MILVUSAuthConfig.md @@ -0,0 +1,34 @@ +# MILVUSAuthConfig + +Authentication configuration for Milvus + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Milvus integration | +**url** | **str** | Public Endpoint. Example: Enter your public endpoint for your Milvus cluster | +**token** | **str** | Token. Example: Enter your cluster token or Username/Password | [optional] +**username** | **str** | Username. Example: Enter your cluster Username | [optional] +**password** | **str** | Password. Example: Enter your cluster Password | [optional] + +## Example + +```python +from vectorize_client.models.milvus_auth_config import MILVUSAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of MILVUSAuthConfig from a JSON string +milvus_auth_config_instance = MILVUSAuthConfig.from_json(json) +# print the JSON string representation of the object +print(MILVUSAuthConfig.to_json()) + +# convert the object into a dict +milvus_auth_config_dict = milvus_auth_config_instance.to_dict() +# create an instance of MILVUSAuthConfig from a dict +milvus_auth_config_from_dict = MILVUSAuthConfig.from_dict(milvus_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/MILVUSConfig.md b/docs/MILVUSConfig.md new file mode 100644 index 0000000..e3298ff --- /dev/null +++ b/docs/MILVUSConfig.md @@ -0,0 +1,30 @@ +# MILVUSConfig + +Configuration for Milvus connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | 
------------- +**collection** | **str** | Collection Name. Example: Enter collection name | + +## Example + +```python +from vectorize_client.models.milvus_config import MILVUSConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of MILVUSConfig from a JSON string +milvus_config_instance = MILVUSConfig.from_json(json) +# print the JSON string representation of the object +print(MILVUSConfig.to_json()) + +# convert the object into a dict +milvus_config_dict = milvus_config_instance.to_dict() +# create an instance of MILVUSConfig from a dict +milvus_config_from_dict = MILVUSConfig.from_dict(milvus_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/MetadataExtractionStrategy.md b/docs/MetadataExtractionStrategy.md new file mode 100644 index 0000000..4cbf77b --- /dev/null +++ b/docs/MetadataExtractionStrategy.md @@ -0,0 +1,30 @@ +# MetadataExtractionStrategy + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**schemas** | [**List[MetadataExtractionStrategySchema]**](MetadataExtractionStrategySchema.md) | | [optional] +**infer_schema** | **bool** | | [optional] + +## Example + +```python +from vectorize_client.models.metadata_extraction_strategy import MetadataExtractionStrategy + +# TODO update the JSON string below +json = "{}" +# create an instance of MetadataExtractionStrategy from a JSON string +metadata_extraction_strategy_instance = MetadataExtractionStrategy.from_json(json) +# print the JSON string representation of the object +print(MetadataExtractionStrategy.to_json()) + +# convert the object into a dict +metadata_extraction_strategy_dict = metadata_extraction_strategy_instance.to_dict() +# create an instance of MetadataExtractionStrategy from a dict +metadata_extraction_strategy_from_dict = 
MetadataExtractionStrategy.from_dict(metadata_extraction_strategy_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/MetadataExtractionStrategySchema.md b/docs/MetadataExtractionStrategySchema.md new file mode 100644 index 0000000..cb237b3 --- /dev/null +++ b/docs/MetadataExtractionStrategySchema.md @@ -0,0 +1,30 @@ +# MetadataExtractionStrategySchema + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**var_schema** | **str** | | + +## Example + +```python +from vectorize_client.models.metadata_extraction_strategy_schema import MetadataExtractionStrategySchema + +# TODO update the JSON string below +json = "{}" +# create an instance of MetadataExtractionStrategySchema from a JSON string +metadata_extraction_strategy_schema_instance = MetadataExtractionStrategySchema.from_json(json) +# print the JSON string representation of the object +print(MetadataExtractionStrategySchema.to_json()) + +# convert the object into a dict +metadata_extraction_strategy_schema_dict = metadata_extraction_strategy_schema_instance.to_dict() +# create an instance of MetadataExtractionStrategySchema from a dict +metadata_extraction_strategy_schema_from_dict = MetadataExtractionStrategySchema.from_dict(metadata_extraction_strategy_schema_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Milvus.md b/docs/Milvus.md new file mode 100644 index 0000000..1ccfa28 --- /dev/null +++ b/docs/Milvus.md @@ -0,0 +1,31 @@ +# Milvus + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type 
(must be \"MILVUS\") | +**config** | [**MILVUSConfig**](MILVUSConfig.md) | | + +## Example + +```python +from vectorize_client.models.milvus import Milvus + +# TODO update the JSON string below +json = "{}" +# create an instance of Milvus from a JSON string +milvus_instance = Milvus.from_json(json) +# print the JSON string representation of the object +print(Milvus.to_json()) + +# convert the object into a dict +milvus_dict = milvus_instance.to_dict() +# create an instance of Milvus from a dict +milvus_from_dict = Milvus.from_dict(milvus_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Milvus1.md b/docs/Milvus1.md new file mode 100644 index 0000000..c6f8dfa --- /dev/null +++ b/docs/Milvus1.md @@ -0,0 +1,29 @@ +# Milvus1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**MILVUSConfig**](MILVUSConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.milvus1 import Milvus1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Milvus1 from a JSON string +milvus1_instance = Milvus1.from_json(json) +# print the JSON string representation of the object +print(Milvus1.to_json()) + +# convert the object into a dict +milvus1_dict = milvus1_instance.to_dict() +# create an instance of Milvus1 from a dict +milvus1_from_dict = Milvus1.from_dict(milvus1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Milvus2.md b/docs/Milvus2.md new file mode 100644 index 0000000..e69f8cd --- /dev/null +++ b/docs/Milvus2.md @@ -0,0 +1,30 @@ +# Milvus2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | 
**str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"MILVUS\") | + +## Example + +```python +from vectorize_client.models.milvus2 import Milvus2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Milvus2 from a JSON string +milvus2_instance = Milvus2.from_json(json) +# print the JSON string representation of the object +print(Milvus2.to_json()) + +# convert the object into a dict +milvus2_dict = milvus2_instance.to_dict() +# create an instance of Milvus2 from a dict +milvus2_from_dict = Milvus2.from_dict(milvus2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/N8NConfig.md b/docs/N8NConfig.md new file mode 100644 index 0000000..e1d6656 --- /dev/null +++ b/docs/N8NConfig.md @@ -0,0 +1,31 @@ +# N8NConfig + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**account** | **str** | | +**webhook_path** | **str** | | +**headers** | **Dict[str, str]** | | [optional] + +## Example + +```python +from vectorize_client.models.n8_n_config import N8NConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of N8NConfig from a JSON string +n8_n_config_instance = N8NConfig.from_json(json) +# print the JSON string representation of the object +print(N8NConfig.to_json()) + +# convert the object into a dict +n8_n_config_dict = n8_n_config_instance.to_dict() +# create an instance of N8NConfig from a dict +n8_n_config_from_dict = N8NConfig.from_dict(n8_n_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/NOTIONAuthConfig.md b/docs/NOTIONAuthConfig.md new file mode 100644 index 0000000..62e56ec --- /dev/null +++ 
b/docs/NOTIONAuthConfig.md @@ -0,0 +1,33 @@ +# NOTIONAuthConfig + +Authentication configuration for Notion + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**access_token** | **str** | Connect Notion to Vectorize - Note this will effect existing connections. test. Example: Authorize | +**s3id** | **str** | | [optional] +**edited_token** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.notion_auth_config import NOTIONAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of NOTIONAuthConfig from a JSON string +notion_auth_config_instance = NOTIONAuthConfig.from_json(json) +# print the JSON string representation of the object +print(NOTIONAuthConfig.to_json()) + +# convert the object into a dict +notion_auth_config_dict = notion_auth_config_instance.to_dict() +# create an instance of NOTIONAuthConfig from a dict +notion_auth_config_from_dict = NOTIONAuthConfig.from_dict(notion_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/NOTIONConfig.md b/docs/NOTIONConfig.md new file mode 100644 index 0000000..68f5d0a --- /dev/null +++ b/docs/NOTIONConfig.md @@ -0,0 +1,34 @@ +# NOTIONConfig + +Configuration for Notion connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**select_resources** | **str** | Select Notion Resources | +**database_ids** | **str** | Database IDs | +**database_names** | **str** | Database Names | +**page_ids** | **str** | Page IDs | +**page_names** | **str** | Page Names | + +## Example + +```python +from vectorize_client.models.notion_config import NOTIONConfig + +# TODO update the JSON string below +json = "{}" +# create an instance 
of NOTIONConfig from a JSON string +notion_config_instance = NOTIONConfig.from_json(json) +# print the JSON string representation of the object +print(NOTIONConfig.to_json()) + +# convert the object into a dict +notion_config_dict = notion_config_instance.to_dict() +# create an instance of NOTIONConfig from a dict +notion_config_from_dict = NOTIONConfig.from_dict(notion_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/NOTIONOAUTHMULTIAuthConfig.md b/docs/NOTIONOAUTHMULTIAuthConfig.md new file mode 100644 index 0000000..dcd2610 --- /dev/null +++ b/docs/NOTIONOAUTHMULTIAuthConfig.md @@ -0,0 +1,33 @@ +# NOTIONOAUTHMULTIAuthConfig + +Authentication configuration for Notion Multi-User (Vectorize) + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**authorized_users** | **str** | Authorized Users. 
Users who have authorized access to their Notion content | [optional] +**edited_users** | **str** | | [optional] +**deleted_users** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.notionoauthmulti_auth_config import NOTIONOAUTHMULTIAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of NOTIONOAUTHMULTIAuthConfig from a JSON string +notionoauthmulti_auth_config_instance = NOTIONOAUTHMULTIAuthConfig.from_json(json) +# print the JSON string representation of the object +print(NOTIONOAUTHMULTIAuthConfig.to_json()) + +# convert the object into a dict +notionoauthmulti_auth_config_dict = notionoauthmulti_auth_config_instance.to_dict() +# create an instance of NOTIONOAUTHMULTIAuthConfig from a dict +notionoauthmulti_auth_config_from_dict = NOTIONOAUTHMULTIAuthConfig.from_dict(notionoauthmulti_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/NOTIONOAUTHMULTICUSTOMAuthConfig.md b/docs/NOTIONOAUTHMULTICUSTOMAuthConfig.md new file mode 100644 index 0000000..b9edec0 --- /dev/null +++ b/docs/NOTIONOAUTHMULTICUSTOMAuthConfig.md @@ -0,0 +1,35 @@ +# NOTIONOAUTHMULTICUSTOMAuthConfig + +Authentication configuration for Notion Multi-User (White Label) + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**client_id** | **str** | Notion Client ID. Example: Enter Client ID | +**client_secret** | **str** | Notion Client Secret. 
Example: Enter Client Secret | +**authorized_users** | **str** | Authorized Users | [optional] +**edited_users** | **str** | | [optional] +**deleted_users** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.notionoauthmulticustom_auth_config import NOTIONOAUTHMULTICUSTOMAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of NOTIONOAUTHMULTICUSTOMAuthConfig from a JSON string +notionoauthmulticustom_auth_config_instance = NOTIONOAUTHMULTICUSTOMAuthConfig.from_json(json) +# print the JSON string representation of the object +print(NOTIONOAUTHMULTICUSTOMAuthConfig.to_json()) + +# convert the object into a dict +notionoauthmulticustom_auth_config_dict = notionoauthmulticustom_auth_config_instance.to_dict() +# create an instance of NOTIONOAUTHMULTICUSTOMAuthConfig from a dict +notionoauthmulticustom_auth_config_from_dict = NOTIONOAUTHMULTICUSTOMAuthConfig.from_dict(notionoauthmulticustom_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Notion.md b/docs/Notion.md new file mode 100644 index 0000000..f900613 --- /dev/null +++ b/docs/Notion.md @@ -0,0 +1,31 @@ +# Notion + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"NOTION\") | +**config** | [**NOTIONConfig**](NOTIONConfig.md) | | + +## Example + +```python +from vectorize_client.models.notion import Notion + +# TODO update the JSON string below +json = "{}" +# create an instance of Notion from a JSON string +notion_instance = Notion.from_json(json) +# print the JSON string representation of the object +print(Notion.to_json()) + +# convert the object into a dict +notion_dict = notion_instance.to_dict() +# create an instance of Notion from a 
dict +notion_from_dict = Notion.from_dict(notion_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Notion1.md b/docs/Notion1.md new file mode 100644 index 0000000..db7e677 --- /dev/null +++ b/docs/Notion1.md @@ -0,0 +1,29 @@ +# Notion1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**NOTIONConfig**](NOTIONConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.notion1 import Notion1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Notion1 from a JSON string +notion1_instance = Notion1.from_json(json) +# print the JSON string representation of the object +print(Notion1.to_json()) + +# convert the object into a dict +notion1_dict = notion1_instance.to_dict() +# create an instance of Notion1 from a dict +notion1_from_dict = Notion1.from_dict(notion1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Notion2.md b/docs/Notion2.md new file mode 100644 index 0000000..cd8b0c2 --- /dev/null +++ b/docs/Notion2.md @@ -0,0 +1,30 @@ +# Notion2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"NOTION\") | + +## Example + +```python +from vectorize_client.models.notion2 import Notion2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Notion2 from a JSON string +notion2_instance = Notion2.from_json(json) +# print the JSON string representation of the object +print(Notion2.to_json()) + +# convert the object into a dict +notion2_dict = notion2_instance.to_dict() 
+# create an instance of Notion2 from a dict +notion2_from_dict = Notion2.from_dict(notion2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/NotionOauthMulti.md b/docs/NotionOauthMulti.md new file mode 100644 index 0000000..a580908 --- /dev/null +++ b/docs/NotionOauthMulti.md @@ -0,0 +1,31 @@ +# NotionOauthMulti + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"NOTION_OAUTH_MULTI\") | +**config** | [**NOTIONOAUTHMULTIAuthConfig**](NOTIONOAUTHMULTIAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.notion_oauth_multi import NotionOauthMulti + +# TODO update the JSON string below +json = "{}" +# create an instance of NotionOauthMulti from a JSON string +notion_oauth_multi_instance = NotionOauthMulti.from_json(json) +# print the JSON string representation of the object +print(NotionOauthMulti.to_json()) + +# convert the object into a dict +notion_oauth_multi_dict = notion_oauth_multi_instance.to_dict() +# create an instance of NotionOauthMulti from a dict +notion_oauth_multi_from_dict = NotionOauthMulti.from_dict(notion_oauth_multi_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/NotionOauthMulti1.md b/docs/NotionOauthMulti1.md new file mode 100644 index 0000000..483b2ae --- /dev/null +++ b/docs/NotionOauthMulti1.md @@ -0,0 +1,29 @@ +# NotionOauthMulti1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**NOTIONOAUTHMULTIAuthConfig**](NOTIONOAUTHMULTIAuthConfig.md) | | [optional] + +## Example + +```python +from 
vectorize_client.models.notion_oauth_multi1 import NotionOauthMulti1 + +# TODO update the JSON string below +json = "{}" +# create an instance of NotionOauthMulti1 from a JSON string +notion_oauth_multi1_instance = NotionOauthMulti1.from_json(json) +# print the JSON string representation of the object +print(NotionOauthMulti1.to_json()) + +# convert the object into a dict +notion_oauth_multi1_dict = notion_oauth_multi1_instance.to_dict() +# create an instance of NotionOauthMulti1 from a dict +notion_oauth_multi1_from_dict = NotionOauthMulti1.from_dict(notion_oauth_multi1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/NotionOauthMulti2.md b/docs/NotionOauthMulti2.md new file mode 100644 index 0000000..e6df694 --- /dev/null +++ b/docs/NotionOauthMulti2.md @@ -0,0 +1,30 @@ +# NotionOauthMulti2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"NOTION_OAUTH_MULTI\") | + +## Example + +```python +from vectorize_client.models.notion_oauth_multi2 import NotionOauthMulti2 + +# TODO update the JSON string below +json = "{}" +# create an instance of NotionOauthMulti2 from a JSON string +notion_oauth_multi2_instance = NotionOauthMulti2.from_json(json) +# print the JSON string representation of the object +print(NotionOauthMulti2.to_json()) + +# convert the object into a dict +notion_oauth_multi2_dict = notion_oauth_multi2_instance.to_dict() +# create an instance of NotionOauthMulti2 from a dict +notion_oauth_multi2_from_dict = NotionOauthMulti2.from_dict(notion_oauth_multi2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git 
a/docs/NotionOauthMultiCustom.md b/docs/NotionOauthMultiCustom.md new file mode 100644 index 0000000..c7154d0 --- /dev/null +++ b/docs/NotionOauthMultiCustom.md @@ -0,0 +1,31 @@ +# NotionOauthMultiCustom + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"NOTION_OAUTH_MULTI_CUSTOM\") | +**config** | [**NOTIONOAUTHMULTICUSTOMAuthConfig**](NOTIONOAUTHMULTICUSTOMAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.notion_oauth_multi_custom import NotionOauthMultiCustom + +# TODO update the JSON string below +json = "{}" +# create an instance of NotionOauthMultiCustom from a JSON string +notion_oauth_multi_custom_instance = NotionOauthMultiCustom.from_json(json) +# print the JSON string representation of the object +print(NotionOauthMultiCustom.to_json()) + +# convert the object into a dict +notion_oauth_multi_custom_dict = notion_oauth_multi_custom_instance.to_dict() +# create an instance of NotionOauthMultiCustom from a dict +notion_oauth_multi_custom_from_dict = NotionOauthMultiCustom.from_dict(notion_oauth_multi_custom_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/NotionOauthMultiCustom1.md b/docs/NotionOauthMultiCustom1.md new file mode 100644 index 0000000..ffb6a65 --- /dev/null +++ b/docs/NotionOauthMultiCustom1.md @@ -0,0 +1,29 @@ +# NotionOauthMultiCustom1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**NOTIONOAUTHMULTICUSTOMAuthConfig**](NOTIONOAUTHMULTICUSTOMAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.notion_oauth_multi_custom1 import NotionOauthMultiCustom1 + +# TODO update the JSON string below +json = 
"{}" +# create an instance of NotionOauthMultiCustom1 from a JSON string +notion_oauth_multi_custom1_instance = NotionOauthMultiCustom1.from_json(json) +# print the JSON string representation of the object +print(NotionOauthMultiCustom1.to_json()) + +# convert the object into a dict +notion_oauth_multi_custom1_dict = notion_oauth_multi_custom1_instance.to_dict() +# create an instance of NotionOauthMultiCustom1 from a dict +notion_oauth_multi_custom1_from_dict = NotionOauthMultiCustom1.from_dict(notion_oauth_multi_custom1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/NotionOauthMultiCustom2.md b/docs/NotionOauthMultiCustom2.md new file mode 100644 index 0000000..d863e79 --- /dev/null +++ b/docs/NotionOauthMultiCustom2.md @@ -0,0 +1,30 @@ +# NotionOauthMultiCustom2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"NOTION_OAUTH_MULTI_CUSTOM\") | + +## Example + +```python +from vectorize_client.models.notion_oauth_multi_custom2 import NotionOauthMultiCustom2 + +# TODO update the JSON string below +json = "{}" +# create an instance of NotionOauthMultiCustom2 from a JSON string +notion_oauth_multi_custom2_instance = NotionOauthMultiCustom2.from_json(json) +# print the JSON string representation of the object +print(NotionOauthMultiCustom2.to_json()) + +# convert the object into a dict +notion_oauth_multi_custom2_dict = notion_oauth_multi_custom2_instance.to_dict() +# create an instance of NotionOauthMultiCustom2 from a dict +notion_oauth_multi_custom2_from_dict = NotionOauthMultiCustom2.from_dict(notion_oauth_multi_custom2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/ONEDRIVEAuthConfig.md b/docs/ONEDRIVEAuthConfig.md new file mode 100644 index 0000000..7f24b62 --- /dev/null +++ b/docs/ONEDRIVEAuthConfig.md @@ -0,0 +1,34 @@ +# ONEDRIVEAuthConfig + +Authentication configuration for OneDrive + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**ms_client_id** | **str** | Client Id. Example: Enter Client Id | +**ms_tenant_id** | **str** | Tenant Id. Example: Enter Tenant Id | +**ms_client_secret** | **str** | Client Secret. Example: Enter Client Secret | +**users** | **str** | Users. Example: Enter users emails to import files from. Example: developer@vectorize.io | + +## Example + +```python +from vectorize_client.models.onedrive_auth_config import ONEDRIVEAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of ONEDRIVEAuthConfig from a JSON string +onedrive_auth_config_instance = ONEDRIVEAuthConfig.from_json(json) +# print the JSON string representation of the object +print(ONEDRIVEAuthConfig.to_json()) + +# convert the object into a dict +onedrive_auth_config_dict = onedrive_auth_config_instance.to_dict() +# create an instance of ONEDRIVEAuthConfig from a dict +onedrive_auth_config_from_dict = ONEDRIVEAuthConfig.from_dict(onedrive_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/ONEDRIVEConfig.md b/docs/ONEDRIVEConfig.md new file mode 100644 index 0000000..23bb14e --- /dev/null +++ b/docs/ONEDRIVEConfig.md @@ -0,0 +1,31 @@ +# ONEDRIVEConfig + +Configuration for OneDrive connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- 
+**file_extensions** | **List[str]** | File Extensions | +**path_prefix** | **str** | Read starting from this folder (optional). Example: Enter Folder path: /exampleFolder/subFolder | [optional] + +## Example + +```python +from vectorize_client.models.onedrive_config import ONEDRIVEConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of ONEDRIVEConfig from a JSON string +onedrive_config_instance = ONEDRIVEConfig.from_json(json) +# print the JSON string representation of the object +print(ONEDRIVEConfig.to_json()) + +# convert the object into a dict +onedrive_config_dict = onedrive_config_instance.to_dict() +# create an instance of ONEDRIVEConfig from a dict +onedrive_config_from_dict = ONEDRIVEConfig.from_dict(onedrive_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/OPENAIAuthConfig.md b/docs/OPENAIAuthConfig.md new file mode 100644 index 0000000..df3e0e0 --- /dev/null +++ b/docs/OPENAIAuthConfig.md @@ -0,0 +1,31 @@ +# OPENAIAuthConfig + +Authentication configuration for OpenAI + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your OpenAI integration | +**key** | **str** | API Key. 
Example: Enter your OpenAI API Key | + +## Example + +```python +from vectorize_client.models.openai_auth_config import OPENAIAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of OPENAIAuthConfig from a JSON string +openai_auth_config_instance = OPENAIAuthConfig.from_json(json) +# print the JSON string representation of the object +print(OPENAIAuthConfig.to_json()) + +# convert the object into a dict +openai_auth_config_dict = openai_auth_config_instance.to_dict() +# create an instance of OPENAIAuthConfig from a dict +openai_auth_config_from_dict = OPENAIAuthConfig.from_dict(openai_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/OneDrive.md b/docs/OneDrive.md new file mode 100644 index 0000000..eb6c950 --- /dev/null +++ b/docs/OneDrive.md @@ -0,0 +1,31 @@ +# OneDrive + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"ONE_DRIVE\") | +**config** | [**ONEDRIVEConfig**](ONEDRIVEConfig.md) | | + +## Example + +```python +from vectorize_client.models.one_drive import OneDrive + +# TODO update the JSON string below +json = "{}" +# create an instance of OneDrive from a JSON string +one_drive_instance = OneDrive.from_json(json) +# print the JSON string representation of the object +print(OneDrive.to_json()) + +# convert the object into a dict +one_drive_dict = one_drive_instance.to_dict() +# create an instance of OneDrive from a dict +one_drive_from_dict = OneDrive.from_dict(one_drive_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/OneDrive1.md b/docs/OneDrive1.md new file mode 
100644 index 0000000..e97c1f0 --- /dev/null +++ b/docs/OneDrive1.md @@ -0,0 +1,29 @@ +# OneDrive1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**ONEDRIVEConfig**](ONEDRIVEConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.one_drive1 import OneDrive1 + +# TODO update the JSON string below +json = "{}" +# create an instance of OneDrive1 from a JSON string +one_drive1_instance = OneDrive1.from_json(json) +# print the JSON string representation of the object +print(OneDrive1.to_json()) + +# convert the object into a dict +one_drive1_dict = one_drive1_instance.to_dict() +# create an instance of OneDrive1 from a dict +one_drive1_from_dict = OneDrive1.from_dict(one_drive1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/OneDrive2.md b/docs/OneDrive2.md new file mode 100644 index 0000000..5b5141f --- /dev/null +++ b/docs/OneDrive2.md @@ -0,0 +1,30 @@ +# OneDrive2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"ONE_DRIVE\") | + +## Example + +```python +from vectorize_client.models.one_drive2 import OneDrive2 + +# TODO update the JSON string below +json = "{}" +# create an instance of OneDrive2 from a JSON string +one_drive2_instance = OneDrive2.from_json(json) +# print the JSON string representation of the object +print(OneDrive2.to_json()) + +# convert the object into a dict +one_drive2_dict = one_drive2_instance.to_dict() +# create an instance of OneDrive2 from a dict +one_drive2_from_dict = OneDrive2.from_dict(one_drive2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Openai.md b/docs/Openai.md new file mode 100644 index 0000000..2ac5bd2 --- /dev/null +++ b/docs/Openai.md @@ -0,0 +1,31 @@ +# Openai + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"OPENAI\") | +**config** | [**OPENAIAuthConfig**](OPENAIAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.openai import Openai + +# TODO update the JSON string below +json = "{}" +# create an instance of Openai from a JSON string +openai_instance = Openai.from_json(json) +# print the JSON string representation of the object +print(Openai.to_json()) + +# convert the object into a dict +openai_dict = openai_instance.to_dict() +# create an instance of Openai from a dict +openai_from_dict = Openai.from_dict(openai_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Openai1.md b/docs/Openai1.md new file mode 100644 index 0000000..58aafbc --- /dev/null +++ b/docs/Openai1.md @@ -0,0 +1,29 @@ +# Openai1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**OPENAIAuthConfig**](OPENAIAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.openai1 import Openai1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Openai1 from a JSON string +openai1_instance = Openai1.from_json(json) +# print the JSON string representation of the object +print(Openai1.to_json()) + +# convert the object into a dict +openai1_dict = openai1_instance.to_dict() +# create an instance of Openai1 from a dict +openai1_from_dict = 
Openai1.from_dict(openai1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Openai2.md b/docs/Openai2.md new file mode 100644 index 0000000..3f00f74 --- /dev/null +++ b/docs/Openai2.md @@ -0,0 +1,30 @@ +# Openai2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"OPENAI\") | + +## Example + +```python +from vectorize_client.models.openai2 import Openai2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Openai2 from a JSON string +openai2_instance = Openai2.from_json(json) +# print the JSON string representation of the object +print(openai2_instance.to_json()) + +# convert the object into a dict +openai2_dict = openai2_instance.to_dict() +# create an instance of Openai2 from a dict +openai2_from_dict = Openai2.from_dict(openai2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PINECONEAuthConfig.md b/docs/PINECONEAuthConfig.md new file mode 100644 index 0000000..d68fdaa --- /dev/null +++ b/docs/PINECONEAuthConfig.md @@ -0,0 +1,31 @@ +# PINECONEAuthConfig + +Authentication configuration for Pinecone + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Pinecone integration | +**api_key** | **str** | API Key. 
Example: Enter your API Key | + +## Example + +```python +from vectorize_client.models.pinecone_auth_config import PINECONEAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of PINECONEAuthConfig from a JSON string +pinecone_auth_config_instance = PINECONEAuthConfig.from_json(json) +# print the JSON string representation of the object +print(PINECONEAuthConfig.to_json()) + +# convert the object into a dict +pinecone_auth_config_dict = pinecone_auth_config_instance.to_dict() +# create an instance of PINECONEAuthConfig from a dict +pinecone_auth_config_from_dict = PINECONEAuthConfig.from_dict(pinecone_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PINECONEConfig.md b/docs/PINECONEConfig.md new file mode 100644 index 0000000..79cb1c0 --- /dev/null +++ b/docs/PINECONEConfig.md @@ -0,0 +1,31 @@ +# PINECONEConfig + +Configuration for Pinecone connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**index** | **str** | Index Name. Example: Enter index name | +**namespace** | **str** | Namespace. 
Example: Enter namespace | [optional] + +## Example + +```python +from vectorize_client.models.pinecone_config import PINECONEConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of PINECONEConfig from a JSON string +pinecone_config_instance = PINECONEConfig.from_json(json) +# print the JSON string representation of the object +print(PINECONEConfig.to_json()) + +# convert the object into a dict +pinecone_config_dict = pinecone_config_instance.to_dict() +# create an instance of PINECONEConfig from a dict +pinecone_config_from_dict = PINECONEConfig.from_dict(pinecone_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/POSTGRESQLAuthConfig.md b/docs/POSTGRESQLAuthConfig.md new file mode 100644 index 0000000..6947595 --- /dev/null +++ b/docs/POSTGRESQLAuthConfig.md @@ -0,0 +1,35 @@ +# POSTGRESQLAuthConfig + +Authentication configuration for PostgreSQL + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your PostgreSQL integration | +**host** | **str** | Host. Example: Enter the host of the deployment | +**port** | **float** | Port. Example: Enter the port of the deployment | [optional] [default to 5432] +**database** | **str** | Database. Example: Enter the database name | +**username** | **str** | Username. Example: Enter the username | +**password** | **str** | Password. 
Example: Enter the username's password | + +## Example + +```python +from vectorize_client.models.postgresql_auth_config import POSTGRESQLAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of POSTGRESQLAuthConfig from a JSON string +postgresql_auth_config_instance = POSTGRESQLAuthConfig.from_json(json) +# print the JSON string representation of the object +print(POSTGRESQLAuthConfig.to_json()) + +# convert the object into a dict +postgresql_auth_config_dict = postgresql_auth_config_instance.to_dict() +# create an instance of POSTGRESQLAuthConfig from a dict +postgresql_auth_config_from_dict = POSTGRESQLAuthConfig.from_dict(postgresql_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/POSTGRESQLConfig.md b/docs/POSTGRESQLConfig.md new file mode 100644 index 0000000..8a4acb2 --- /dev/null +++ b/docs/POSTGRESQLConfig.md @@ -0,0 +1,30 @@ +# POSTGRESQLConfig + +Configuration for PostgreSQL connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**table** | **str** | Table Name. 
Example: Enter <table name> or <schema>.<table name> | + +## Example + +```python +from vectorize_client.models.postgresql_config import POSTGRESQLConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of POSTGRESQLConfig from a JSON string +postgresql_config_instance = POSTGRESQLConfig.from_json(json) +# print the JSON string representation of the object +print(POSTGRESQLConfig.to_json()) + +# convert the object into a dict +postgresql_config_dict = postgresql_config_instance.to_dict() +# create an instance of POSTGRESQLConfig from a dict +postgresql_config_from_dict = POSTGRESQLConfig.from_dict(postgresql_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Pinecone.md b/docs/Pinecone.md new file mode 100644 index 0000000..b104ef4 --- /dev/null +++ b/docs/Pinecone.md @@ -0,0 +1,31 @@ +# Pinecone + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"PINECONE\") | +**config** | [**PINECONEConfig**](PINECONEConfig.md) | | + +## Example + +```python +from vectorize_client.models.pinecone import Pinecone + +# TODO update the JSON string below +json = "{}" +# create an instance of Pinecone from a JSON string +pinecone_instance = Pinecone.from_json(json) +# print the JSON string representation of the object +print(Pinecone.to_json()) + +# convert the object into a dict +pinecone_dict = pinecone_instance.to_dict() +# create an instance of Pinecone from a dict +pinecone_from_dict = Pinecone.from_dict(pinecone_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Pinecone1.md b/docs/Pinecone1.md new file 
mode 100644 index 0000000..1debb9f --- /dev/null +++ b/docs/Pinecone1.md @@ -0,0 +1,29 @@ +# Pinecone1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**PINECONEConfig**](PINECONEConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.pinecone1 import Pinecone1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Pinecone1 from a JSON string +pinecone1_instance = Pinecone1.from_json(json) +# print the JSON string representation of the object +print(pinecone1_instance.to_json()) + +# convert the object into a dict +pinecone1_dict = pinecone1_instance.to_dict() +# create an instance of Pinecone1 from a dict +pinecone1_from_dict = Pinecone1.from_dict(pinecone1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Pinecone2.md b/docs/Pinecone2.md new file mode 100644 index 0000000..e22ea34 --- /dev/null +++ b/docs/Pinecone2.md @@ -0,0 +1,30 @@ +# Pinecone2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"PINECONE\") | + +## Example + +```python +from vectorize_client.models.pinecone2 import Pinecone2 + +# TODO update the JSON string below
json = "{}" +# create an instance of Pinecone2 from a JSON string +pinecone2_instance = Pinecone2.from_json(json) +# print the JSON string representation of the object +print(pinecone2_instance.to_json()) + +# convert the object into a dict +pinecone2_dict = pinecone2_instance.to_dict() +# create an instance of Pinecone2 from a dict +pinecone2_from_dict = Pinecone2.from_dict(pinecone2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PipelineAIPlatformRequestInner.md b/docs/PipelineAIPlatformRequestInner.md new file mode 100644 index 0000000..cb5cef0 --- /dev/null +++ b/docs/PipelineAIPlatformRequestInner.md @@ -0,0 +1,30 @@ +# PipelineAIPlatformRequestInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"BEDROCK\") | + +## Example + +```python +from vectorize_client.models.pipeline_ai_platform_request_inner import PipelineAIPlatformRequestInner + +# TODO update the JSON string below +json = "{}" +# create an instance of PipelineAIPlatformRequestInner from a JSON string +pipeline_ai_platform_request_inner_instance = PipelineAIPlatformRequestInner.from_json(json) +# print the JSON string representation of the object +print(PipelineAIPlatformRequestInner.to_json()) + +# convert the object into a dict +pipeline_ai_platform_request_inner_dict = pipeline_ai_platform_request_inner_instance.to_dict() +# create an instance of PipelineAIPlatformRequestInner from a dict +pipeline_ai_platform_request_inner_from_dict = PipelineAIPlatformRequestInner.from_dict(pipeline_ai_platform_request_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PipelineConfigurationSchema.md b/docs/PipelineConfigurationSchema.md new file mode 100644 index 0000000..9de2b40 --- /dev/null +++ b/docs/PipelineConfigurationSchema.md @@ -0,0 +1,33 @@ +# PipelineConfigurationSchema + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**source_connectors** | [**List[PipelineSourceConnectorRequestInner]**](PipelineSourceConnectorRequestInner.md) | 
| +**destination_connector** | [**List[PipelineDestinationConnectorRequestInner]**](PipelineDestinationConnectorRequestInner.md) | | +**ai_platform** | [**List[PipelineAIPlatformRequestInner]**](PipelineAIPlatformRequestInner.md) | | +**pipeline_name** | **str** | | +**schedule** | [**ScheduleSchema**](ScheduleSchema.md) | | + +## Example + +```python +from vectorize_client.models.pipeline_configuration_schema import PipelineConfigurationSchema + +# TODO update the JSON string below +json = "{}" +# create an instance of PipelineConfigurationSchema from a JSON string +pipeline_configuration_schema_instance = PipelineConfigurationSchema.from_json(json) +# print the JSON string representation of the object +print(PipelineConfigurationSchema.to_json()) + +# convert the object into a dict +pipeline_configuration_schema_dict = pipeline_configuration_schema_instance.to_dict() +# create an instance of PipelineConfigurationSchema from a dict +pipeline_configuration_schema_from_dict = PipelineConfigurationSchema.from_dict(pipeline_configuration_schema_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PipelineDestinationConnectorRequestInner.md b/docs/PipelineDestinationConnectorRequestInner.md new file mode 100644 index 0000000..232c0d0 --- /dev/null +++ b/docs/PipelineDestinationConnectorRequestInner.md @@ -0,0 +1,30 @@ +# PipelineDestinationConnectorRequestInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"CAPELLA\") | + +## Example + +```python +from vectorize_client.models.pipeline_destination_connector_request_inner import PipelineDestinationConnectorRequestInner + +# TODO update the JSON string below +json = "{}" +# create an instance of 
PipelineDestinationConnectorRequestInner from a JSON string +pipeline_destination_connector_request_inner_instance = PipelineDestinationConnectorRequestInner.from_json(json) +# print the JSON string representation of the object +print(PipelineDestinationConnectorRequestInner.to_json()) + +# convert the object into a dict +pipeline_destination_connector_request_inner_dict = pipeline_destination_connector_request_inner_instance.to_dict() +# create an instance of PipelineDestinationConnectorRequestInner from a dict +pipeline_destination_connector_request_inner_from_dict = PipelineDestinationConnectorRequestInner.from_dict(pipeline_destination_connector_request_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PipelineEvents.md b/docs/PipelineEvents.md new file mode 100644 index 0000000..07eb1fb --- /dev/null +++ b/docs/PipelineEvents.md @@ -0,0 +1,33 @@ +# PipelineEvents + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**type** | **str** | | +**timestamp** | **str** | | +**details** | **Dict[str, Optional[object]]** | | [optional] +**summary** | **Dict[str, Optional[object]]** | | [optional] + +## Example + +```python +from vectorize_client.models.pipeline_events import PipelineEvents + +# TODO update the JSON string below +json = "{}" +# create an instance of PipelineEvents from a JSON string +pipeline_events_instance = PipelineEvents.from_json(json) +# print the JSON string representation of the object +print(PipelineEvents.to_json()) + +# convert the object into a dict +pipeline_events_dict = pipeline_events_instance.to_dict() +# create an instance of PipelineEvents from a dict +pipeline_events_from_dict = PipelineEvents.from_dict(pipeline_events_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back 
to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PipelineListSummary.md b/docs/PipelineListSummary.md new file mode 100644 index 0000000..5026cf2 --- /dev/null +++ b/docs/PipelineListSummary.md @@ -0,0 +1,41 @@ +# PipelineListSummary + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**name** | **str** | | +**document_count** | **float** | | +**source_connector_auth_ids** | **List[str]** | | +**destination_connector_auth_ids** | **List[str]** | | +**ai_platform_auth_ids** | **List[str]** | | +**source_connector_types** | **List[str]** | | +**destination_connector_types** | **List[str]** | | +**ai_platform_types** | **List[str]** | | +**created_at** | **str** | | +**created_by** | **str** | | +**status** | **str** | | [optional] +**config_doc** | **Dict[str, Optional[object]]** | | [optional] + +## Example + +```python +from vectorize_client.models.pipeline_list_summary import PipelineListSummary + +# TODO update the JSON string below +json = "{}" +# create an instance of PipelineListSummary from a JSON string +pipeline_list_summary_instance = PipelineListSummary.from_json(json) +# print the JSON string representation of the object +print(PipelineListSummary.to_json()) + +# convert the object into a dict +pipeline_list_summary_dict = pipeline_list_summary_instance.to_dict() +# create an instance of PipelineListSummary from a dict +pipeline_list_summary_from_dict = PipelineListSummary.from_dict(pipeline_list_summary_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PipelineMetrics.md b/docs/PipelineMetrics.md new file mode 100644 index 0000000..413e5a3 --- /dev/null +++ b/docs/PipelineMetrics.md @@ -0,0 +1,32 @@ +# PipelineMetrics + + +## Properties + +Name | Type | 
Description | Notes +------------ | ------------- | ------------- | ------------- +**timestamp** | **str** | | +**new_objects** | **float** | | +**changed_objects** | **float** | | +**deleted_objects** | **float** | | + +## Example + +```python +from vectorize_client.models.pipeline_metrics import PipelineMetrics + +# TODO update the JSON string below +json = "{}" +# create an instance of PipelineMetrics from a JSON string +pipeline_metrics_instance = PipelineMetrics.from_json(json) +# print the JSON string representation of the object +print(PipelineMetrics.to_json()) + +# convert the object into a dict +pipeline_metrics_dict = pipeline_metrics_instance.to_dict() +# create an instance of PipelineMetrics from a dict +pipeline_metrics_from_dict = PipelineMetrics.from_dict(pipeline_metrics_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PipelineSourceConnectorRequestInner.md b/docs/PipelineSourceConnectorRequestInner.md new file mode 100644 index 0000000..1694a74 --- /dev/null +++ b/docs/PipelineSourceConnectorRequestInner.md @@ -0,0 +1,30 @@ +# PipelineSourceConnectorRequestInner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"AWS_S3\") | + +## Example + +```python +from vectorize_client.models.pipeline_source_connector_request_inner import PipelineSourceConnectorRequestInner + +# TODO update the JSON string below +json = "{}" +# create an instance of PipelineSourceConnectorRequestInner from a JSON string +pipeline_source_connector_request_inner_instance = PipelineSourceConnectorRequestInner.from_json(json) +# print the JSON string representation of the object +print(PipelineSourceConnectorRequestInner.to_json()) + +# convert the object into a dict 
+pipeline_source_connector_request_inner_dict = pipeline_source_connector_request_inner_instance.to_dict() +# create an instance of PipelineSourceConnectorRequestInner from a dict +pipeline_source_connector_request_inner_from_dict = PipelineSourceConnectorRequestInner.from_dict(pipeline_source_connector_request_inner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PipelineSummary.md b/docs/PipelineSummary.md new file mode 100644 index 0000000..a269f34 --- /dev/null +++ b/docs/PipelineSummary.md @@ -0,0 +1,44 @@ +# PipelineSummary + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**name** | **str** | | +**document_count** | **float** | | +**source_connector_auth_ids** | **List[str]** | | +**destination_connector_auth_ids** | **List[str]** | | +**ai_platform_auth_ids** | **List[str]** | | +**source_connector_types** | **List[str]** | | +**destination_connector_types** | **List[str]** | | +**ai_platform_types** | **List[str]** | | +**created_at** | **str** | | +**created_by** | **str** | | +**status** | **str** | | [optional] +**config_doc** | **Dict[str, Optional[object]]** | | [optional] +**source_connectors** | [**List[SourceConnector]**](SourceConnector.md) | | +**destination_connectors** | [**List[DestinationConnector]**](DestinationConnector.md) | | +**ai_platforms** | [**List[AIPlatform]**](AIPlatform.md) | | + +## Example + +```python +from vectorize_client.models.pipeline_summary import PipelineSummary + +# TODO update the JSON string below +json = "{}" +# create an instance of PipelineSummary from a JSON string +pipeline_summary_instance = PipelineSummary.from_json(json) +# print the JSON string representation of the object +print(PipelineSummary.to_json()) + +# convert the object into a dict +pipeline_summary_dict = 
pipeline_summary_instance.to_dict() +# create an instance of PipelineSummary from a dict +pipeline_summary_from_dict = PipelineSummary.from_dict(pipeline_summary_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/PipelinesApi.md b/docs/PipelinesApi.md new file mode 100644 index 0000000..4b4136d --- /dev/null +++ b/docs/PipelinesApi.md @@ -0,0 +1,945 @@ +# vectorize_client.PipelinesApi + +All URIs are relative to *https://api.vectorize.io/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**create_pipeline**](PipelinesApi.md#create_pipeline) | **POST** /org/{organizationId}/pipelines | Create a new pipeline +[**delete_pipeline**](PipelinesApi.md#delete_pipeline) | **DELETE** /org/{organizationId}/pipelines/{pipelineId} | Delete a pipeline +[**get_deep_research_result**](PipelinesApi.md#get_deep_research_result) | **GET** /org/{organizationId}/pipelines/{pipelineId}/deep-research/{researchId} | Get deep research result +[**get_pipeline**](PipelinesApi.md#get_pipeline) | **GET** /org/{organizationId}/pipelines/{pipelineId} | Get a pipeline +[**get_pipeline_events**](PipelinesApi.md#get_pipeline_events) | **GET** /org/{organizationId}/pipelines/{pipelineId}/events | Get pipeline events +[**get_pipeline_metrics**](PipelinesApi.md#get_pipeline_metrics) | **GET** /org/{organizationId}/pipelines/{pipelineId}/metrics | Get pipeline metrics +[**get_pipelines**](PipelinesApi.md#get_pipelines) | **GET** /org/{organizationId}/pipelines | Get all pipelines +[**retrieve_documents**](PipelinesApi.md#retrieve_documents) | **POST** /org/{organizationId}/pipelines/{pipelineId}/retrieval | Retrieve documents from a pipeline +[**start_deep_research**](PipelinesApi.md#start_deep_research) | **POST** /org/{organizationId}/pipelines/{pipelineId}/deep-research | Start a deep research 
+[**start_pipeline**](PipelinesApi.md#start_pipeline) | **POST** /org/{organizationId}/pipelines/{pipelineId}/start | Start a pipeline +[**stop_pipeline**](PipelinesApi.md#stop_pipeline) | **POST** /org/{organizationId}/pipelines/{pipelineId}/stop | Stop a pipeline + + +# **create_pipeline** +> CreatePipelineResponse create_pipeline(organization_id, pipeline_configuration_schema) + +Create a new pipeline + +Creates a new pipeline with source connectors, destination connector, and AI platform configuration. The specific configuration fields required depend on the connector types selected. + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.create_pipeline_response import CreatePipelineResponse +from vectorize_client.models.pipeline_configuration_schema import PipelineConfigurationSchema +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + pipeline_configuration_schema = {"sourceConnectors":[{"id":"4d61dfa9-ce3c-48df-824f-85d1d7421a84","type":"AWS_S3"}],"destinationConnector":[{"id":"e6d268f5-7164-4411-a24b-3d59c78958c8","type":"CAPELLA"}],"aiPlatform":[{"id":"65b8d1f0-32ad-459f-8799-7d359abf4ee4","type":"BEDROCK"}],"pipelineName":"Data Processing Pipeline","schedule":{"type":"manual"}} # PipelineConfigurationSchema | + + try: + # Create a new pipeline + api_response = api_instance.create_pipeline(organization_id, pipeline_configuration_schema) + print("The response of PipelinesApi->create_pipeline:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->create_pipeline: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **pipeline_configuration_schema** | [**PipelineConfigurationSchema**](PipelineConfigurationSchema.md)| | + +### Return type + +[**CreatePipelineResponse**](CreatePipelineResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Pipeline created successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to 
API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_pipeline** +> DeletePipelineResponse delete_pipeline(organization_id, pipeline_id) + +Delete a pipeline + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.delete_pipeline_response import DeletePipelineResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + pipeline_id = 'pipeline_id_example' # str | + + try: + # Delete a pipeline + api_response = api_instance.delete_pipeline(organization_id, pipeline_id) + print("The response of PipelinesApi->delete_pipeline:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->delete_pipeline: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **pipeline_id** | **str**| | + +### Return type + +[**DeletePipelineResponse**](DeletePipelineResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Pipeline deleted successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_deep_research_result** +> GetDeepResearchResponse get_deep_research_result(organization, pipeline, research_id) + +Get deep research result + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from 
vectorize_client.models.get_deep_research_response import GetDeepResearchResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization = 'organization_example' # str | + pipeline = 'pipeline_example' # str | + research_id = 'research_id_example' # str | + + try: + # Get deep research result + api_response = api_instance.get_deep_research_result(organization, pipeline, research_id) + print("The response of PipelinesApi->get_deep_research_result:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->get_deep_research_result: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **pipeline** | **str**| | + **research_id** | **str**| | + +### Return type + +[**GetDeepResearchResponse**](GetDeepResearchResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | 
Response headers | +|-------------|-------------|------------------| +**200** | Get Deep Research was successful | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_pipeline** +> GetPipelineResponse get_pipeline(organization_id, pipeline_id) + +Get a pipeline + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.get_pipeline_response import GetPipelineResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + pipeline_id = 'pipeline_id_example' # str | + + try: + # Get a pipeline + api_response = api_instance.get_pipeline(organization_id, pipeline_id) + print("The response of PipelinesApi->get_pipeline:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->get_pipeline: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **pipeline_id** | **str**| | + +### Return type + +[**GetPipelineResponse**](GetPipelineResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Pipeline fetched successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_pipeline_events** +> GetPipelineEventsResponse get_pipeline_events(organization_id, pipeline_id, next_token=next_token) + +Get pipeline events + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from 
vectorize_client.models.get_pipeline_events_response import GetPipelineEventsResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + pipeline_id = 'pipeline_id_example' # str | + next_token = 'next_token_example' # str | (optional) + + try: + # Get pipeline events + api_response = api_instance.get_pipeline_events(organization_id, pipeline_id, next_token=next_token) + print("The response of PipelinesApi->get_pipeline_events:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->get_pipeline_events: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **pipeline_id** | **str**| | + **next_token** | **str**| | [optional] + +### Return type + +[**GetPipelineEventsResponse**](GetPipelineEventsResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response 
details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Pipeline events fetched successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_pipeline_metrics** +> GetPipelineMetricsResponse get_pipeline_metrics(organization_id, pipeline_id) + +Get pipeline metrics + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.get_pipeline_metrics_response import GetPipelineMetricsResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + pipeline_id = 'pipeline_id_example' # str | + + try: + # Get pipeline metrics + api_response = api_instance.get_pipeline_metrics(organization_id, pipeline_id) + print("The response of PipelinesApi->get_pipeline_metrics:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->get_pipeline_metrics: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **pipeline_id** | **str**| | + +### Return type + +[**GetPipelineMetricsResponse**](GetPipelineMetricsResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Pipeline metrics fetched successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_pipelines** +> GetPipelinesResponse get_pipelines(organization_id) + +Get all pipelines + +Returns a list of all pipelines in the organization + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client 
+from vectorize_client.models.get_pipelines_response import GetPipelinesResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + + try: + # Get all pipelines + api_response = api_instance.get_pipelines(organization_id) + print("The response of PipelinesApi->get_pipelines:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->get_pipelines: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + +### Return type + +[**GetPipelinesResponse**](GetPipelinesResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Pipelines retrieved successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | 
- | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **retrieve_documents** +> RetrieveDocumentsResponse retrieve_documents(organization_id, pipeline_id, retrieve_documents_request) + +Retrieve documents from a pipeline + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.retrieve_documents_request import RetrieveDocumentsRequest +from vectorize_client.models.retrieve_documents_response import RetrieveDocumentsResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + pipeline_id = 'pipeline_id_example' # str | + retrieve_documents_request = {"question":"example-question","numResults":100,"rerank":True,"metadata-filters":[],"context":{"messages":[]}} # RetrieveDocumentsRequest | + + try: + # Retrieve documents from a pipeline + api_response = api_instance.retrieve_documents(organization_id, pipeline_id, retrieve_documents_request) + print("The response of PipelinesApi->retrieve_documents:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->retrieve_documents: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **pipeline_id** | **str**| | + **retrieve_documents_request** | [**RetrieveDocumentsRequest**](RetrieveDocumentsRequest.md)| | + +### Return type + +[**RetrieveDocumentsResponse**](RetrieveDocumentsResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Documents retrieved successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **start_deep_research** +> StartDeepResearchResponse start_deep_research(organization_id, pipeline_id, start_deep_research_request) + +Start a deep research + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.start_deep_research_request import StartDeepResearchRequest +from vectorize_client.models.start_deep_research_response import StartDeepResearchResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + pipeline_id = 'pipeline_id_example' # str | + start_deep_research_request = {"query":"example-query","webSearch":True,"schema":"example-schema","n8n":{"account":"example-account","webhookPath":"/example/path","headers":{}}} # StartDeepResearchRequest | + + try: + # Start a deep research + api_response = api_instance.start_deep_research(organization_id, pipeline_id, start_deep_research_request) + print("The response of PipelinesApi->start_deep_research:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->start_deep_research: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **pipeline_id** | **str**| | + **start_deep_research_request** | [**StartDeepResearchRequest**](StartDeepResearchRequest.md)| | + +### Return type + +[**StartDeepResearchResponse**](StartDeepResearchResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Deep Research started successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **start_pipeline** +> StartPipelineResponse start_pipeline(organization_id, pipeline_id) + +Start a pipeline + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.start_pipeline_response import StartPipelineResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + pipeline_id = 'pipeline_id_example' # str | + + try: + # Start a pipeline + api_response = api_instance.start_pipeline(organization_id, pipeline_id) + print("The response of PipelinesApi->start_pipeline:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->start_pipeline: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **pipeline_id** | **str**| | + +### Return type + +[**StartPipelineResponse**](StartPipelineResponse.md) + +### Authorization + 
+[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Pipeline started successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **stop_pipeline** +> StopPipelineResponse stop_pipeline(organization_id, pipeline_id) + +Stop a pipeline + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.stop_pipeline_response import StopPipelineResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.PipelinesApi(api_client) + organization_id = 'organization_id_example' # str | + pipeline_id = 'pipeline_id_example' # str | + + try: + # Stop a pipeline + api_response = api_instance.stop_pipeline(organization_id, pipeline_id) + print("The response of PipelinesApi->stop_pipeline:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling PipelinesApi->stop_pipeline: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization_id** | **str**| | + **pipeline_id** | **str**| | + +### Return type + +[**StopPipelineResponse**](StopPipelineResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Pipeline stopped successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/docs/Postgresql.md b/docs/Postgresql.md new file mode 100644 index 0000000..0af32d5 --- /dev/null +++ b/docs/Postgresql.md @@ -0,0 +1,31 @@ +# Postgresql + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | 
**str** | Name of the connector | +**type** | **str** | Connector type (must be \"POSTGRESQL\") | +**config** | [**POSTGRESQLConfig**](POSTGRESQLConfig.md) | | + +## Example + +```python +from vectorize_client.models.postgresql import Postgresql + +# TODO update the JSON string below +json = "{}" +# create an instance of Postgresql from a JSON string +postgresql_instance = Postgresql.from_json(json) +# print the JSON string representation of the object +print(Postgresql.to_json()) + +# convert the object into a dict +postgresql_dict = postgresql_instance.to_dict() +# create an instance of Postgresql from a dict +postgresql_from_dict = Postgresql.from_dict(postgresql_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Postgresql1.md b/docs/Postgresql1.md new file mode 100644 index 0000000..4889710 --- /dev/null +++ b/docs/Postgresql1.md @@ -0,0 +1,29 @@ +# Postgresql1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**POSTGRESQLConfig**](POSTGRESQLConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.postgresql1 import Postgresql1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Postgresql1 from a JSON string +postgresql1_instance = Postgresql1.from_json(json) +# print the JSON string representation of the object +print(Postgresql1.to_json()) + +# convert the object into a dict +postgresql1_dict = postgresql1_instance.to_dict() +# create an instance of Postgresql1 from a dict +postgresql1_from_dict = Postgresql1.from_dict(postgresql1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Postgresql2.md b/docs/Postgresql2.md new file mode 100644 
index 0000000..99ebeaa --- /dev/null +++ b/docs/Postgresql2.md @@ -0,0 +1,30 @@ +# Postgresql2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"POSTGRESQL\") | + +## Example + +```python +from vectorize_client.models.postgresql2 import Postgresql2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Postgresql2 from a JSON string +postgresql2_instance = Postgresql2.from_json(json) +# print the JSON string representation of the object +print(Postgresql2.to_json()) + +# convert the object into a dict +postgresql2_dict = postgresql2_instance.to_dict() +# create an instance of Postgresql2 from a dict +postgresql2_from_dict = Postgresql2.from_dict(postgresql2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/QDRANTAuthConfig.md b/docs/QDRANTAuthConfig.md new file mode 100644 index 0000000..8d0f91b --- /dev/null +++ b/docs/QDRANTAuthConfig.md @@ -0,0 +1,32 @@ +# QDRANTAuthConfig + +Authentication configuration for Qdrant + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Qdrant integration | +**host** | **str** | Host. Example: Enter your host | +**api_key** | **str** | API Key. 
Example: Enter your API key | + +## Example + +```python +from vectorize_client.models.qdrant_auth_config import QDRANTAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of QDRANTAuthConfig from a JSON string +qdrant_auth_config_instance = QDRANTAuthConfig.from_json(json) +# print the JSON string representation of the object +print(QDRANTAuthConfig.to_json()) + +# convert the object into a dict +qdrant_auth_config_dict = qdrant_auth_config_instance.to_dict() +# create an instance of QDRANTAuthConfig from a dict +qdrant_auth_config_from_dict = QDRANTAuthConfig.from_dict(qdrant_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/QDRANTConfig.md b/docs/QDRANTConfig.md new file mode 100644 index 0000000..a66d012 --- /dev/null +++ b/docs/QDRANTConfig.md @@ -0,0 +1,30 @@ +# QDRANTConfig + +Configuration for Qdrant connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**collection** | **str** | Collection Name. 
Example: Enter collection name | + +## Example + +```python +from vectorize_client.models.qdrant_config import QDRANTConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of QDRANTConfig from a JSON string +qdrant_config_instance = QDRANTConfig.from_json(json) +# print the JSON string representation of the object +print(QDRANTConfig.to_json()) + +# convert the object into a dict +qdrant_config_dict = qdrant_config_instance.to_dict() +# create an instance of QDRANTConfig from a dict +qdrant_config_from_dict = QDRANTConfig.from_dict(qdrant_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Qdrant.md b/docs/Qdrant.md new file mode 100644 index 0000000..f4cda66 --- /dev/null +++ b/docs/Qdrant.md @@ -0,0 +1,31 @@ +# Qdrant + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"QDRANT\") | +**config** | [**QDRANTConfig**](QDRANTConfig.md) | | + +## Example + +```python +from vectorize_client.models.qdrant import Qdrant + +# TODO update the JSON string below +json = "{}" +# create an instance of Qdrant from a JSON string +qdrant_instance = Qdrant.from_json(json) +# print the JSON string representation of the object +print(Qdrant.to_json()) + +# convert the object into a dict +qdrant_dict = qdrant_instance.to_dict() +# create an instance of Qdrant from a dict +qdrant_from_dict = Qdrant.from_dict(qdrant_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Qdrant1.md b/docs/Qdrant1.md new file mode 100644 index 0000000..2aad746 --- /dev/null +++ b/docs/Qdrant1.md @@ -0,0 +1,29 @@ +# Qdrant1 + + +## 
Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**QDRANTConfig**](QDRANTConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.qdrant1 import Qdrant1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Qdrant1 from a JSON string +qdrant1_instance = Qdrant1.from_json(json) +# print the JSON string representation of the object +print(Qdrant1.to_json()) + +# convert the object into a dict +qdrant1_dict = qdrant1_instance.to_dict() +# create an instance of Qdrant1 from a dict +qdrant1_from_dict = Qdrant1.from_dict(qdrant1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Qdrant2.md b/docs/Qdrant2.md new file mode 100644 index 0000000..8337423 --- /dev/null +++ b/docs/Qdrant2.md @@ -0,0 +1,30 @@ +# Qdrant2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"QDRANT\") | + +## Example + +```python +from vectorize_client.models.qdrant2 import Qdrant2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Qdrant2 from a JSON string +qdrant2_instance = Qdrant2.from_json(json) +# print the JSON string representation of the object +print(Qdrant2.to_json()) + +# convert the object into a dict +qdrant2_dict = qdrant2_instance.to_dict() +# create an instance of Qdrant2 from a dict +qdrant2_from_dict = Qdrant2.from_dict(qdrant2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/RemoveUserFromSourceConnectorRequest.md b/docs/RemoveUserFromSourceConnectorRequest.md new file mode 
100644 index 0000000..a34c16a --- /dev/null +++ b/docs/RemoveUserFromSourceConnectorRequest.md @@ -0,0 +1,29 @@ +# RemoveUserFromSourceConnectorRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**user_id** | **str** | | + +## Example + +```python +from vectorize_client.models.remove_user_from_source_connector_request import RemoveUserFromSourceConnectorRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of RemoveUserFromSourceConnectorRequest from a JSON string +remove_user_from_source_connector_request_instance = RemoveUserFromSourceConnectorRequest.from_json(json) +# print the JSON string representation of the object +print(RemoveUserFromSourceConnectorRequest.to_json()) + +# convert the object into a dict +remove_user_from_source_connector_request_dict = remove_user_from_source_connector_request_instance.to_dict() +# create an instance of RemoveUserFromSourceConnectorRequest from a dict +remove_user_from_source_connector_request_from_dict = RemoveUserFromSourceConnectorRequest.from_dict(remove_user_from_source_connector_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/RemoveUserFromSourceConnectorResponse.md b/docs/RemoveUserFromSourceConnectorResponse.md new file mode 100644 index 0000000..74de0f4 --- /dev/null +++ b/docs/RemoveUserFromSourceConnectorResponse.md @@ -0,0 +1,29 @@ +# RemoveUserFromSourceConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | + +## Example + +```python +from vectorize_client.models.remove_user_from_source_connector_response import RemoveUserFromSourceConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of 
RemoveUserFromSourceConnectorResponse from a JSON string +remove_user_from_source_connector_response_instance = RemoveUserFromSourceConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(RemoveUserFromSourceConnectorResponse.to_json()) + +# convert the object into a dict +remove_user_from_source_connector_response_dict = remove_user_from_source_connector_response_instance.to_dict() +# create an instance of RemoveUserFromSourceConnectorResponse from a dict +remove_user_from_source_connector_response_from_dict = RemoveUserFromSourceConnectorResponse.from_dict(remove_user_from_source_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/RetrieveContext.md b/docs/RetrieveContext.md new file mode 100644 index 0000000..30be06c --- /dev/null +++ b/docs/RetrieveContext.md @@ -0,0 +1,29 @@ +# RetrieveContext + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**messages** | [**List[RetrieveContextMessage]**](RetrieveContextMessage.md) | | + +## Example + +```python +from vectorize_client.models.retrieve_context import RetrieveContext + +# TODO update the JSON string below +json = "{}" +# create an instance of RetrieveContext from a JSON string +retrieve_context_instance = RetrieveContext.from_json(json) +# print the JSON string representation of the object +print(RetrieveContext.to_json()) + +# convert the object into a dict +retrieve_context_dict = retrieve_context_instance.to_dict() +# create an instance of RetrieveContext from a dict +retrieve_context_from_dict = RetrieveContext.from_dict(retrieve_context_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git 
a/docs/RetrieveContextMessage.md b/docs/RetrieveContextMessage.md new file mode 100644 index 0000000..8f2c6ac --- /dev/null +++ b/docs/RetrieveContextMessage.md @@ -0,0 +1,30 @@ +# RetrieveContextMessage + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**role** | **str** | | +**content** | **str** | | + +## Example + +```python +from vectorize_client.models.retrieve_context_message import RetrieveContextMessage + +# TODO update the JSON string below +json = "{}" +# create an instance of RetrieveContextMessage from a JSON string +retrieve_context_message_instance = RetrieveContextMessage.from_json(json) +# print the JSON string representation of the object +print(RetrieveContextMessage.to_json()) + +# convert the object into a dict +retrieve_context_message_dict = retrieve_context_message_instance.to_dict() +# create an instance of RetrieveContextMessage from a dict +retrieve_context_message_from_dict = RetrieveContextMessage.from_dict(retrieve_context_message_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/RetrieveDocumentsRequest.md b/docs/RetrieveDocumentsRequest.md new file mode 100644 index 0000000..392856b --- /dev/null +++ b/docs/RetrieveDocumentsRequest.md @@ -0,0 +1,34 @@ +# RetrieveDocumentsRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**question** | **str** | | +**num_results** | **float** | | +**rerank** | **bool** | | [optional] [default to True] +**metadata_filters** | **List[Dict[str, Optional[object]]]** | | [optional] +**context** | [**RetrieveContext**](RetrieveContext.md) | | [optional] +**advanced_query** | [**AdvancedQuery**](AdvancedQuery.md) | | [optional] + +## Example + +```python +from vectorize_client.models.retrieve_documents_request import 
RetrieveDocumentsRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of RetrieveDocumentsRequest from a JSON string +retrieve_documents_request_instance = RetrieveDocumentsRequest.from_json(json) +# print the JSON string representation of the object +print(RetrieveDocumentsRequest.to_json()) + +# convert the object into a dict +retrieve_documents_request_dict = retrieve_documents_request_instance.to_dict() +# create an instance of RetrieveDocumentsRequest from a dict +retrieve_documents_request_from_dict = RetrieveDocumentsRequest.from_dict(retrieve_documents_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/RetrieveDocumentsResponse.md b/docs/RetrieveDocumentsResponse.md new file mode 100644 index 0000000..75c2951 --- /dev/null +++ b/docs/RetrieveDocumentsResponse.md @@ -0,0 +1,32 @@ +# RetrieveDocumentsResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**question** | **str** | | +**documents** | [**List[Document]**](Document.md) | | +**average_relevancy** | **float** | | +**ndcg** | **float** | | + +## Example + +```python +from vectorize_client.models.retrieve_documents_response import RetrieveDocumentsResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of RetrieveDocumentsResponse from a JSON string +retrieve_documents_response_instance = RetrieveDocumentsResponse.from_json(json) +# print the JSON string representation of the object +print(RetrieveDocumentsResponse.to_json()) + +# convert the object into a dict +retrieve_documents_response_dict = retrieve_documents_response_instance.to_dict() +# create an instance of RetrieveDocumentsResponse from a dict +retrieve_documents_response_from_dict = RetrieveDocumentsResponse.from_dict(retrieve_documents_response_dict) +``` 
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SHAREPOINTAuthConfig.md b/docs/SHAREPOINTAuthConfig.md new file mode 100644 index 0000000..f3516bd --- /dev/null +++ b/docs/SHAREPOINTAuthConfig.md @@ -0,0 +1,33 @@ +# SHAREPOINTAuthConfig + +Authentication configuration for SharePoint + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**ms_client_id** | **str** | Client Id. Example: Enter Client Id | +**ms_tenant_id** | **str** | Tenant Id. Example: Enter Tenant Id | +**ms_client_secret** | **str** | Client Secret. Example: Enter Client Secret | + +## Example + +```python +from vectorize_client.models.sharepoint_auth_config import SHAREPOINTAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of SHAREPOINTAuthConfig from a JSON string +sharepoint_auth_config_instance = SHAREPOINTAuthConfig.from_json(json) +# print the JSON string representation of the object +print(SHAREPOINTAuthConfig.to_json()) + +# convert the object into a dict +sharepoint_auth_config_dict = sharepoint_auth_config_instance.to_dict() +# create an instance of SHAREPOINTAuthConfig from a dict +sharepoint_auth_config_from_dict = SHAREPOINTAuthConfig.from_dict(sharepoint_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SHAREPOINTConfig.md b/docs/SHAREPOINTConfig.md new file mode 100644 index 0000000..fcc0e78 --- /dev/null +++ b/docs/SHAREPOINTConfig.md @@ -0,0 +1,31 @@ +# SHAREPOINTConfig + +Configuration for SharePoint connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- 
+**file_extensions** | **List[str]** | File Extensions | +**sites** | **str** | Site Name(s). Example: Filter by site name. All sites if empty. | [optional] + +## Example + +```python +from vectorize_client.models.sharepoint_config import SHAREPOINTConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of SHAREPOINTConfig from a JSON string +sharepoint_config_instance = SHAREPOINTConfig.from_json(json) +# print the JSON string representation of the object +print(SHAREPOINTConfig.to_json()) + +# convert the object into a dict +sharepoint_config_dict = sharepoint_config_instance.to_dict() +# create an instance of SHAREPOINTConfig from a dict +sharepoint_config_from_dict = SHAREPOINTConfig.from_dict(sharepoint_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SINGLESTOREAuthConfig.md b/docs/SINGLESTOREAuthConfig.md new file mode 100644 index 0000000..86f4857 --- /dev/null +++ b/docs/SINGLESTOREAuthConfig.md @@ -0,0 +1,35 @@ +# SINGLESTOREAuthConfig + +Authentication configuration for SingleStore + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your SingleStore integration | +**host** | **str** | Host. Example: Enter the host of the deployment | +**port** | **float** | Port. Example: Enter the port of the deployment | +**database** | **str** | Database. Example: Enter the database name | +**username** | **str** | Username. Example: Enter the username | +**password** | **str** | Password. 
Example: Enter the username's password | + +## Example + +```python +from vectorize_client.models.singlestore_auth_config import SINGLESTOREAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of SINGLESTOREAuthConfig from a JSON string +singlestore_auth_config_instance = SINGLESTOREAuthConfig.from_json(json) +# print the JSON string representation of the object +print(SINGLESTOREAuthConfig.to_json()) + +# convert the object into a dict +singlestore_auth_config_dict = singlestore_auth_config_instance.to_dict() +# create an instance of SINGLESTOREAuthConfig from a dict +singlestore_auth_config_from_dict = SINGLESTOREAuthConfig.from_dict(singlestore_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SINGLESTOREConfig.md b/docs/SINGLESTOREConfig.md new file mode 100644 index 0000000..4b2b33f --- /dev/null +++ b/docs/SINGLESTOREConfig.md @@ -0,0 +1,30 @@ +# SINGLESTOREConfig + +Configuration for SingleStore connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**table** | **str** | Table Name. 
Example: Enter table name | + +## Example + +```python +from vectorize_client.models.singlestore_config import SINGLESTOREConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of SINGLESTOREConfig from a JSON string +singlestore_config_instance = SINGLESTOREConfig.from_json(json) +# print the JSON string representation of the object +print(SINGLESTOREConfig.to_json()) + +# convert the object into a dict +singlestore_config_dict = singlestore_config_instance.to_dict() +# create an instance of SINGLESTOREConfig from a dict +singlestore_config_from_dict = SINGLESTOREConfig.from_dict(singlestore_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SUPABASEAuthConfig.md b/docs/SUPABASEAuthConfig.md new file mode 100644 index 0000000..5eedb64 --- /dev/null +++ b/docs/SUPABASEAuthConfig.md @@ -0,0 +1,35 @@ +# SUPABASEAuthConfig + +Authentication configuration for Supabase + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Supabase integration | +**host** | **str** | Host. Example: Enter the host of the deployment | [default to 'aws-0-us-east-1.pooler.supabase.com'] +**port** | **float** | Port. Example: Enter the port of the deployment | [optional] [default to 5432] +**database** | **str** | Database. Example: Enter the database name | +**username** | **str** | Username. Example: Enter the username | +**password** | **str** | Password. 
Example: Enter the username's password | + +## Example + +```python +from vectorize_client.models.supabase_auth_config import SUPABASEAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of SUPABASEAuthConfig from a JSON string +supabase_auth_config_instance = SUPABASEAuthConfig.from_json(json) +# print the JSON string representation of the object +print(SUPABASEAuthConfig.to_json()) + +# convert the object into a dict +supabase_auth_config_dict = supabase_auth_config_instance.to_dict() +# create an instance of SUPABASEAuthConfig from a dict +supabase_auth_config_from_dict = SUPABASEAuthConfig.from_dict(supabase_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SUPABASEConfig.md b/docs/SUPABASEConfig.md new file mode 100644 index 0000000..5328143 --- /dev/null +++ b/docs/SUPABASEConfig.md @@ -0,0 +1,30 @@ +# SUPABASEConfig + +Configuration for Supabase connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**table** | **str** | Table Name. 
Example: Enter \<table name\> or \<schema\>.\<table name\> |
+
+## Example
+
+```python
+from vectorize_client.models.supabase_config import SUPABASEConfig
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of SUPABASEConfig from a JSON string
+supabase_config_instance = SUPABASEConfig.from_json(json)
+# print the JSON string representation of the object
+print(SUPABASEConfig.to_json())
+
+# convert the object into a dict
+supabase_config_dict = supabase_config_instance.to_dict()
+# create an instance of SUPABASEConfig from a dict
+supabase_config_from_dict = SUPABASEConfig.from_dict(supabase_config_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ScheduleSchema.md b/docs/ScheduleSchema.md
new file mode 100644
index 0000000..7a8f93b
--- /dev/null
+++ b/docs/ScheduleSchema.md
@@ -0,0 +1,29 @@
+# ScheduleSchema
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**type** | [**ScheduleSchemaType**](ScheduleSchemaType.md) | |
+
+## Example
+
+```python
+from vectorize_client.models.schedule_schema import ScheduleSchema
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of ScheduleSchema from a JSON string
+schedule_schema_instance = ScheduleSchema.from_json(json)
+# print the JSON string representation of the object
+print(ScheduleSchema.to_json())
+
+# convert the object into a dict
+schedule_schema_dict = schedule_schema_instance.to_dict()
+# create an instance of ScheduleSchema from a dict
+schedule_schema_from_dict = ScheduleSchema.from_dict(schedule_schema_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ScheduleSchemaType.md b/docs/ScheduleSchemaType.md
new file mode 
100644 index 0000000..3da057e --- /dev/null +++ b/docs/ScheduleSchemaType.md @@ -0,0 +1,14 @@ +# ScheduleSchemaType + + +## Enum + +* `MANUAL` (value: `'manual'`) + +* `REALTIME` (value: `'realtime'`) + +* `CUSTOM` (value: `'custom'`) + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Sharepoint.md b/docs/Sharepoint.md new file mode 100644 index 0000000..2595c5b --- /dev/null +++ b/docs/Sharepoint.md @@ -0,0 +1,31 @@ +# Sharepoint + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"SHAREPOINT\") | +**config** | [**SHAREPOINTConfig**](SHAREPOINTConfig.md) | | + +## Example + +```python +from vectorize_client.models.sharepoint import Sharepoint + +# TODO update the JSON string below +json = "{}" +# create an instance of Sharepoint from a JSON string +sharepoint_instance = Sharepoint.from_json(json) +# print the JSON string representation of the object +print(Sharepoint.to_json()) + +# convert the object into a dict +sharepoint_dict = sharepoint_instance.to_dict() +# create an instance of Sharepoint from a dict +sharepoint_from_dict = Sharepoint.from_dict(sharepoint_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Sharepoint1.md b/docs/Sharepoint1.md new file mode 100644 index 0000000..b97802a --- /dev/null +++ b/docs/Sharepoint1.md @@ -0,0 +1,29 @@ +# Sharepoint1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**SHAREPOINTConfig**](SHAREPOINTConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.sharepoint1 import 
Sharepoint1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Sharepoint1 from a JSON string +sharepoint1_instance = Sharepoint1.from_json(json) +# print the JSON string representation of the object +print(Sharepoint1.to_json()) + +# convert the object into a dict +sharepoint1_dict = sharepoint1_instance.to_dict() +# create an instance of Sharepoint1 from a dict +sharepoint1_from_dict = Sharepoint1.from_dict(sharepoint1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Sharepoint2.md b/docs/Sharepoint2.md new file mode 100644 index 0000000..858414c --- /dev/null +++ b/docs/Sharepoint2.md @@ -0,0 +1,30 @@ +# Sharepoint2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"SHAREPOINT\") | + +## Example + +```python +from vectorize_client.models.sharepoint2 import Sharepoint2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Sharepoint2 from a JSON string +sharepoint2_instance = Sharepoint2.from_json(json) +# print the JSON string representation of the object +print(Sharepoint2.to_json()) + +# convert the object into a dict +sharepoint2_dict = sharepoint2_instance.to_dict() +# create an instance of Sharepoint2 from a dict +sharepoint2_from_dict = Sharepoint2.from_dict(sharepoint2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Singlestore.md b/docs/Singlestore.md new file mode 100644 index 0000000..54ff31f --- /dev/null +++ b/docs/Singlestore.md @@ -0,0 +1,31 @@ +# Singlestore + + +## Properties + +Name | Type | Description | Notes +------------ | 
------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"SINGLESTORE\") | +**config** | [**SINGLESTOREConfig**](SINGLESTOREConfig.md) | | + +## Example + +```python +from vectorize_client.models.singlestore import Singlestore + +# TODO update the JSON string below +json = "{}" +# create an instance of Singlestore from a JSON string +singlestore_instance = Singlestore.from_json(json) +# print the JSON string representation of the object +print(Singlestore.to_json()) + +# convert the object into a dict +singlestore_dict = singlestore_instance.to_dict() +# create an instance of Singlestore from a dict +singlestore_from_dict = Singlestore.from_dict(singlestore_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Singlestore1.md b/docs/Singlestore1.md new file mode 100644 index 0000000..0922783 --- /dev/null +++ b/docs/Singlestore1.md @@ -0,0 +1,29 @@ +# Singlestore1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**SINGLESTOREConfig**](SINGLESTOREConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.singlestore1 import Singlestore1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Singlestore1 from a JSON string +singlestore1_instance = Singlestore1.from_json(json) +# print the JSON string representation of the object +print(Singlestore1.to_json()) + +# convert the object into a dict +singlestore1_dict = singlestore1_instance.to_dict() +# create an instance of Singlestore1 from a dict +singlestore1_from_dict = Singlestore1.from_dict(singlestore1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) + + diff --git a/docs/Singlestore2.md b/docs/Singlestore2.md new file mode 100644 index 0000000..aeaddee --- /dev/null +++ b/docs/Singlestore2.md @@ -0,0 +1,30 @@ +# Singlestore2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"SINGLESTORE\") | + +## Example + +```python +from vectorize_client.models.singlestore2 import Singlestore2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Singlestore2 from a JSON string +singlestore2_instance = Singlestore2.from_json(json) +# print the JSON string representation of the object +print(Singlestore2.to_json()) + +# convert the object into a dict +singlestore2_dict = singlestore2_instance.to_dict() +# create an instance of Singlestore2 from a dict +singlestore2_from_dict = Singlestore2.from_dict(singlestore2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SourceConnector.md b/docs/SourceConnector.md new file mode 100644 index 0000000..295d952 --- /dev/null +++ b/docs/SourceConnector.md @@ -0,0 +1,39 @@ +# SourceConnector + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**type** | **str** | | +**name** | **str** | | +**config_doc** | **Dict[str, Optional[object]]** | | [optional] +**created_at** | **str** | | [optional] +**created_by_id** | **str** | | [optional] +**last_updated_by_id** | **str** | | [optional] +**created_by_email** | **str** | | [optional] +**last_updated_by_email** | **str** | | [optional] +**error_message** | **str** | | [optional] +**verification_status** | **str** | | [optional] + +## Example + +```python +from 
vectorize_client.models.source_connector import SourceConnector + +# TODO update the JSON string below +json = "{}" +# create an instance of SourceConnector from a JSON string +source_connector_instance = SourceConnector.from_json(json) +# print the JSON string representation of the object +print(SourceConnector.to_json()) + +# convert the object into a dict +source_connector_dict = source_connector_instance.to_dict() +# create an instance of SourceConnector from a dict +source_connector_from_dict = SourceConnector.from_dict(source_connector_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SourceConnectorInput.md b/docs/SourceConnectorInput.md new file mode 100644 index 0000000..609c590 --- /dev/null +++ b/docs/SourceConnectorInput.md @@ -0,0 +1,32 @@ +# SourceConnectorInput + +Source connector configuration + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the source connector | +**type** | **str** | Type of source connector | +**config** | [**SourceConnectorInputConfig**](SourceConnectorInputConfig.md) | | + +## Example + +```python +from vectorize_client.models.source_connector_input import SourceConnectorInput + +# TODO update the JSON string below +json = "{}" +# create an instance of SourceConnectorInput from a JSON string +source_connector_input_instance = SourceConnectorInput.from_json(json) +# print the JSON string representation of the object +print(SourceConnectorInput.to_json()) + +# convert the object into a dict +source_connector_input_dict = source_connector_input_instance.to_dict() +# create an instance of SourceConnectorInput from a dict +source_connector_input_from_dict = SourceConnectorInput.from_dict(source_connector_input_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) 
[[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SourceConnectorInputConfig.md b/docs/SourceConnectorInputConfig.md new file mode 100644 index 0000000..0ee3030 --- /dev/null +++ b/docs/SourceConnectorInputConfig.md @@ -0,0 +1,78 @@ +# SourceConnectorInputConfig + +Configuration specific to the connector type + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_extensions** | **List[str]** | File Extensions | +**idle_time** | **float** | Check for updates every (seconds) | [default to 5] +**recursive** | **bool** | Recursively scan all folders in the bucket | [optional] +**path_prefix** | **str** | Read starting from this folder (optional). Example: Enter Folder path: /exampleFolder/subFolder | [optional] +**path_metadata_regex** | **str** | Path Metadata Regex | [optional] +**path_regex_group_names** | **str** | Path Regex Group Names. Example: Enter Group Name | [optional] +**spaces** | **str** | Spaces. Example: Spaces to include (name, key or id) | +**root_parents** | **str** | Restrict ingest to these folder URLs (optional). Example: Enter Folder URLs. Example: https://drive.google.com/drive/folders/1234aBCd5678_eFgH9012iJKL3456opqr | [optional] +**emoji** | **str** | Emoji Filter. Example: Enter custom emoji filter name | [optional] +**author** | **str** | Author Filter. Example: Enter author name | [optional] +**ignore_author** | **str** | Ignore Author Filter. Example: Enter ignore author name | [optional] +**limit** | **float** | Limit. Example: Enter limit | [optional] [default to 10000] +**thread_message_inclusion** | **str** | Thread Message Inclusion | [optional] [default to 'ALL'] +**filter_logic** | **str** | Filter Logic | [optional] [default to 'AND'] +**thread_message_mode** | **str** | Thread Message Mode | [optional] [default to 'CONCATENATE'] +**endpoint** | **str** | Endpoint. 
Example: Choose which api endpoint to use | [default to 'Crawl'] +**request** | **object** | Request Body. Example: JSON config for firecrawl's /crawl or /scrape endpoint. | +**created_at** | **date** | Created After. Filter for conversation created after this date. Example: Enter a date: Example 2012-12-31 | +**updated_at** | **date** | Updated After. Filter for conversation updated after this date. Example: Enter a date: Example 2012-12-31 | [optional] +**state** | **List[str]** | State | [optional] +**select_resources** | **str** | Select Notion Resources | +**database_ids** | **str** | Database IDs | +**database_names** | **str** | Database Names | +**page_ids** | **str** | Page IDs | +**page_names** | **str** | Page Names | +**sites** | **str** | Site Name(s). Example: Filter by site name. All sites if empty. | [optional] +**allowed_domains_opt** | **str** | Additional Allowed URLs or prefix(es). Add one or more allowed URLs or URL prefixes. The crawler will read URLs that match these patterns in addition to the seed URL(s).. Example: (e.g. https://docs.example.com) | [optional] +**forbidden_paths** | **str** | Forbidden Paths. Example: Enter forbidden paths (e.g. /admin) | [optional] +**min_time_between_requests** | **float** | Throttle (ms). Example: Enter minimum time between requests in milliseconds | [optional] [default to 500] +**max_error_count** | **float** | Max Error Count. Example: Enter maximum error count | [optional] [default to 5] +**max_urls** | **float** | Max URLs. Example: Enter maximum number of URLs to crawl | [optional] [default to 1000] +**max_depth** | **float** | Max Depth. Example: Enter maximum crawl depth | [optional] [default to 50] +**reindex_interval_seconds** | **float** | Reindex Interval (seconds). Example: Enter reindex interval in seconds | [optional] [default to 3600] +**repositories** | **str** | Repositories. 
Example: Example: owner1/repo1 | +**include_pull_requests** | **bool** | Include Pull Requests | [default to True] +**pull_request_status** | **str** | Pull Request Status | [default to 'all'] +**pull_request_labels** | **str** | Pull Request Labels. Example: Optionally filter by label. E.g. fix | [optional] +**include_issues** | **bool** | Include Issues | [default to True] +**issue_status** | **str** | Issue Status | [default to 'all'] +**issue_labels** | **str** | Issue Labels. Example: Optionally filter by label. E.g. bug | [optional] +**max_items** | **float** | Max Items. Example: Enter maximum number of items to fetch | [default to 1000] +**created_after** | **date** | Created After. Filter for items created after this date. Example: Enter a date: Example 2012-12-31 | [optional] +**start_date** | **date** | Start Date. Include meetings from this date forward. Example: Enter a date: Example 2023-12-31 | +**end_date** | **date** | End Date. Include meetings up to this date only. Example: Enter a date: Example 2023-12-31 | [optional] +**title_filter_type** | **str** | | [default to 'AND'] +**title_filter** | **str** | Title Filter. Only include meetings with this text in the title. Example: Enter meeting title | [optional] +**participant_filter_type** | **str** | | [default to 'AND'] +**participant_filter** | **str** | Participant's Email Filter. Include meetings where these participants were invited. Example: Enter participant email | [optional] +**max_meetings** | **float** | Max Meetings. Enter -1 for all available meetings, or specify a limit. Example: Enter maximum number of meetings to retrieve. 
(-1 for all) | [optional] [default to -1] + +## Example + +```python +from vectorize_client.models.source_connector_input_config import SourceConnectorInputConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of SourceConnectorInputConfig from a JSON string +source_connector_input_config_instance = SourceConnectorInputConfig.from_json(json) +# print the JSON string representation of the object +print(SourceConnectorInputConfig.to_json()) + +# convert the object into a dict +source_connector_input_config_dict = source_connector_input_config_instance.to_dict() +# create an instance of SourceConnectorInputConfig from a dict +source_connector_input_config_from_dict = SourceConnectorInputConfig.from_dict(source_connector_input_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SourceConnectorSchema.md b/docs/SourceConnectorSchema.md new file mode 100644 index 0000000..44f7835 --- /dev/null +++ b/docs/SourceConnectorSchema.md @@ -0,0 +1,31 @@ +# SourceConnectorSchema + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**type** | [**SourceConnectorType**](SourceConnectorType.md) | | +**config** | **Dict[str, Optional[object]]** | | + +## Example + +```python +from vectorize_client.models.source_connector_schema import SourceConnectorSchema + +# TODO update the JSON string below +json = "{}" +# create an instance of SourceConnectorSchema from a JSON string +source_connector_schema_instance = SourceConnectorSchema.from_json(json) +# print the JSON string representation of the object +print(SourceConnectorSchema.to_json()) + +# convert the object into a dict +source_connector_schema_dict = source_connector_schema_instance.to_dict() +# create an instance of SourceConnectorSchema from a dict 
+source_connector_schema_from_dict = SourceConnectorSchema.from_dict(source_connector_schema_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/SourceConnectorType.md b/docs/SourceConnectorType.md new file mode 100644 index 0000000..65739cf --- /dev/null +++ b/docs/SourceConnectorType.md @@ -0,0 +1,56 @@ +# SourceConnectorType + + +## Enum + +* `AWS_S3` (value: `'AWS_S3'`) + +* `AZURE_BLOB` (value: `'AZURE_BLOB'`) + +* `CONFLUENCE` (value: `'CONFLUENCE'`) + +* `DISCORD` (value: `'DISCORD'`) + +* `DROPBOX` (value: `'DROPBOX'`) + +* `DROPBOX_OAUTH` (value: `'DROPBOX_OAUTH'`) + +* `DROPBOX_OAUTH_MULTI` (value: `'DROPBOX_OAUTH_MULTI'`) + +* `DROPBOX_OAUTH_MULTI_CUSTOM` (value: `'DROPBOX_OAUTH_MULTI_CUSTOM'`) + +* `GOOGLE_DRIVE_OAUTH` (value: `'GOOGLE_DRIVE_OAUTH'`) + +* `GOOGLE_DRIVE` (value: `'GOOGLE_DRIVE'`) + +* `GOOGLE_DRIVE_OAUTH_MULTI` (value: `'GOOGLE_DRIVE_OAUTH_MULTI'`) + +* `GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM` (value: `'GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM'`) + +* `FIRECRAWL` (value: `'FIRECRAWL'`) + +* `GCS` (value: `'GCS'`) + +* `INTERCOM` (value: `'INTERCOM'`) + +* `NOTION` (value: `'NOTION'`) + +* `NOTION_OAUTH_MULTI` (value: `'NOTION_OAUTH_MULTI'`) + +* `NOTION_OAUTH_MULTI_CUSTOM` (value: `'NOTION_OAUTH_MULTI_CUSTOM'`) + +* `ONE_DRIVE` (value: `'ONE_DRIVE'`) + +* `SHAREPOINT` (value: `'SHAREPOINT'`) + +* `WEB_CRAWLER` (value: `'WEB_CRAWLER'`) + +* `FILE_UPLOAD` (value: `'FILE_UPLOAD'`) + +* `GITHUB` (value: `'GITHUB'`) + +* `FIREFLIES` (value: `'FIREFLIES'`) + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/StartDeepResearchRequest.md b/docs/StartDeepResearchRequest.md new file mode 100644 index 0000000..918c347 --- /dev/null +++ b/docs/StartDeepResearchRequest.md @@ -0,0 +1,32 @@ +# 
StartDeepResearchRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**query** | **str** | | +**web_search** | **bool** | | [optional] [default to False] +**var_schema** | **str** | | [optional] +**n8n** | [**N8NConfig**](N8NConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.start_deep_research_request import StartDeepResearchRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of StartDeepResearchRequest from a JSON string +start_deep_research_request_instance = StartDeepResearchRequest.from_json(json) +# print the JSON string representation of the object +print(StartDeepResearchRequest.to_json()) + +# convert the object into a dict +start_deep_research_request_dict = start_deep_research_request_instance.to_dict() +# create an instance of StartDeepResearchRequest from a dict +start_deep_research_request_from_dict = StartDeepResearchRequest.from_dict(start_deep_research_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/StartDeepResearchResponse.md b/docs/StartDeepResearchResponse.md new file mode 100644 index 0000000..7130a15 --- /dev/null +++ b/docs/StartDeepResearchResponse.md @@ -0,0 +1,29 @@ +# StartDeepResearchResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**research_id** | **str** | | + +## Example + +```python +from vectorize_client.models.start_deep_research_response import StartDeepResearchResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of StartDeepResearchResponse from a JSON string +start_deep_research_response_instance = StartDeepResearchResponse.from_json(json) +# print the JSON string representation of the object 
+print(StartDeepResearchResponse.to_json()) + +# convert the object into a dict +start_deep_research_response_dict = start_deep_research_response_instance.to_dict() +# create an instance of StartDeepResearchResponse from a dict +start_deep_research_response_from_dict = StartDeepResearchResponse.from_dict(start_deep_research_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/StartExtractionRequest.md b/docs/StartExtractionRequest.md new file mode 100644 index 0000000..237b1d3 --- /dev/null +++ b/docs/StartExtractionRequest.md @@ -0,0 +1,33 @@ +# StartExtractionRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_id** | **str** | | +**type** | [**ExtractionType**](ExtractionType.md) | | [optional] [default to ExtractionType.IRIS] +**chunking_strategy** | [**ExtractionChunkingStrategy**](ExtractionChunkingStrategy.md) | | [optional] [default to ExtractionChunkingStrategy.MARKDOWN] +**chunk_size** | **float** | | [optional] [default to 256] +**metadata** | [**MetadataExtractionStrategy**](MetadataExtractionStrategy.md) | | [optional] + +## Example + +```python +from vectorize_client.models.start_extraction_request import StartExtractionRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of StartExtractionRequest from a JSON string +start_extraction_request_instance = StartExtractionRequest.from_json(json) +# print the JSON string representation of the object +print(StartExtractionRequest.to_json()) + +# convert the object into a dict +start_extraction_request_dict = start_extraction_request_instance.to_dict() +# create an instance of StartExtractionRequest from a dict +start_extraction_request_from_dict = StartExtractionRequest.from_dict(start_extraction_request_dict) +``` +[[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/StartExtractionResponse.md b/docs/StartExtractionResponse.md new file mode 100644 index 0000000..72aeeed --- /dev/null +++ b/docs/StartExtractionResponse.md @@ -0,0 +1,30 @@ +# StartExtractionResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**extraction_id** | **str** | | + +## Example + +```python +from vectorize_client.models.start_extraction_response import StartExtractionResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of StartExtractionResponse from a JSON string +start_extraction_response_instance = StartExtractionResponse.from_json(json) +# print the JSON string representation of the object +print(StartExtractionResponse.to_json()) + +# convert the object into a dict +start_extraction_response_dict = start_extraction_response_instance.to_dict() +# create an instance of StartExtractionResponse from a dict +start_extraction_response_from_dict = StartExtractionResponse.from_dict(start_extraction_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/StartFileUploadRequest.md b/docs/StartFileUploadRequest.md new file mode 100644 index 0000000..221e641 --- /dev/null +++ b/docs/StartFileUploadRequest.md @@ -0,0 +1,30 @@ +# StartFileUploadRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**content_type** | **str** | | + +## Example + +```python +from vectorize_client.models.start_file_upload_request import StartFileUploadRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of 
StartFileUploadRequest from a JSON string +start_file_upload_request_instance = StartFileUploadRequest.from_json(json) +# print the JSON string representation of the object +print(StartFileUploadRequest.to_json()) + +# convert the object into a dict +start_file_upload_request_dict = start_file_upload_request_instance.to_dict() +# create an instance of StartFileUploadRequest from a dict +start_file_upload_request_from_dict = StartFileUploadRequest.from_dict(start_file_upload_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/StartFileUploadResponse.md b/docs/StartFileUploadResponse.md new file mode 100644 index 0000000..d321ab2 --- /dev/null +++ b/docs/StartFileUploadResponse.md @@ -0,0 +1,30 @@ +# StartFileUploadResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file_id** | **str** | | +**upload_url** | **str** | | + +## Example + +```python +from vectorize_client.models.start_file_upload_response import StartFileUploadResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of StartFileUploadResponse from a JSON string +start_file_upload_response_instance = StartFileUploadResponse.from_json(json) +# print the JSON string representation of the object +print(StartFileUploadResponse.to_json()) + +# convert the object into a dict +start_file_upload_response_dict = start_file_upload_response_instance.to_dict() +# create an instance of StartFileUploadResponse from a dict +start_file_upload_response_from_dict = StartFileUploadResponse.from_dict(start_file_upload_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/StartFileUploadToConnectorRequest.md 
b/docs/StartFileUploadToConnectorRequest.md new file mode 100644 index 0000000..f6da191 --- /dev/null +++ b/docs/StartFileUploadToConnectorRequest.md @@ -0,0 +1,31 @@ +# StartFileUploadToConnectorRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**content_type** | **str** | | +**metadata** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.start_file_upload_to_connector_request import StartFileUploadToConnectorRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of StartFileUploadToConnectorRequest from a JSON string +start_file_upload_to_connector_request_instance = StartFileUploadToConnectorRequest.from_json(json) +# print the JSON string representation of the object +print(StartFileUploadToConnectorRequest.to_json()) + +# convert the object into a dict +start_file_upload_to_connector_request_dict = start_file_upload_to_connector_request_instance.to_dict() +# create an instance of StartFileUploadToConnectorRequest from a dict +start_file_upload_to_connector_request_from_dict = StartFileUploadToConnectorRequest.from_dict(start_file_upload_to_connector_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/StartFileUploadToConnectorResponse.md b/docs/StartFileUploadToConnectorResponse.md new file mode 100644 index 0000000..62a380f --- /dev/null +++ b/docs/StartFileUploadToConnectorResponse.md @@ -0,0 +1,29 @@ +# StartFileUploadToConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**upload_url** | **str** | | + +## Example + +```python +from vectorize_client.models.start_file_upload_to_connector_response import StartFileUploadToConnectorResponse + +# TODO update the JSON string 
below +json = "{}" +# create an instance of StartFileUploadToConnectorResponse from a JSON string +start_file_upload_to_connector_response_instance = StartFileUploadToConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(StartFileUploadToConnectorResponse.to_json()) + +# convert the object into a dict +start_file_upload_to_connector_response_dict = start_file_upload_to_connector_response_instance.to_dict() +# create an instance of StartFileUploadToConnectorResponse from a dict +start_file_upload_to_connector_response_from_dict = StartFileUploadToConnectorResponse.from_dict(start_file_upload_to_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/StartPipelineResponse.md b/docs/StartPipelineResponse.md new file mode 100644 index 0000000..d70222b --- /dev/null +++ b/docs/StartPipelineResponse.md @@ -0,0 +1,29 @@ +# StartPipelineResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | + +## Example + +```python +from vectorize_client.models.start_pipeline_response import StartPipelineResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of StartPipelineResponse from a JSON string +start_pipeline_response_instance = StartPipelineResponse.from_json(json) +# print the JSON string representation of the object +print(StartPipelineResponse.to_json()) + +# convert the object into a dict +start_pipeline_response_dict = start_pipeline_response_instance.to_dict() +# create an instance of StartPipelineResponse from a dict +start_pipeline_response_from_dict = StartPipelineResponse.from_dict(start_pipeline_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) 
[[Back to README]](../README.md) + + diff --git a/docs/StopPipelineResponse.md b/docs/StopPipelineResponse.md new file mode 100644 index 0000000..0917c99 --- /dev/null +++ b/docs/StopPipelineResponse.md @@ -0,0 +1,29 @@ +# StopPipelineResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | + +## Example + +```python +from vectorize_client.models.stop_pipeline_response import StopPipelineResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of StopPipelineResponse from a JSON string +stop_pipeline_response_instance = StopPipelineResponse.from_json(json) +# print the JSON string representation of the object +print(StopPipelineResponse.to_json()) + +# convert the object into a dict +stop_pipeline_response_dict = stop_pipeline_response_instance.to_dict() +# create an instance of StopPipelineResponse from a dict +stop_pipeline_response_from_dict = StopPipelineResponse.from_dict(stop_pipeline_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Supabase.md b/docs/Supabase.md new file mode 100644 index 0000000..fed78fa --- /dev/null +++ b/docs/Supabase.md @@ -0,0 +1,31 @@ +# Supabase + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"SUPABASE\") | +**config** | [**SUPABASEConfig**](SUPABASEConfig.md) | | + +## Example + +```python +from vectorize_client.models.supabase import Supabase + +# TODO update the JSON string below +json = "{}" +# create an instance of Supabase from a JSON string +supabase_instance = Supabase.from_json(json) +# print the JSON string representation of the object +print(Supabase.to_json()) + +# convert the object into 
a dict +supabase_dict = supabase_instance.to_dict() +# create an instance of Supabase from a dict +supabase_from_dict = Supabase.from_dict(supabase_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Supabase1.md b/docs/Supabase1.md new file mode 100644 index 0000000..2dce336 --- /dev/null +++ b/docs/Supabase1.md @@ -0,0 +1,29 @@ +# Supabase1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**SUPABASEConfig**](SUPABASEConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.supabase1 import Supabase1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Supabase1 from a JSON string +supabase1_instance = Supabase1.from_json(json) +# print the JSON string representation of the object +print(Supabase1.to_json()) + +# convert the object into a dict +supabase1_dict = supabase1_instance.to_dict() +# create an instance of Supabase1 from a dict +supabase1_from_dict = Supabase1.from_dict(supabase1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Supabase2.md b/docs/Supabase2.md new file mode 100644 index 0000000..bfa3372 --- /dev/null +++ b/docs/Supabase2.md @@ -0,0 +1,30 @@ +# Supabase2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"SUPABASE\") | + +## Example + +```python +from vectorize_client.models.supabase2 import Supabase2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Supabase2 from a JSON string +supabase2_instance = Supabase2.from_json(json) +# 
print the JSON string representation of the object +print(Supabase2.to_json()) + +# convert the object into a dict +supabase2_dict = supabase2_instance.to_dict() +# create an instance of Supabase2 from a dict +supabase2_from_dict = Supabase2.from_dict(supabase2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/TURBOPUFFERAuthConfig.md b/docs/TURBOPUFFERAuthConfig.md new file mode 100644 index 0000000..c184b76 --- /dev/null +++ b/docs/TURBOPUFFERAuthConfig.md @@ -0,0 +1,31 @@ +# TURBOPUFFERAuthConfig + +Authentication configuration for Turbopuffer + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Turbopuffer integration | +**api_key** | **str** | API Key. Example: Enter your API key | + +## Example + +```python +from vectorize_client.models.turbopuffer_auth_config import TURBOPUFFERAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of TURBOPUFFERAuthConfig from a JSON string +turbopuffer_auth_config_instance = TURBOPUFFERAuthConfig.from_json(json) +# print the JSON string representation of the object +print(TURBOPUFFERAuthConfig.to_json()) + +# convert the object into a dict +turbopuffer_auth_config_dict = turbopuffer_auth_config_instance.to_dict() +# create an instance of TURBOPUFFERAuthConfig from a dict +turbopuffer_auth_config_from_dict = TURBOPUFFERAuthConfig.from_dict(turbopuffer_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/TURBOPUFFERConfig.md b/docs/TURBOPUFFERConfig.md new file mode 100644 index 0000000..049615f --- /dev/null +++ b/docs/TURBOPUFFERConfig.md @@ -0,0 +1,30 @@ +# 
TURBOPUFFERConfig + +Configuration for Turbopuffer connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**namespace** | **str** | Namespace. Example: Enter namespace name | + +## Example + +```python +from vectorize_client.models.turbopuffer_config import TURBOPUFFERConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of TURBOPUFFERConfig from a JSON string +turbopuffer_config_instance = TURBOPUFFERConfig.from_json(json) +# print the JSON string representation of the object +print(TURBOPUFFERConfig.to_json()) + +# convert the object into a dict +turbopuffer_config_dict = turbopuffer_config_instance.to_dict() +# create an instance of TURBOPUFFERConfig from a dict +turbopuffer_config_from_dict = TURBOPUFFERConfig.from_dict(turbopuffer_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Turbopuffer.md b/docs/Turbopuffer.md new file mode 100644 index 0000000..dce2d74 --- /dev/null +++ b/docs/Turbopuffer.md @@ -0,0 +1,31 @@ +# Turbopuffer + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"TURBOPUFFER\") | +**config** | [**TURBOPUFFERConfig**](TURBOPUFFERConfig.md) | | + +## Example + +```python +from vectorize_client.models.turbopuffer import Turbopuffer + +# TODO update the JSON string below +json = "{}" +# create an instance of Turbopuffer from a JSON string +turbopuffer_instance = Turbopuffer.from_json(json) +# print the JSON string representation of the object +print(Turbopuffer.to_json()) + +# convert the object into a dict +turbopuffer_dict = turbopuffer_instance.to_dict() +# create an instance of Turbopuffer from a dict +turbopuffer_from_dict = 
Turbopuffer.from_dict(turbopuffer_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Turbopuffer1.md b/docs/Turbopuffer1.md new file mode 100644 index 0000000..ccda8f0 --- /dev/null +++ b/docs/Turbopuffer1.md @@ -0,0 +1,29 @@ +# Turbopuffer1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**TURBOPUFFERConfig**](TURBOPUFFERConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.turbopuffer1 import Turbopuffer1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Turbopuffer1 from a JSON string +turbopuffer1_instance = Turbopuffer1.from_json(json) +# print the JSON string representation of the object +print(Turbopuffer1.to_json()) + +# convert the object into a dict +turbopuffer1_dict = turbopuffer1_instance.to_dict() +# create an instance of Turbopuffer1 from a dict +turbopuffer1_from_dict = Turbopuffer1.from_dict(turbopuffer1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Turbopuffer2.md b/docs/Turbopuffer2.md new file mode 100644 index 0000000..d08c428 --- /dev/null +++ b/docs/Turbopuffer2.md @@ -0,0 +1,30 @@ +# Turbopuffer2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"TURBOPUFFER\") | + +## Example + +```python +from vectorize_client.models.turbopuffer2 import Turbopuffer2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Turbopuffer2 from a JSON string +turbopuffer2_instance = Turbopuffer2.from_json(json) +# print the JSON string 
representation of the object +print(Turbopuffer2.to_json()) + +# convert the object into a dict +turbopuffer2_dict = turbopuffer2_instance.to_dict() +# create an instance of Turbopuffer2 from a dict +turbopuffer2_from_dict = Turbopuffer2.from_dict(turbopuffer2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdateAIPlatformConnectorRequest.md b/docs/UpdateAIPlatformConnectorRequest.md new file mode 100644 index 0000000..2fad614 --- /dev/null +++ b/docs/UpdateAIPlatformConnectorRequest.md @@ -0,0 +1,29 @@ +# UpdateAiplatformConnectorRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**VOYAGEAuthConfig**](VOYAGEAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.update_aiplatform_connector_request import UpdateAiplatformConnectorRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdateAiplatformConnectorRequest from a JSON string +update_aiplatform_connector_request_instance = UpdateAiplatformConnectorRequest.from_json(json) +# print the JSON string representation of the object +print(UpdateAiplatformConnectorRequest.to_json()) + +# convert the object into a dict +update_aiplatform_connector_request_dict = update_aiplatform_connector_request_instance.to_dict() +# create an instance of UpdateAiplatformConnectorRequest from a dict +update_aiplatform_connector_request_from_dict = UpdateAiplatformConnectorRequest.from_dict(update_aiplatform_connector_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdateAIPlatformConnectorResponse.md b/docs/UpdateAIPlatformConnectorResponse.md new file mode 100644 index 
0000000..5209503 --- /dev/null +++ b/docs/UpdateAIPlatformConnectorResponse.md @@ -0,0 +1,30 @@ +# UpdateAIPlatformConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**data** | [**UpdatedAIPlatformConnectorData**](UpdatedAIPlatformConnectorData.md) | | + +## Example + +```python +from vectorize_client.models.update_ai_platform_connector_response import UpdateAIPlatformConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdateAIPlatformConnectorResponse from a JSON string +update_ai_platform_connector_response_instance = UpdateAIPlatformConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(UpdateAIPlatformConnectorResponse.to_json()) + +# convert the object into a dict +update_ai_platform_connector_response_dict = update_ai_platform_connector_response_instance.to_dict() +# create an instance of UpdateAIPlatformConnectorResponse from a dict +update_ai_platform_connector_response_from_dict = UpdateAIPlatformConnectorResponse.from_dict(update_ai_platform_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdateDestinationConnectorRequest.md b/docs/UpdateDestinationConnectorRequest.md new file mode 100644 index 0000000..7998678 --- /dev/null +++ b/docs/UpdateDestinationConnectorRequest.md @@ -0,0 +1,29 @@ +# UpdateDestinationConnectorRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**TURBOPUFFERConfig**](TURBOPUFFERConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.update_destination_connector_request import UpdateDestinationConnectorRequest + +# TODO update the JSON string below +json = "{}" 
+# create an instance of UpdateDestinationConnectorRequest from a JSON string +update_destination_connector_request_instance = UpdateDestinationConnectorRequest.from_json(json) +# print the JSON string representation of the object +print(UpdateDestinationConnectorRequest.to_json()) + +# convert the object into a dict +update_destination_connector_request_dict = update_destination_connector_request_instance.to_dict() +# create an instance of UpdateDestinationConnectorRequest from a dict +update_destination_connector_request_from_dict = UpdateDestinationConnectorRequest.from_dict(update_destination_connector_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdateDestinationConnectorResponse.md b/docs/UpdateDestinationConnectorResponse.md new file mode 100644 index 0000000..58c1725 --- /dev/null +++ b/docs/UpdateDestinationConnectorResponse.md @@ -0,0 +1,30 @@ +# UpdateDestinationConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | +**data** | [**UpdatedDestinationConnectorData**](UpdatedDestinationConnectorData.md) | | + +## Example + +```python +from vectorize_client.models.update_destination_connector_response import UpdateDestinationConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdateDestinationConnectorResponse from a JSON string +update_destination_connector_response_instance = UpdateDestinationConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(UpdateDestinationConnectorResponse.to_json()) + +# convert the object into a dict +update_destination_connector_response_dict = update_destination_connector_response_instance.to_dict() +# create an instance of UpdateDestinationConnectorResponse from a dict 
+update_destination_connector_response_from_dict = UpdateDestinationConnectorResponse.from_dict(update_destination_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdateSourceConnectorRequest.md b/docs/UpdateSourceConnectorRequest.md new file mode 100644 index 0000000..47d6740 --- /dev/null +++ b/docs/UpdateSourceConnectorRequest.md @@ -0,0 +1,29 @@ +# UpdateSourceConnectorRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**FIREFLIESConfig**](FIREFLIESConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.update_source_connector_request import UpdateSourceConnectorRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdateSourceConnectorRequest from a JSON string +update_source_connector_request_instance = UpdateSourceConnectorRequest.from_json(json) +# print the JSON string representation of the object +print(UpdateSourceConnectorRequest.to_json()) + +# convert the object into a dict +update_source_connector_request_dict = update_source_connector_request_instance.to_dict() +# create an instance of UpdateSourceConnectorRequest from a dict +update_source_connector_request_from_dict = UpdateSourceConnectorRequest.from_dict(update_source_connector_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdateSourceConnectorResponse.md b/docs/UpdateSourceConnectorResponse.md new file mode 100644 index 0000000..2cb49dc --- /dev/null +++ b/docs/UpdateSourceConnectorResponse.md @@ -0,0 +1,30 @@ +# UpdateSourceConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | 
------------- | ------------- +**message** | **str** | | +**data** | [**UpdateSourceConnectorResponseData**](UpdateSourceConnectorResponseData.md) | | + +## Example + +```python +from vectorize_client.models.update_source_connector_response import UpdateSourceConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdateSourceConnectorResponse from a JSON string +update_source_connector_response_instance = UpdateSourceConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(UpdateSourceConnectorResponse.to_json()) + +# convert the object into a dict +update_source_connector_response_dict = update_source_connector_response_instance.to_dict() +# create an instance of UpdateSourceConnectorResponse from a dict +update_source_connector_response_from_dict = UpdateSourceConnectorResponse.from_dict(update_source_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdateSourceConnectorResponseData.md b/docs/UpdateSourceConnectorResponseData.md new file mode 100644 index 0000000..3dfc9c4 --- /dev/null +++ b/docs/UpdateSourceConnectorResponseData.md @@ -0,0 +1,30 @@ +# UpdateSourceConnectorResponseData + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**updated_connector** | [**SourceConnector**](SourceConnector.md) | | +**pipeline_ids** | **List[str]** | | [optional] + +## Example + +```python +from vectorize_client.models.update_source_connector_response_data import UpdateSourceConnectorResponseData + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdateSourceConnectorResponseData from a JSON string +update_source_connector_response_data_instance = UpdateSourceConnectorResponseData.from_json(json) +# print the JSON string representation of the 
object +print(UpdateSourceConnectorResponseData.to_json()) + +# convert the object into a dict +update_source_connector_response_data_dict = update_source_connector_response_data_instance.to_dict() +# create an instance of UpdateSourceConnectorResponseData from a dict +update_source_connector_response_data_from_dict = UpdateSourceConnectorResponseData.from_dict(update_source_connector_response_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdateUserInSourceConnectorRequest.md b/docs/UpdateUserInSourceConnectorRequest.md new file mode 100644 index 0000000..6834124 --- /dev/null +++ b/docs/UpdateUserInSourceConnectorRequest.md @@ -0,0 +1,32 @@ +# UpdateUserInSourceConnectorRequest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**user_id** | **str** | | +**selected_files** | [**AddUserToSourceConnectorRequestSelectedFiles**](AddUserToSourceConnectorRequestSelectedFiles.md) | | [optional] +**refresh_token** | **str** | | [optional] +**access_token** | **str** | | [optional] + +## Example + +```python +from vectorize_client.models.update_user_in_source_connector_request import UpdateUserInSourceConnectorRequest + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdateUserInSourceConnectorRequest from a JSON string +update_user_in_source_connector_request_instance = UpdateUserInSourceConnectorRequest.from_json(json) +# print the JSON string representation of the object +print(UpdateUserInSourceConnectorRequest.to_json()) + +# convert the object into a dict +update_user_in_source_connector_request_dict = update_user_in_source_connector_request_instance.to_dict() +# create an instance of UpdateUserInSourceConnectorRequest from a dict +update_user_in_source_connector_request_from_dict = 
UpdateUserInSourceConnectorRequest.from_dict(update_user_in_source_connector_request_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdateUserInSourceConnectorResponse.md b/docs/UpdateUserInSourceConnectorResponse.md new file mode 100644 index 0000000..642f1d9 --- /dev/null +++ b/docs/UpdateUserInSourceConnectorResponse.md @@ -0,0 +1,29 @@ +# UpdateUserInSourceConnectorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | | + +## Example + +```python +from vectorize_client.models.update_user_in_source_connector_response import UpdateUserInSourceConnectorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdateUserInSourceConnectorResponse from a JSON string +update_user_in_source_connector_response_instance = UpdateUserInSourceConnectorResponse.from_json(json) +# print the JSON string representation of the object +print(UpdateUserInSourceConnectorResponse.to_json()) + +# convert the object into a dict +update_user_in_source_connector_response_dict = update_user_in_source_connector_response_instance.to_dict() +# create an instance of UpdateUserInSourceConnectorResponse from a dict +update_user_in_source_connector_response_from_dict = UpdateUserInSourceConnectorResponse.from_dict(update_user_in_source_connector_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdatedAIPlatformConnectorData.md b/docs/UpdatedAIPlatformConnectorData.md new file mode 100644 index 0000000..ea27d35 --- /dev/null +++ b/docs/UpdatedAIPlatformConnectorData.md @@ -0,0 +1,30 @@ +# UpdatedAIPlatformConnectorData + + +## Properties + +Name | Type | Description | 
Notes +------------ | ------------- | ------------- | ------------- +**updated_connector** | [**AIPlatform**](AIPlatform.md) | | +**pipeline_ids** | **List[str]** | | [optional] + +## Example + +```python +from vectorize_client.models.updated_ai_platform_connector_data import UpdatedAIPlatformConnectorData + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdatedAIPlatformConnectorData from a JSON string +updated_ai_platform_connector_data_instance = UpdatedAIPlatformConnectorData.from_json(json) +# print the JSON string representation of the object +print(UpdatedAIPlatformConnectorData.to_json()) + +# convert the object into a dict +updated_ai_platform_connector_data_dict = updated_ai_platform_connector_data_instance.to_dict() +# create an instance of UpdatedAIPlatformConnectorData from a dict +updated_ai_platform_connector_data_from_dict = UpdatedAIPlatformConnectorData.from_dict(updated_ai_platform_connector_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UpdatedDestinationConnectorData.md b/docs/UpdatedDestinationConnectorData.md new file mode 100644 index 0000000..ce8ccd8 --- /dev/null +++ b/docs/UpdatedDestinationConnectorData.md @@ -0,0 +1,30 @@ +# UpdatedDestinationConnectorData + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**updated_connector** | [**DestinationConnector**](DestinationConnector.md) | | +**pipeline_ids** | **List[str]** | | [optional] + +## Example + +```python +from vectorize_client.models.updated_destination_connector_data import UpdatedDestinationConnectorData + +# TODO update the JSON string below +json = "{}" +# create an instance of UpdatedDestinationConnectorData from a JSON string +updated_destination_connector_data_instance = UpdatedDestinationConnectorData.from_json(json) +# print 
the JSON string representation of the object +print(UpdatedDestinationConnectorData.to_json()) + +# convert the object into a dict +updated_destination_connector_data_dict = updated_destination_connector_data_instance.to_dict() +# create an instance of UpdatedDestinationConnectorData from a dict +updated_destination_connector_data_from_dict = UpdatedDestinationConnectorData.from_dict(updated_destination_connector_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UploadFile.md b/docs/UploadFile.md new file mode 100644 index 0000000..cbd0bc5 --- /dev/null +++ b/docs/UploadFile.md @@ -0,0 +1,34 @@ +# UploadFile + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**key** | **str** | | +**name** | **str** | | +**size** | **float** | | +**extension** | **str** | | [optional] +**last_modified** | **str** | | +**metadata** | **Dict[str, str]** | | + +## Example + +```python +from vectorize_client.models.upload_file import UploadFile + +# TODO update the JSON string below +json = "{}" +# create an instance of UploadFile from a JSON string +upload_file_instance = UploadFile.from_json(json) +# print the JSON string representation of the object +print(UploadFile.to_json()) + +# convert the object into a dict +upload_file_dict = upload_file_instance.to_dict() +# create an instance of UploadFile from a dict +upload_file_from_dict = UploadFile.from_dict(upload_file_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/UploadsApi.md b/docs/UploadsApi.md new file mode 100644 index 0000000..55ec4ad --- /dev/null +++ b/docs/UploadsApi.md @@ -0,0 +1,263 @@ +# vectorize_client.UploadsApi + +All URIs are relative to 
*https://api.vectorize.io/v1* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**delete_file_from_connector**](UploadsApi.md#delete_file_from_connector) | **DELETE** /org/{organizationId}/uploads/{connectorId}/files | Delete a file from a file upload connector +[**get_upload_files_from_connector**](UploadsApi.md#get_upload_files_from_connector) | **GET** /org/{organizationId}/uploads/{connectorId}/files | Get uploaded files from a file upload connector +[**start_file_upload_to_connector**](UploadsApi.md#start_file_upload_to_connector) | **PUT** /org/{organizationId}/uploads/{connectorId}/files | Upload a file to a file upload connector + + +# **delete_file_from_connector** +> DeleteFileResponse delete_file_from_connector(organization, connector_id) + +Delete a file from a file upload connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.delete_file_response import DeleteFileResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.UploadsApi(api_client) + organization = 'organization_example' # str | + connector_id = 'connector_id_example' # str | + + try: + # Delete a file from a file upload connector + api_response = api_instance.delete_file_from_connector(organization, connector_id) + print("The response of UploadsApi->delete_file_from_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling UploadsApi->delete_file_from_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **connector_id** | **str**| | + +### Return type + +[**DeleteFileResponse**](DeleteFileResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | File deleted successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_upload_files_from_connector** +> GetUploadFilesResponse get_upload_files_from_connector(organization, connector_id) + +Get uploaded files from a file upload connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import 
vectorize_client +from vectorize_client.models.get_upload_files_response import GetUploadFilesResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.UploadsApi(api_client) + organization = 'organization_example' # str | + connector_id = 'connector_id_example' # str | + + try: + # Get uploaded files from a file upload connector + api_response = api_instance.get_upload_files_from_connector(organization, connector_id) + print("The response of UploadsApi->get_upload_files_from_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling UploadsApi->get_upload_files_from_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **connector_id** | **str**| | + +### Return type + +[**GetUploadFilesResponse**](GetUploadFilesResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | 
+|-------------|-------------|------------------| +**200** | Files retrieved successfully | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **start_file_upload_to_connector** +> StartFileUploadToConnectorResponse start_file_upload_to_connector(organization, connector_id, start_file_upload_to_connector_request) + +Upload a file to a file upload connector + +### Example + +* Bearer (JWT) Authentication (bearerAuth): + +```python +import vectorize_client +from vectorize_client.models.start_file_upload_to_connector_request import StartFileUploadToConnectorRequest +from vectorize_client.models.start_file_upload_to_connector_response import StartFileUploadToConnectorResponse +from vectorize_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://api.vectorize.io/v1 +# See configuration.py for a list of all supported configuration parameters. +configuration = vectorize_client.Configuration( + host = "https://api.vectorize.io/v1" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. 
+ +# Configure Bearer authorization (JWT): bearerAuth +configuration = vectorize_client.Configuration( + access_token = os.environ["BEARER_TOKEN"] +) + +# Enter a context with an instance of the API client +with vectorize_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = vectorize_client.UploadsApi(api_client) + organization = 'organization_example' # str | + connector_id = 'connector_id_example' # str | + start_file_upload_to_connector_request = {"name":"My StartFileUploadToConnectorRequest","contentType":"document","metadata":"example-metadata"} # StartFileUploadToConnectorRequest | + + try: + # Upload a file to a file upload connector + api_response = api_instance.start_file_upload_to_connector(organization, connector_id, start_file_upload_to_connector_request) + print("The response of UploadsApi->start_file_upload_to_connector:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling UploadsApi->start_file_upload_to_connector: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organization** | **str**| | + **connector_id** | **str**| | + **start_file_upload_to_connector_request** | [**StartFileUploadToConnectorRequest**](StartFileUploadToConnectorRequest.md)| | + +### Return type + +[**StartFileUploadToConnectorResponse**](StartFileUploadToConnectorResponse.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | File ready to be uploaded | - | +**400** | Invalid request | - | +**401** | Unauthorized | - | +**403** | Forbidden | - | +**404** | Not found | - | +**500** | Internal server error | - | + +[[Back to top]](#) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/docs/VERTEXAuthConfig.md b/docs/VERTEXAuthConfig.md new file mode 100644 index 0000000..afa1b6a --- /dev/null +++ b/docs/VERTEXAuthConfig.md @@ -0,0 +1,32 @@ +# VERTEXAuthConfig + +Authentication configuration for Google Vertex AI + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Google Vertex AI integration | +**key** | **str** | Service Account Json. Example: Enter the contents of your Google Vertex AI Service Account JSON file | +**region** | **str** | Region. Example: Region Name, e.g. us-central1 | + +## Example + +```python +from vectorize_client.models.vertex_auth_config import VERTEXAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of VERTEXAuthConfig from a JSON string +vertex_auth_config_instance = VERTEXAuthConfig.from_json(json) +# print the JSON string representation of the object +print(VERTEXAuthConfig.to_json()) + +# convert the object into a dict +vertex_auth_config_dict = vertex_auth_config_instance.to_dict() +# create an instance of VERTEXAuthConfig from a dict +vertex_auth_config_from_dict = VERTEXAuthConfig.from_dict(vertex_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/VOYAGEAuthConfig.md b/docs/VOYAGEAuthConfig.md new file mode 100644 index 0000000..b2884c7 --- /dev/null +++ b/docs/VOYAGEAuthConfig.md @@ -0,0 +1,31 @@ +# VOYAGEAuthConfig + +Authentication configuration for Voyage AI + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. 
Example: Enter a descriptive name for your Voyage AI integration | +**key** | **str** | API Key. Example: Enter your Voyage AI API Key | + +## Example + +```python +from vectorize_client.models.voyage_auth_config import VOYAGEAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of VOYAGEAuthConfig from a JSON string +voyage_auth_config_instance = VOYAGEAuthConfig.from_json(json) +# print the JSON string representation of the object +print(VOYAGEAuthConfig.to_json()) + +# convert the object into a dict +voyage_auth_config_dict = voyage_auth_config_instance.to_dict() +# create an instance of VOYAGEAuthConfig from a dict +voyage_auth_config_from_dict = VOYAGEAuthConfig.from_dict(voyage_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Vertex.md b/docs/Vertex.md new file mode 100644 index 0000000..93bcd49 --- /dev/null +++ b/docs/Vertex.md @@ -0,0 +1,31 @@ +# Vertex + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"VERTEX\") | +**config** | [**VERTEXAuthConfig**](VERTEXAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.vertex import Vertex + +# TODO update the JSON string below +json = "{}" +# create an instance of Vertex from a JSON string +vertex_instance = Vertex.from_json(json) +# print the JSON string representation of the object +print(Vertex.to_json()) + +# convert the object into a dict +vertex_dict = vertex_instance.to_dict() +# create an instance of Vertex from a dict +vertex_from_dict = Vertex.from_dict(vertex_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + 
diff --git a/docs/Vertex1.md b/docs/Vertex1.md new file mode 100644 index 0000000..75cb7c4 --- /dev/null +++ b/docs/Vertex1.md @@ -0,0 +1,29 @@ +# Vertex1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**VERTEXAuthConfig**](VERTEXAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.vertex1 import Vertex1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Vertex1 from a JSON string +vertex1_instance = Vertex1.from_json(json) +# print the JSON string representation of the object +print(Vertex1.to_json()) + +# convert the object into a dict +vertex1_dict = vertex1_instance.to_dict() +# create an instance of Vertex1 from a dict +vertex1_from_dict = Vertex1.from_dict(vertex1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Vertex2.md b/docs/Vertex2.md new file mode 100644 index 0000000..08db100 --- /dev/null +++ b/docs/Vertex2.md @@ -0,0 +1,30 @@ +# Vertex2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"VERTEX\") | + +## Example + +```python +from vectorize_client.models.vertex2 import Vertex2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Vertex2 from a JSON string +vertex2_instance = Vertex2.from_json(json) +# print the JSON string representation of the object +print(Vertex2.to_json()) + +# convert the object into a dict +vertex2_dict = vertex2_instance.to_dict() +# create an instance of Vertex2 from a dict +vertex2_from_dict = Vertex2.from_dict(vertex2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Voyage.md b/docs/Voyage.md new file mode 100644 index 0000000..9ff5813 --- /dev/null +++ b/docs/Voyage.md @@ -0,0 +1,31 @@ +# Voyage + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"VOYAGE\") | +**config** | [**VOYAGEAuthConfig**](VOYAGEAuthConfig.md) | | + +## Example + +```python +from vectorize_client.models.voyage import Voyage + +# TODO update the JSON string below +json = "{}" +# create an instance of Voyage from a JSON string +voyage_instance = Voyage.from_json(json) +# print the JSON string representation of the object +print(Voyage.to_json()) + +# convert the object into a dict +voyage_dict = voyage_instance.to_dict() +# create an instance of Voyage from a dict +voyage_from_dict = Voyage.from_dict(voyage_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Voyage1.md b/docs/Voyage1.md new file mode 100644 index 0000000..86631df --- /dev/null +++ b/docs/Voyage1.md @@ -0,0 +1,29 @@ +# Voyage1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**VOYAGEAuthConfig**](VOYAGEAuthConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.voyage1 import Voyage1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Voyage1 from a JSON string +voyage1_instance = Voyage1.from_json(json) +# print the JSON string representation of the object +print(Voyage1.to_json()) + +# convert the object into a dict +voyage1_dict = voyage1_instance.to_dict() +# create an instance of Voyage1 from a dict +voyage1_from_dict = 
Voyage1.from_dict(voyage1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Voyage2.md b/docs/Voyage2.md new file mode 100644 index 0000000..636a5ba --- /dev/null +++ b/docs/Voyage2.md @@ -0,0 +1,30 @@ +# Voyage2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"VOYAGE\") | + +## Example + +```python +from vectorize_client.models.voyage2 import Voyage2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Voyage2 from a JSON string +voyage2_instance = Voyage2.from_json(json) +# print the JSON string representation of the object +print(Voyage2.to_json()) + +# convert the object into a dict +voyage2_dict = voyage2_instance.to_dict() +# create an instance of Voyage2 from a dict +voyage2_from_dict = Voyage2.from_dict(voyage2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/WEAVIATEAuthConfig.md b/docs/WEAVIATEAuthConfig.md new file mode 100644 index 0000000..333a5a9 --- /dev/null +++ b/docs/WEAVIATEAuthConfig.md @@ -0,0 +1,32 @@ +# WEAVIATEAuthConfig + +Authentication configuration for Weaviate + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name for your Weaviate integration | +**host** | **str** | Endpoint. Example: Enter your Weaviate Cluster REST Endpoint | +**api_key** | **str** | API Key. 
Example: Enter your API key | + +## Example + +```python +from vectorize_client.models.weaviate_auth_config import WEAVIATEAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of WEAVIATEAuthConfig from a JSON string +weaviate_auth_config_instance = WEAVIATEAuthConfig.from_json(json) +# print the JSON string representation of the object +print(WEAVIATEAuthConfig.to_json()) + +# convert the object into a dict +weaviate_auth_config_dict = weaviate_auth_config_instance.to_dict() +# create an instance of WEAVIATEAuthConfig from a dict +weaviate_auth_config_from_dict = WEAVIATEAuthConfig.from_dict(weaviate_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/WEAVIATEConfig.md b/docs/WEAVIATEConfig.md new file mode 100644 index 0000000..62c6582 --- /dev/null +++ b/docs/WEAVIATEConfig.md @@ -0,0 +1,30 @@ +# WEAVIATEConfig + +Configuration for Weaviate connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**collection** | **str** | Collection Name. 
Example: Enter collection name | + +## Example + +```python +from vectorize_client.models.weaviate_config import WEAVIATEConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of WEAVIATEConfig from a JSON string +weaviate_config_instance = WEAVIATEConfig.from_json(json) +# print the JSON string representation of the object +print(WEAVIATEConfig.to_json()) + +# convert the object into a dict +weaviate_config_dict = weaviate_config_instance.to_dict() +# create an instance of WEAVIATEConfig from a dict +weaviate_config_from_dict = WEAVIATEConfig.from_dict(weaviate_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/WEBCRAWLERAuthConfig.md b/docs/WEBCRAWLERAuthConfig.md new file mode 100644 index 0000000..561ca7a --- /dev/null +++ b/docs/WEBCRAWLERAuthConfig.md @@ -0,0 +1,31 @@ +# WEBCRAWLERAuthConfig + +Authentication configuration for Web Crawler + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name. Example: Enter a descriptive name | +**seed_urls** | **str** | Seed URL(s). Add one or more seed URLs to crawl. The crawler will start from these URLs and follow links to other pages.. Example: (e.g. 
https://example.com) | + +## Example + +```python +from vectorize_client.models.webcrawler_auth_config import WEBCRAWLERAuthConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of WEBCRAWLERAuthConfig from a JSON string +webcrawler_auth_config_instance = WEBCRAWLERAuthConfig.from_json(json) +# print the JSON string representation of the object +print(WEBCRAWLERAuthConfig.to_json()) + +# convert the object into a dict +webcrawler_auth_config_dict = webcrawler_auth_config_instance.to_dict() +# create an instance of WEBCRAWLERAuthConfig from a dict +webcrawler_auth_config_from_dict = WEBCRAWLERAuthConfig.from_dict(webcrawler_auth_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/WEBCRAWLERConfig.md b/docs/WEBCRAWLERConfig.md new file mode 100644 index 0000000..e87a82a --- /dev/null +++ b/docs/WEBCRAWLERConfig.md @@ -0,0 +1,36 @@ +# WEBCRAWLERConfig + +Configuration for Web Crawler connector + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**allowed_domains_opt** | **str** | Additional Allowed URLs or prefix(es). Add one or more allowed URLs or URL prefixes. The crawler will read URLs that match these patterns in addition to the seed URL(s).. Example: (e.g. https://docs.example.com) | [optional] +**forbidden_paths** | **str** | Forbidden Paths. Example: Enter forbidden paths (e.g. /admin) | [optional] +**min_time_between_requests** | **float** | Throttle (ms). Example: Enter minimum time between requests in milliseconds | [optional] [default to 500] +**max_error_count** | **float** | Max Error Count. Example: Enter maximum error count | [optional] [default to 5] +**max_urls** | **float** | Max URLs. Example: Enter maximum number of URLs to crawl | [optional] [default to 1000] +**max_depth** | **float** | Max Depth. 
Example: Enter maximum crawl depth | [optional] [default to 50] +**reindex_interval_seconds** | **float** | Reindex Interval (seconds). Example: Enter reindex interval in seconds | [optional] [default to 3600] + +## Example + +```python +from vectorize_client.models.webcrawler_config import WEBCRAWLERConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of WEBCRAWLERConfig from a JSON string +webcrawler_config_instance = WEBCRAWLERConfig.from_json(json) +# print the JSON string representation of the object +print(WEBCRAWLERConfig.to_json()) + +# convert the object into a dict +webcrawler_config_dict = webcrawler_config_instance.to_dict() +# create an instance of WEBCRAWLERConfig from a dict +webcrawler_config_from_dict = WEBCRAWLERConfig.from_dict(webcrawler_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Weaviate.md b/docs/Weaviate.md new file mode 100644 index 0000000..f546d25 --- /dev/null +++ b/docs/Weaviate.md @@ -0,0 +1,31 @@ +# Weaviate + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"WEAVIATE\") | +**config** | [**WEAVIATEConfig**](WEAVIATEConfig.md) | | + +## Example + +```python +from vectorize_client.models.weaviate import Weaviate + +# TODO update the JSON string below +json = "{}" +# create an instance of Weaviate from a JSON string +weaviate_instance = Weaviate.from_json(json) +# print the JSON string representation of the object +print(Weaviate.to_json()) + +# convert the object into a dict +weaviate_dict = weaviate_instance.to_dict() +# create an instance of Weaviate from a dict +weaviate_from_dict = Weaviate.from_dict(weaviate_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to 
API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Weaviate1.md b/docs/Weaviate1.md new file mode 100644 index 0000000..2c6c0f5 --- /dev/null +++ b/docs/Weaviate1.md @@ -0,0 +1,29 @@ +# Weaviate1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**WEAVIATEConfig**](WEAVIATEConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.weaviate1 import Weaviate1 + +# TODO update the JSON string below +json = "{}" +# create an instance of Weaviate1 from a JSON string +weaviate1_instance = Weaviate1.from_json(json) +# print the JSON string representation of the object +print(Weaviate1.to_json()) + +# convert the object into a dict +weaviate1_dict = weaviate1_instance.to_dict() +# create an instance of Weaviate1 from a dict +weaviate1_from_dict = Weaviate1.from_dict(weaviate1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/Weaviate2.md b/docs/Weaviate2.md new file mode 100644 index 0000000..7a92014 --- /dev/null +++ b/docs/Weaviate2.md @@ -0,0 +1,30 @@ +# Weaviate2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"WEAVIATE\") | + +## Example + +```python +from vectorize_client.models.weaviate2 import Weaviate2 + +# TODO update the JSON string below +json = "{}" +# create an instance of Weaviate2 from a JSON string +weaviate2_instance = Weaviate2.from_json(json) +# print the JSON string representation of the object +print(Weaviate2.to_json()) + +# convert the object into a dict +weaviate2_dict = weaviate2_instance.to_dict() +# create an instance of Weaviate2 from a dict +weaviate2_from_dict = 
Weaviate2.from_dict(weaviate2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/WebCrawler.md b/docs/WebCrawler.md new file mode 100644 index 0000000..a928e44 --- /dev/null +++ b/docs/WebCrawler.md @@ -0,0 +1,31 @@ +# WebCrawler + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | Name of the connector | +**type** | **str** | Connector type (must be \"WEB_CRAWLER\") | +**config** | [**WEBCRAWLERConfig**](WEBCRAWLERConfig.md) | | + +## Example + +```python +from vectorize_client.models.web_crawler import WebCrawler + +# TODO update the JSON string below +json = "{}" +# create an instance of WebCrawler from a JSON string +web_crawler_instance = WebCrawler.from_json(json) +# print the JSON string representation of the object +print(WebCrawler.to_json()) + +# convert the object into a dict +web_crawler_dict = web_crawler_instance.to_dict() +# create an instance of WebCrawler from a dict +web_crawler_from_dict = WebCrawler.from_dict(web_crawler_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/WebCrawler1.md b/docs/WebCrawler1.md new file mode 100644 index 0000000..44dac8e --- /dev/null +++ b/docs/WebCrawler1.md @@ -0,0 +1,29 @@ +# WebCrawler1 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config** | [**WEBCRAWLERConfig**](WEBCRAWLERConfig.md) | | [optional] + +## Example + +```python +from vectorize_client.models.web_crawler1 import WebCrawler1 + +# TODO update the JSON string below +json = "{}" +# create an instance of WebCrawler1 from a JSON string +web_crawler1_instance = WebCrawler1.from_json(json) +# print the JSON 
string representation of the object +print(WebCrawler1.to_json()) + +# convert the object into a dict +web_crawler1_dict = web_crawler1_instance.to_dict() +# create an instance of WebCrawler1 from a dict +web_crawler1_from_dict = WebCrawler1.from_dict(web_crawler1_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/docs/WebCrawler2.md b/docs/WebCrawler2.md new file mode 100644 index 0000000..ba9988a --- /dev/null +++ b/docs/WebCrawler2.md @@ -0,0 +1,30 @@ +# WebCrawler2 + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Unique identifier for the connector | +**type** | **str** | Connector type (must be \"WEB_CRAWLER\") | + +## Example + +```python +from vectorize_client.models.web_crawler2 import WebCrawler2 + +# TODO update the JSON string below +json = "{}" +# create an instance of WebCrawler2 from a JSON string +web_crawler2_instance = WebCrawler2.from_json(json) +# print the JSON string representation of the object +print(WebCrawler2.to_json()) + +# convert the object into a dict +web_crawler2_dict = web_crawler2_instance.to_dict() +# create an instance of WebCrawler2 from a dict +web_crawler2_from_dict = WebCrawler2.from_dict(web_crawler2_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/git_push.sh b/git_push.sh new file mode 100644 index 0000000..f53a75d --- /dev/null +++ b/git_push.sh @@ -0,0 +1,57 @@ +#!/bin/sh +# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/ +# +# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com" + +git_user_id=$1 +git_repo_id=$2 +release_note=$3 +git_host=$4 + +if [ "$git_host" = 
"" ]; then + git_host="github.com" + echo "[INFO] No command line input provided. Set \$git_host to $git_host" +fi + +if [ "$git_user_id" = "" ]; then + git_user_id="GIT_USER_ID" + echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id" +fi + +if [ "$git_repo_id" = "" ]; then + git_repo_id="GIT_REPO_ID" + echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id" +fi + +if [ "$release_note" = "" ]; then + release_note="Minor update" + echo "[INFO] No command line input provided. Set \$release_note to $release_note" +fi + +# Initialize the local directory as a Git repository +git init + +# Adds the files in the local repository and stages them for commit. +git add . + +# Commits the tracked changes and prepares them to be pushed to a remote repository. +git commit -m "$release_note" + +# Sets the new remote +git_remote=$(git remote) +if [ "$git_remote" = "" ]; then # git remote not defined + + if [ "$GIT_TOKEN" = "" ]; then + echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment." 
+ git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git + else + git remote add origin https://${git_user_id}:"${GIT_TOKEN}"@${git_host}/${git_user_id}/${git_repo_id}.git + fi + +fi + +git pull origin master + +# Pushes (Forces) the changes in the local repository up to the remote repository +echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git" +git push origin master 2>&1 | grep -v 'To https' diff --git a/node_modules/.bin/markdown-it b/node_modules/.bin/markdown-it new file mode 120000 index 0000000..8a64108 --- /dev/null +++ b/node_modules/.bin/markdown-it @@ -0,0 +1 @@ +../markdown-it/bin/markdown-it.mjs \ No newline at end of file diff --git a/node_modules/.bin/tsc b/node_modules/.bin/tsc new file mode 120000 index 0000000..0863208 --- /dev/null +++ b/node_modules/.bin/tsc @@ -0,0 +1 @@ +../typescript/bin/tsc \ No newline at end of file diff --git a/node_modules/.bin/tsserver b/node_modules/.bin/tsserver new file mode 120000 index 0000000..f8f8f1a --- /dev/null +++ b/node_modules/.bin/tsserver @@ -0,0 +1 @@ +../typescript/bin/tsserver \ No newline at end of file diff --git a/node_modules/.bin/typedoc b/node_modules/.bin/typedoc new file mode 120000 index 0000000..8303b02 --- /dev/null +++ b/node_modules/.bin/typedoc @@ -0,0 +1 @@ +../typedoc/bin/typedoc \ No newline at end of file diff --git a/node_modules/.bin/yaml b/node_modules/.bin/yaml new file mode 120000 index 0000000..0368324 --- /dev/null +++ b/node_modules/.bin/yaml @@ -0,0 +1 @@ +../yaml/bin.mjs \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 0000000..abc510a --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,241 @@ +{ + "name": "vectorize-clients", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@gerrit0/mini-shiki": { + "version": "1.27.2", + "resolved": 
"https://registry.npmjs.org/@gerrit0/mini-shiki/-/mini-shiki-1.27.2.tgz", + "integrity": "sha512-GeWyHz8ao2gBiUW4OJnQDxXQnFgZQwwQk05t/CVVgNBN7/rK8XZ7xY6YhLVv9tH3VppWWmr9DCl3MwemB/i+Og==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/engine-oniguruma": "^1.27.2", + "@shikijs/types": "^1.27.2", + "@shikijs/vscode-textmate": "^10.0.1" + } + }, + "node_modules/@iarna/toml": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz", + "integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==", + "license": "ISC" + }, + "node_modules/@shikijs/engine-oniguruma": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-1.29.2.tgz", + "integrity": "sha512-7iiOx3SG8+g1MnlzZVDYiaeHe7Ez2Kf2HrJzdmGwkRisT7r4rak0e655AcM/tF9JG/kg5fMNYlLLKglbN7gBqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/types": "1.29.2", + "@shikijs/vscode-textmate": "^10.0.1" + } + }, + "node_modules/@shikijs/types": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-1.29.2.tgz", + "integrity": "sha512-VJjK0eIijTZf0QSTODEXCqinjBn0joAHQ+aPSBzrv4O2d/QSbsMw+ZeSRx03kV34Hy7NzUvV/7NqfYGRLrASmw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@shikijs/vscode-textmate": "^10.0.1", + "@types/hast": "^3.0.4" + } + }, + "node_modules/@shikijs/vscode-textmate": { + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz", + "integrity": "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"uc.micro": "^2.0.0" + } + }, + "node_modules/lunr": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", + "integrity": "sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==", + "dev": true, + "license": "MIT" + }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", + "dev": true, + "license": "MIT" + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/toml": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", + "integrity": 
"sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==", + "license": "MIT" + }, + "node_modules/typedoc": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.27.8.tgz", + "integrity": "sha512-q0/2TUunNEDmWkn23ULKGXieK8cgGuAmBUXC/HcZ/rgzMI9Yr4Nq3in1K1vT1NZ9zx6M78yTk3kmIPbwJgK5KA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@gerrit0/mini-shiki": "^1.24.0", + "lunr": "^2.3.9", + "markdown-it": "^14.1.0", + "minimatch": "^9.0.5", + "yaml": "^2.6.1" + }, + "bin": { + "typedoc": "bin/typedoc" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "typescript": "5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x || 5.5.x || 5.6.x || 5.7.x" + } + }, + "node_modules/typescript": { + "version": "5.7.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", + "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", + "dev": true, + "license": "MIT" + }, + "node_modules/yaml": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", + "integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14" + } + } + } +} diff --git a/node_modules/@gerrit0/mini-shiki/CHANGELOG.md b/node_modules/@gerrit0/mini-shiki/CHANGELOG.md new file mode 100644 index 0000000..9c8a67c --- /dev/null +++ 
b/node_modules/@gerrit0/mini-shiki/CHANGELOG.md @@ -0,0 +1,53 @@ +# Changelog + +## v1.27.2 (2025-01-16) + +- Update to Shiki v1.27.2 + +## v1.27.0 (2025-01-15) + +- Update to Shiki v1.27.0 + +## v1.26.1 (2025-01-04) + +- Update to Shiki v1.26.1 + +## v1.25.1 (2025-01-03) + +- Update to Shiki v1.25.1 + +## v1.24.4 (2024-12-22) + +- Update to Shiki v1.24.4 + +## v1.24.3 (2024-12-20) + +- Update to Shiki v1.24.3 + +## v1.24.4 (2024-12-13) + +- Update to Shiki v1.24.2 + +## v1.24.3 (2024-12-11) + +- Update to Shiki v1.24.2 + +## v1.24.2 (2024-12-10) + +- Update to Shiki v1.24.1 + +## v1.24.1 (2024-11-29) + +- Support `require` with Node's `--experimental-require-module` flag + +## v1.24.0 (2024-11-28) + +- Update to Shiki v1.24.0 + +## v1.23.2 (2024-11-24) + +- Fix publish, include built source + +## v1.23.1 (2024-11-24) + +- Initial release, Shiki v1.23.1 diff --git a/node_modules/@gerrit0/mini-shiki/LICENSE b/node_modules/@gerrit0/mini-shiki/LICENSE new file mode 100644 index 0000000..008c15d --- /dev/null +++ b/node_modules/@gerrit0/mini-shiki/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Gerrit Birkeland + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@gerrit0/mini-shiki/README.md b/node_modules/@gerrit0/mini-shiki/README.md new file mode 100644 index 0000000..3364665 --- /dev/null +++ b/node_modules/@gerrit0/mini-shiki/README.md @@ -0,0 +1,54 @@ +# @gerrit0/mini-shiki + +This is a re-bundled version of [Shiki](https://shiki.style/) which strips out +the dependencies which aren't necessary for [TypeDoc](https://typedoc.org/)'s usage. + +## Why? + +Compare Shiki's dependency tree: + + + +To this package's dependency tree: + + + +The Shiki maintainers [have declined](https://github.com/shikijs/shiki/issues/844) to split +up the package in a way which makes it possible to avoid these dependencies when just relying +on shikijs published packages. + +## Releases + +This package will be released and keep the same major/minor version numbers as Shiki. +Patch versions will generally be the same as Shiki, but may differ if adjustments are +necessary to fix compatibility issues. + +## ESM / CommonJS + +This package is ESM, but does not use top level await, so may be `require`d in +Node 23, or Node 20.17+ with the `--experimental-require-module` flag. 
+ +## Usage + +```js +import { + codeToTokensWithThemes, + createShikiInternal, + createOnigurumaEngine, + bundledLanguages, + bundledThemes, + loadBuiltinWasm, +} from "@gerrit0/mini-shiki"; + +await loadBuiltinWasm(); +const shiki = await createShikiInternal({ + engine: createOnigurumaEngine(), + langs: [bundledLanguages.typescript], + themes: [bundledThemes["light-plus"]], +}); + +const lines = codeToTokensWithThemes(shiki, "console.log('Hello world!')", { + themes: { light: "light-plus" }, + lang: "typescript", +}); +``` diff --git a/node_modules/@gerrit0/mini-shiki/package.json b/node_modules/@gerrit0/mini-shiki/package.json new file mode 100644 index 0000000..53ce215 --- /dev/null +++ b/node_modules/@gerrit0/mini-shiki/package.json @@ -0,0 +1,45 @@ +{ + "type": "module", + "license": "MIT", + "name": "@gerrit0/mini-shiki", + "version": "1.27.2", + "exports": { + ".": { + "types": "./dist/shiki.d.ts", + "default": "./dist/shiki.js" + }, + "./onig.wasm": { + "import": "./dist/onig.wasm" + } + }, + "repository": { + "type": "git", + "url": "git+https://github.com/Gerrit0/mini-shiki.git" + }, + "scripts": { + "build": "./scripts/build.sh", + "test": "node --experimental-require-module --test" + }, + "devDependencies": { + "@rollup/plugin-node-resolve": "15.3.0", + "@rollup/plugin-typescript": "12.1.1", + "@types/node": "^22.9.3", + "dts-bundle-generator": "^9.5.1", + "rollup": "4.27.4", + "rollup-plugin-dts": "^6.1.1", + "semver": "7.6.3", + "shiki": "^1.27.2" + }, + "files": [ + "static", + "dist", + "README.md", + "CHANGELOG.md", + "LICENSE" + ], + "dependencies": { + "@shikijs/engine-oniguruma": "^1.27.2", + "@shikijs/types": "^1.27.2", + "@shikijs/vscode-textmate": "^10.0.1" + } +} diff --git a/node_modules/@gerrit0/mini-shiki/static/mini-shiki-dependency-tree.svg b/node_modules/@gerrit0/mini-shiki/static/mini-shiki-dependency-tree.svg new file mode 100644 index 0000000..71a902f --- /dev/null +++ 
b/node_modules/@gerrit0/mini-shiki/static/mini-shiki-dependency-tree.svg @@ -0,0 +1,108 @@ + + + +@gerrit0/mini-shiki + + +@gerrit0/mini-shiki@1.23.1 + + +@gerrit0/mini-shiki@1.23.1 + + + + + +@shikijs/engine-oniguruma@1.23.1 + + +@shikijs/engine-oniguruma@1.23.1 + + + + + +@gerrit0/mini-shiki@1.23.1->@shikijs/engine-oniguruma@1.23.1 + + + + + +@shikijs/types@1.23.1 + + +@shikijs/types@1.23.1 + + + + + +@gerrit0/mini-shiki@1.23.1->@shikijs/types@1.23.1 + + + + + +@shikijs/vscode-textmate@9.3.0 + + +@shikijs/vscode-textmate@9.3.0 + + + + + +@gerrit0/mini-shiki@1.23.1->@shikijs/vscode-textmate@9.3.0 + + + + + +@shikijs/engine-oniguruma@1.23.1->@shikijs/types@1.23.1 + + + + + +@shikijs/engine-oniguruma@1.23.1->@shikijs/vscode-textmate@9.3.0 + + + + + +@shikijs/types@1.23.1->@shikijs/vscode-textmate@9.3.0 + + + + + +@types/hast@3.0.4 + + +@types/hast@3.0.4 + + + + + +@shikijs/types@1.23.1->@types/hast@3.0.4 + + + + + +@types/unist@3.0.3 + + +@types/unist@3.0.3 + + + + + +@types/hast@3.0.4->@types/unist@3.0.3 + + + + + \ No newline at end of file diff --git a/node_modules/@gerrit0/mini-shiki/static/shiki-dependency-tree.svg b/node_modules/@gerrit0/mini-shiki/static/shiki-dependency-tree.svg new file mode 100644 index 0000000..aec5e0f --- /dev/null +++ b/node_modules/@gerrit0/mini-shiki/static/shiki-dependency-tree.svg @@ -0,0 +1,786 @@ + + + +shiki + + +shiki@1.23.1 + + +shiki@1.23.1 + + + + + +@shikijs/core@1.23.1 + + +@shikijs/core@1.23.1 + + + + + +shiki@1.23.1->@shikijs/core@1.23.1 + + + + + +@shikijs/engine-javascript@1.23.1 + + +@shikijs/engine-javascript@1.23.1 + + + + + +shiki@1.23.1->@shikijs/engine-javascript@1.23.1 + + + + + +@shikijs/engine-oniguruma@1.23.1 + + +@shikijs/engine-oniguruma@1.23.1 + + + + + +shiki@1.23.1->@shikijs/engine-oniguruma@1.23.1 + + + + + +@shikijs/types@1.23.1 + + +@shikijs/types@1.23.1 + + + + + +shiki@1.23.1->@shikijs/types@1.23.1 + + + + + +@shikijs/vscode-textmate@9.3.0 + + +@shikijs/vscode-textmate@9.3.0 + + + + + 
+shiki@1.23.1->@shikijs/vscode-textmate@9.3.0 + + + + + +@types/hast@3.0.4 + + +@types/hast@3.0.4 + + + + + +shiki@1.23.1->@types/hast@3.0.4 + + + + + +@shikijs/core@1.23.1->@shikijs/engine-javascript@1.23.1 + + + + + +@shikijs/core@1.23.1->@shikijs/engine-oniguruma@1.23.1 + + + + + +@shikijs/core@1.23.1->@shikijs/types@1.23.1 + + + + + +@shikijs/core@1.23.1->@shikijs/vscode-textmate@9.3.0 + + + + + +@shikijs/core@1.23.1->@types/hast@3.0.4 + + + + + +hast-util-to-html@9.0.3 + + +hast-util-to-html@9.0.3 + + + + + +@shikijs/core@1.23.1->hast-util-to-html@9.0.3 + + + + + +@shikijs/engine-javascript@1.23.1->@shikijs/types@1.23.1 + + + + + +@shikijs/engine-javascript@1.23.1->@shikijs/vscode-textmate@9.3.0 + + + + + +oniguruma-to-es@0.4.1 + + +oniguruma-to-es@0.4.1 + + + + + +@shikijs/engine-javascript@1.23.1->oniguruma-to-es@0.4.1 + + + + + +@shikijs/engine-oniguruma@1.23.1->@shikijs/types@1.23.1 + + + + + +@shikijs/engine-oniguruma@1.23.1->@shikijs/vscode-textmate@9.3.0 + + + + + +@shikijs/types@1.23.1->@shikijs/vscode-textmate@9.3.0 + + + + + +@shikijs/types@1.23.1->@types/hast@3.0.4 + + + + + +@types/unist@3.0.3 + + +@types/unist@3.0.3 + + + + + +@types/hast@3.0.4->@types/unist@3.0.3 + + + + + +hast-util-to-html@9.0.3->@types/hast@3.0.4 + + + + + +hast-util-to-html@9.0.3->@types/unist@3.0.3 + + + + + +ccount@2.0.1 + + +ccount@2.0.1 + + + + + +hast-util-to-html@9.0.3->ccount@2.0.1 + + + + + +comma-separated-tokens@2.0.3 + + +comma-separated-tokens@2.0.3 + + + + + +hast-util-to-html@9.0.3->comma-separated-tokens@2.0.3 + + + + + +hast-util-whitespace@3.0.0 + + +hast-util-whitespace@3.0.0 + + + + + +hast-util-to-html@9.0.3->hast-util-whitespace@3.0.0 + + + + + +html-void-elements@3.0.0 + + +html-void-elements@3.0.0 + + + + + +hast-util-to-html@9.0.3->html-void-elements@3.0.0 + + + + + +mdast-util-to-hast@13.2.0 + + +mdast-util-to-hast@13.2.0 + + + + + +hast-util-to-html@9.0.3->mdast-util-to-hast@13.2.0 + + + + + +property-information@6.5.0 + + +property-information@6.5.0 
+ + + + + +hast-util-to-html@9.0.3->property-information@6.5.0 + + + + + +space-separated-tokens@2.0.2 + + +space-separated-tokens@2.0.2 + + + + + +hast-util-to-html@9.0.3->space-separated-tokens@2.0.2 + + + + + +stringify-entities@4.0.4 + + +stringify-entities@4.0.4 + + + + + +hast-util-to-html@9.0.3->stringify-entities@4.0.4 + + + + + +zwitch@2.0.4 + + +zwitch@2.0.4 + + + + + +hast-util-to-html@9.0.3->zwitch@2.0.4 + + + + + +emoji-regex-xs@1.0.0 + + +emoji-regex-xs@1.0.0 + + + + + +oniguruma-to-es@0.4.1->emoji-regex-xs@1.0.0 + + + + + +regex-recursion@4.2.1 + + +regex-recursion@4.2.1 + + + + + +oniguruma-to-es@0.4.1->regex-recursion@4.2.1 + + + + + +regex@5.0.2 + + +regex@5.0.2 + + + + + +oniguruma-to-es@0.4.1->regex@5.0.2 + + + + + +hast-util-whitespace@3.0.0->@types/hast@3.0.4 + + + + + +mdast-util-to-hast@13.2.0->@types/hast@3.0.4 + + + + + +@types/mdast@4.0.4 + + +@types/mdast@4.0.4 + + + + + +mdast-util-to-hast@13.2.0->@types/mdast@4.0.4 + + + + + +@ungap/structured-clone@1.2.0 + + +@ungap/structured-clone@1.2.0 + + + + + +mdast-util-to-hast@13.2.0->@ungap/structured-clone@1.2.0 + + + + + +devlop@1.1.0 + + +devlop@1.1.0 + + + + + +mdast-util-to-hast@13.2.0->devlop@1.1.0 + + + + + +micromark-util-sanitize-uri@2.0.1 + + +micromark-util-sanitize-uri@2.0.1 + + + + + +mdast-util-to-hast@13.2.0->micromark-util-sanitize-uri@2.0.1 + + + + + +trim-lines@3.0.1 + + +trim-lines@3.0.1 + + + + + +mdast-util-to-hast@13.2.0->trim-lines@3.0.1 + + + + + +unist-util-position@5.0.0 + + +unist-util-position@5.0.0 + + + + + +mdast-util-to-hast@13.2.0->unist-util-position@5.0.0 + + + + + +unist-util-visit@5.0.0 + + +unist-util-visit@5.0.0 + + + + + +mdast-util-to-hast@13.2.0->unist-util-visit@5.0.0 + + + + + +vfile@6.0.3 + + +vfile@6.0.3 + + + + + +mdast-util-to-hast@13.2.0->vfile@6.0.3 + + + + + +regex-utilities@2.3.0 + + +regex-utilities@2.3.0 + + + + + +regex-recursion@4.2.1->regex-utilities@2.3.0 + + + + + +regex@5.0.2->regex-utilities@2.3.0 + + + + + 
+character-entities-html4@2.1.0 + + +character-entities-html4@2.1.0 + + + + + +stringify-entities@4.0.4->character-entities-html4@2.1.0 + + + + + +character-entities-legacy@3.0.0 + + +character-entities-legacy@3.0.0 + + + + + +stringify-entities@4.0.4->character-entities-legacy@3.0.0 + + + + + +@types/mdast@4.0.4->@types/unist@3.0.3 + + + + + +dequal@2.0.3 + + +dequal@2.0.3 + + + + + +devlop@1.1.0->dequal@2.0.3 + + + + + +micromark-util-character@2.1.1 + + +micromark-util-character@2.1.1 + + + + + +micromark-util-sanitize-uri@2.0.1->micromark-util-character@2.1.1 + + + + + +micromark-util-encode@2.0.1 + + +micromark-util-encode@2.0.1 + + + + + +micromark-util-sanitize-uri@2.0.1->micromark-util-encode@2.0.1 + + + + + +micromark-util-symbol@2.0.1 + + +micromark-util-symbol@2.0.1 + + + + + +micromark-util-sanitize-uri@2.0.1->micromark-util-symbol@2.0.1 + + + + + +unist-util-position@5.0.0->@types/unist@3.0.3 + + + + + +unist-util-visit@5.0.0->@types/unist@3.0.3 + + + + + +unist-util-is@6.0.0 + + +unist-util-is@6.0.0 + + + + + +unist-util-visit@5.0.0->unist-util-is@6.0.0 + + + + + +unist-util-visit-parents@6.0.1 + + +unist-util-visit-parents@6.0.1 + + + + + +unist-util-visit@5.0.0->unist-util-visit-parents@6.0.1 + + + + + +vfile@6.0.3->@types/unist@3.0.3 + + + + + +vfile-message@4.0.2 + + +vfile-message@4.0.2 + + + + + +vfile@6.0.3->vfile-message@4.0.2 + + + + + +micromark-util-character@2.1.1->micromark-util-symbol@2.0.1 + + + + + +micromark-util-types@2.0.1 + + +micromark-util-types@2.0.1 + + + + + +micromark-util-character@2.1.1->micromark-util-types@2.0.1 + + + + + +unist-util-is@6.0.0->@types/unist@3.0.3 + + + + + +unist-util-visit-parents@6.0.1->@types/unist@3.0.3 + + + + + +unist-util-visit-parents@6.0.1->unist-util-is@6.0.0 + + + + + +vfile-message@4.0.2->@types/unist@3.0.3 + + + + + +unist-util-stringify-position@4.0.0 + + +unist-util-stringify-position@4.0.0 + + + + + +vfile-message@4.0.2->unist-util-stringify-position@4.0.0 + + + + + 
+unist-util-stringify-position@4.0.0->@types/unist@3.0.3 + + + + + diff --git a/node_modules/@iarna/toml/CHANGELOG.md b/node_modules/@iarna/toml/CHANGELOG.md new file mode 100755 index 0000000..21964f9 --- /dev/null +++ b/node_modules/@iarna/toml/CHANGELOG.md @@ -0,0 +1,278 @@ +# 2.2.5 + +* Docs: Updated benchmark results. Add fast-toml to result list. Improved benchmark layout. +* Update @sgarciac/bombadil and @ltd/j-toml in benchmarks and compliance tests. +* Dev: Some dev dep updates that shouldn't have any impact. + +# 2.2.4 + +* Bug fix: Plain date literals (not datetime) immediately followed by another statement (no whitespace or blank line) would crash. Fixes [#19](https://github.com/iarna/iarna-toml/issues/19) and [#23](https://github.com/iarna/iarna-toml/issues/23), thank you [@arnau](https://github.com/arnau) and [@jschaf](https://github.com/jschaf) for reporting this! +* Bug fix: Hex literals with lowercase Es would throw errors. (Thank you [@DaeCatt](https://github.com/DaeCatt) for this fix!) Fixed [#20](https://github.com/iarna/iarna-toml/issues/20) +* Some minor doc tweaks +* Added Node 12 and 13 to Travis. (Node 6 is failing there now, mysteriously. It works on my machine™, shipping anyway. 🙃) + +# 2.2.3 + +This release just updates the spec compliance tests and benchmark data to +better represent @ltd/j-toml. + +# 2.2.2 + +## Fixes + +* Support parsing and stringifying objects with `__proto__` properties. ([@LongTengDao](https://github.com/LongTengDao)) + +## Misc + +* Updates for spec compliance and benchmarking: + * @sgarciac/bombadil -> 2.1.0 + * toml -> 3.0.0 +* Added spec compliance and benchmarking for: + * @ltd/j-toml + +# 2.2.1 + +## Fixes + +* Fix bug where keys with names matching javascript Object methods would + error. Thanks [@LongTengDao](https://github.com/LongTengDao) for finding this! +* Fix bug where a bundled version would fail if `util.inspect` wasn't + provided. 
This was supposed to be guarded against, but there was a bug in + the guard. Thanks [@agriffis](https://github.com/agriffis) for finding and fixing this! + +## Misc + +* Update the version of bombadil for spec compliance and benchmarking purposes to 2.0.0 + +## Did you know? + +Node 6 and 8 are measurably slower than Node 6, 10 and 11, at least when it comes to parsing TOML! + +![](https://pbs.twimg.com/media/DtDeVjmU4AE5apz.jpg) + +# 2.2.0 + +## Features + +* Typescript: Lots of improvements to our type definitions, many many to + [@jorgegonzalez](https://github.com/jorgegonzalez) and [@momocow](https://github.com/momocow) for working through these. + +## Fixes + +* Very large integers (>52bit) are stored as BigInts on runtimes that + support them. BigInts are 128bits, but the TOML spec limits its integers + to 64bits. We now limit our integers to 64bits + as well. +* Fix a bug in stringify where control characters were being emitted as unicode chars and not escape sequences. + +## Misc + +* Moved our spec tests out to an external repo +* Improved the styling of the spec compliance comparison + +# 2.1.1 + +## Fixes + +* Oops, type defs didn't end up in the tarball, ty [@jorgegonzalez](https://github.com/jorgegonzalez)‼ + +# 2.1.0 + +## Features + +* Types for typescript support, thank you [@momocow](https://github.com/momocow)! + +## Fixes + +* stringify: always strip invalid dates. This fixes a bug where an + invalid date in an inline array would not be removed and would instead + result in an error. +* stringify: if an invalid type is found make sure it's thrown as an + error object. Previously the type name was, unhelpfully, being thrown. +* stringify: Multiline strings ending in a quote would generate invalid TOML. +* parse: Error if a signed integer has a leading zero, eg, `-01` or `+01`. +* parse: Error if \_ appears at the end of the integer part of a float, eg `1_.0`. \_ is only valid between _digits_. 
+ +## Fun + +* BurntSushi's comprehensive TOML 0.4.0 test suite is now used in addition to our existing test suite. +* You can see exactly how the other JS TOML libraries stack up in testing + against both BurntSushi's tests and my own in the new + [TOML-SPEC-SUPPORT](TOML-SPEC-SUPPORT.md) doc. + +# 2.0.0 + +With 2.0.0, @iarna/toml supports the TOML v0.5.0 specification. TOML 0.5.0 +brings some changes: + +* Delete characters (U+007F) are not allowed in plain strings. You can include them with + escaped unicode characters, eg `\u007f`. +* Integers are specified as being 64bit unsigned values. These are + supported using `BigInt`s if you are using Node 10 or later. +* Keys may be literal strings, that is, you can use single quoted strings to + quote key names, so the following is now valid: + 'a"b"c' = 123 +* The floating point values `nan`, `inf` and `-inf` are supported. The stringifier will no + longer strip NaN, Infinity and -Infinity, instead serializing them as these new values.. +* Datetimes can separate the date and time with a space instead of a T, so + `2017-12-01T00:00:00Z` can be written as `2017-12-01 00:00:00Z`. +* Datetimes can be floating, that is, they can be represented without a timezone. + These are represented in javascript as Date objects whose `isFloating` property is true and + whose `toISOString` method will return a representation without a timezone. +* Dates without times are now supported. Dates do not have timezones. Dates + are represented in javascript as a Date object whose `isDate` property is true and + whose `toISOString` method returns just the date. +* Times without dates are now supported. Times do not have timezones. Times + are represented in javascript as a Date object whose `isTime` property is true and + whose `toISOString` method returns just the time. +* Keys can now include dots to directly address deeper structures, so `a.b = 23` is + the equivalent of `a = {b = 23}` or ```[a] +b = 23```. 
These can be used both as keys to regular tables and inline tables. +* Integers can now be specified in binary, octal and hexadecimal by prefixing the + number with `0b`, `0o` and `0x` respectively. It is now illegal to left + pad a decimal value with zeros. + +Some parser details were also fixed: + +* Negative zero (`-0.0`) and positive zero (`0.0`) are distinct floating point values. +* Negative integer zero (`-0`) is not distinguished from positive zero (`0`). + +# 1.7.1 + +Another 18% speed boost on our overall benchmarks! This time it came from +switching from string comparisons to integer by converting each character to +its respective code point. This also necessitated rewriting the boolean +parser to actually parse character-by-character as it should. End-of-stream +is now marked with a numeric value outside of the Unicode range, rather than +a Symbol, meaning that the parser's char property is now monomorphic. + +Bug fix, previously, `'abc''def'''` was accepted (as the value: `abcdef`). +Now it will correctly raise an error. + +Spec tests now run against bombadil as well (it fails some, which is unsurprising +given its incomplete state). + +# 1.7.0 + +This release features an overall 15% speed boost on our benchmarks. This +came from a few things: + +* Date parsing was rewritten to not use regexps, resulting in a huge speed increase. +* Strings of all kinds and bare keywords now use tight loops to collect characters when this will help. +* Regexps in general were mostly removed. This didn't result in a speed + change, but it did allow refactoring the parser to be a lot easier to + follow. +* The internal state tracking now uses a class and is constructed with a + fixed set of properties, allowing v8's optimizer to be more effective. + +In the land of new features: + +* Errors in the syntax of your TOML will now have the `fromTOML` property + set to true. This is in addition to the `line`, `col` and `pos` + properties they already have. 
+ + The main use of this is to make it possible to distinguish between errors + in the TOML and errors in the parser code itself. This is of particular utility + when testing parse errors. + +# 1.6.0 + +**FIXES** + +* TOML.stringify: Allow toJSON properties that aren't functions, to align with JSON.stringify's behavior. +* TOML.stringify: Don't use ever render keys as literal strings. +* TOML.stringify: Don't try to escape control characters in literal strings. + +**FEATURES** + +* New Export: TOML.stringify.value, for encoding a stand alone inline value as TOML would. This produces + a TOML fragment, not a complete valid document. + +# 1.5.6 + +* String literals are NOT supported as key names. +* Accessing a shallower table after accessing it more deeply is ok and no longer crashes, eg: + ```toml + [a.b] + [a] + ``` +* Unicode characters in the reserved range now crash. +* Empty bare keys, eg `[.abc]` or `[]` now crash. +* Multiline backslash trimming supports CRs. +* Multiline post quote trimming supports CRs. +* Strings may not contain bare control chars (0x00-0x1f), except for \n, \r and \t. + +# 1.5.5 + +* Yet MORE README fixes. 🙃 + +# 1.5.4 + +* README fix + +# 1.5.3 + +* Benchmarks! +* More tests! +* More complete LICENSE information (some dev files are from other, MIT + licensed, projects, this is now more explicitly documented.) + +# 1.5.2 + +* parse: Arrays with mixed types now throw errors, per the spec. +* parse: Fix a parser bug that would result in errors when trying to parse arrays of numbers or dates + that were not separated by a space from the closing ]. +* parse: Fix a bug in the error pretty printer that resulted in errors on + the first line not getting the pretty print treatment. +* stringify: Fix long standing bug where an array of Numbers, some of which required + decimals, would be emitted in a way that parsers would treat as mixed + Integer and Float values. 
Now if any Numbers in an array must be + represented with a decimal then all will be emitted such that parsers will + understand them to be Float. + +# 1.5.1 + +* README fix + +# 1.5.0 + +* A brand new TOML parser, from scratch, that performs like `toml-j0.4` + without the crashes and with vastly better error messages. +* 100% test coverage for both the new parser and the existing stringifier. Some subtle bugs squashed! + +# v1.4.2 + +* Revert fallback due to its having issues with the same files. (New plan + will be to write my own.) + +# v1.4.1 + +* Depend on both `toml` and `toml-j0.4` with fallback from the latter to the + former when the latter crashes. + +# v1.4.0 + +* Ducktype dates to make them compatible with `moment` and other `Date` replacements. + +# v1.3.1 + +* Update docs with new toml module. + +# v1.3.0 + +* Switch from `toml` to `toml-j0.4`, which is between 20x and 200x faster. + (The larger the input, the faster it is compared to `toml`). + +# v1.2.0 + +* Return null when passed in null as the top level object. +* Detect and skip invalid dates and numbers + +# v1.1.0 + +* toJSON transformations are now honored (for everything except Date objects, as JSON represents them as strings). +* Undefined/null values no longer result in exceptions, they now just result in the associated key being elided. + +# v1.0.1 + +* Initial release diff --git a/node_modules/@iarna/toml/LICENSE b/node_modules/@iarna/toml/LICENSE new file mode 100755 index 0000000..51bcf57 --- /dev/null +++ b/node_modules/@iarna/toml/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2016, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/@iarna/toml/README.md b/node_modules/@iarna/toml/README.md new file mode 100755 index 0000000..1958324 --- /dev/null +++ b/node_modules/@iarna/toml/README.md @@ -0,0 +1,301 @@ +# @iarna/toml + +Better TOML parsing and stringifying all in that familiar JSON interface. + +[![Coverage Status](https://coveralls.io/repos/github/iarna/iarna-toml/badge.svg)](https://coveralls.io/github/iarna/iarna-toml) + +# ** TOML 0.5.0 ** + +### TOML Spec Support + +The most recent version as of 2018-07-26: [v0.5.0](https://github.com/mojombo/toml/blob/master/versions/en/toml-v0.5.0.md) + +### Example + +```js +const TOML = require('@iarna/toml') +const obj = TOML.parse(`[abc] +foo = 123 +bar = [1,2,3]`) +/* obj = +{abc: {foo: 123, bar: [1,2,3]}} +*/ +const str = TOML.stringify(obj) +/* str = +[abc] +foo = 123 +bar = [ 1, 2, 3 ] +*/ +``` + +Visit the project github [for more examples](https://github.com/iarna/iarna-toml/tree/latest/examples)! + + +## Why @iarna/toml + +* See [TOML-SPEC-SUPPORT](https://shared.by.re-becca.org/misc/TOML-SPEC-SUPPORT.html) + for a comparison of which TOML features are supported by the various + Node.js TOML parsers. +* BigInt support on Node 10! +* 100% test coverage. +* Fast parsing. It's as much as 100 times + faster than `toml` and 3 times faster than `toml-j0.4`. However a recent + newcomer [`@ltd/j-toml`](https://www.npmjs.com/package/@ltd/j-toml) has + appeared with 0.5 support and astoundingly fast parsing speeds for large + text blocks. 
All I can say is you'll have to test your specific work loads + if you want to know which of @iarna/toml and @ltd/j-toml is faster for + you, as we currently excell in different areas. +* Careful adherence to spec. Tests go beyond simple coverage. +* Smallest parser bundle (if you use `@iarna/toml/parse-string`). +* No deps. +* Detailed and easy to read error messages‼ + +```console +> TOML.parse(src) +Error: Unexpected character, expecting string, number, datetime, boolean, inline array or inline table at row 6, col 5, pos 87: +5: "abc\"" = { abc=123,def="abc" } +6> foo=sdkfj + ^ +7: +``` + +## TOML.parse(str) → Object [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse.js) + +Also available with: `require('@iarna/toml/parse-string')` + +Synchronously parse a TOML string and return an object. + + +## TOML.stringify(obj) → String [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/stringify.js) + +Also available with: `require('@iarna/toml/stringify)` + +Serialize an object as TOML. + +## [your-object].toJSON + +If an object `TOML.stringify` is serializing has a `toJSON` method then it +will call it to transform the object before serializing it. This matches +the behavior of `JSON.stringify`. + +The one exception to this is that `toJSON` is not called for `Date` objects +because `JSON` represents dates as strings and TOML can represent them natively. + +[`moment`](https://www.npmjs.com/package/moment) objects are treated the +same as native `Date` objects, in this respect. + +## TOML.stringify.value(obj) -> String + +Also available with: `require('@iarna/toml/stringify').value` + +Serialize a value as TOML would. This is a fragment and not a complete +valid TOML document. + +## Promises and Streaming + +The parser provides alternative async and streaming interfaces, for times +that you're working with really absurdly big TOML files and don't want to +tie-up the event loop while it parses. 
+ +### TOML.parse.async(str[, opts]) → Promise(Object) [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-async.js) + +Also available with: `require('@iarna/toml/parse-async')` + +`opts.blocksize` is the amount text to parser per pass through the event loop. Defaults to 40kb. + +Asynchronously parse a TOML string and return a promise of the resulting object. + +### TOML.parse.stream(readable) → Promise(Object) [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-stream-readable.js) + +Also available with: `require('@iarna/toml/parse-stream')` + +Given a readable stream, parse it as it feeds us data. Return a promise of the resulting object. + +### readable.pipe(TOML.parse.stream()) → Transform [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-stream-through.js) + +Also available with: `require('@iarna/toml/parse-stream')` + +Returns a transform stream in object mode. When it completes, emit the +resulting object. Only one object will ever be emitted. + +## Lowlevel Interface [(example)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-lowlevel.js) [(example w/ parser debugging)](https://github.com/iarna/iarna-toml/blob/latest/examples/parse-lowlevel-debug.js) + +You construct a parser object, per TOML file you want to process: + +```js +const TOMLParser = require('@iarna/toml/lib/toml-parser.js') +const parser = new TOMLParser() +``` + +Then you call the `parse` method for each chunk as you read them, or in a +single call: + +```js +parser.parse(`hello = 'world'`) +``` + +And finally, you call the `finish` method to complete parsing and retrieve +the resulting object. + +```js +const data = parser.finish() +``` + +Both the `parse` method and `finish` method will throw if they find a +problem with the string they were given. Error objects thrown from the +parser have `pos`, `line` and `col` attributes. 
`TOML.parse` adds a visual
+summary of where in the source string there were issues using
+`parse-pretty-error` and you can too:
+
+```js
+const prettyError = require('./parse-pretty-error.js')
+const newErr = prettyError(err, sourceString)
+```
+
+## What's Different
+
+Version 2 of this module supports TOML 0.5.0. Other modules currently
+published to the npm registry support 0.4.0. 0.5.0 is mostly backwards
+compatible with 0.4.0, but if you have need, you can install @iarna/toml@1
+to get a version of this module that supports 0.4.0. Please see the
+[CHANGELOG](CHANGELOG.md#2.0.0) for details on exactly what's changed.
+
+## TOML we can't do
+
+* `-nan` is a valid TOML value and is converted into `NaN`. There is no way to
+  produce `-nan` when stringifying. Stringification will produce positive `nan`.
+* Detecting and erroring on invalid utf8 documents: This is because Node's
+  UTF8 processing converts invalid sequences into the placeholder character
+  and does not have facilities for reporting these as errors instead. We
+  _can_ detect the placeholder character, but it's valid to intentionally
+  include them in documents, so erroring on them is not great.
+* On versions of Node < 10, very large Integer values will lose precision.
+  On Node >=10, bigints are used.
+* Floating/local dates and times are still represented by JavaScript Date
+  objects, which don't actually support these concepts. The objects
+  returned have been modified so that you can determine what kind of thing
+  they are (with `isFloating`, `isDate`, `isTime` properties) and that
+  their ISO representation (via `toISOString`) is representative of their
+  TOML value. They will correctly round trip if you pass them to
+  `TOML.stringify`.
+* Binary, hexadecimal and octal values are converted to ordinary integers and
+  will be decimal if you stringify them.
+ +## Changes + +I write a by hand, honest-to-god, +[CHANGELOG](https://github.com/iarna/iarna-toml/blob/latest/CHANGELOG.md) +for this project. It's a description of what went into a release that you +the consumer of the module could care about, not a list of git commits, so +please check it out! + +## Benchmarks + +You can run them yourself with: + +```console +$ npm run benchmark +``` + +The results below are from my desktop using Node 13.13.0. The library +versions tested were `@iarna/toml@2.2.4`, `toml-j0.4@1.1.1`, `toml@3.0.0`, +`@sgarciac/bombadil@2.3.0`, `@ltd/j-toml@0.5.107`, and `fast-toml@0.5.4`. The speed value is +megabytes-per-second that the parser can process of that document type. +Bigger is better. The percentage after average results is the margin of error. + +New here is fast-toml. fast-toml is very fast, for some datatypes, but it +also is missing most error checking demanded by the spec. For 0.4, it is +complete except for detail of multiline strings caught by the compliance +tests. Its support for 0.5 is incomplete. Check out the +[spec compliance](https://shared.by.re-becca.org/misc/TOML-SPEC-SUPPORT.html) doc +for details. + +As this table is getting a little wide, with how npm and github display it, +you can also view it seperately in the +[BENCHMARK](https://shared.by.re-becca.org/misc/BENCHMARK.html) document. + +| | @iarna/toml | toml-j0.4 | toml | @sgarciac/bombadil | @ltd/j-toml | fast-toml | +| - | :---------: | :-------: | :--: | :----------------: | :---------: | :-------: | +| **Overall** | 28MB/sec
0.35% | 6.5MB/sec
0.25% | 0.2MB/sec
0.70% | - | 35MB/sec
0.23% | - | +| **Spec Example: v0.4.0** | 26MB/sec
0.37% | 10MB/sec
0.27% | 1MB/sec
0.42% | 1.2MB/sec
0.95% | 28MB/sec
0.31% | - | +| **Spec Example: Hard Unicode** | 64MB/sec
0.59% | 18MB/sec
0.12% | 2MB/sec
0.20% | 0.6MB/sec
0.53% | 68MB/sec
0.31% | 78MB/sec
0.28% | +| **Types: Array, Inline** | 7.3MB/sec
0.60% | 4MB/sec
0.16% | 0.1MB/sec
0.91% | 1.3MB/sec
0.81% | 10MB/sec
0.35% | 9MB/sec
0.16% | +| **Types: Array** | 6.8MB/sec
0.19% | 6.7MB/sec
0.15% | 0.2MB/sec
0.79% | 1.2MB/sec
0.93% | 8.8MB/sec
0.47% | 27MB/sec
0.21% | +| **Types: Boolean,** | 21MB/sec
0.20% | 9.4MB/sec
0.17% | 0.2MB/sec
0.96% | 1.8MB/sec
0.70% | 16MB/sec
0.20% | 8.4MB/sec
0.22% | +| **Types: Datetime** | 18MB/sec
0.14% | 11MB/sec
0.15% | 0.3MB/sec
0.85% | 1.6MB/sec
0.45% | 9.8MB/sec
0.48% | 6.5MB/sec
0.23% | +| **Types: Float** | 8.8MB/sec
0.09% | 5.9MB/sec
0.14% | 0.2MB/sec
0.51% | 2.1MB/sec
0.82% | 14MB/sec
0.15% | 7.9MB/sec
0.14% | +| **Types: Int** | 5.9MB/sec
0.11% | 4.5MB/sec
0.28% | 0.1MB/sec
0.78% | 1.5MB/sec
0.64% | 10MB/sec
0.14% | 8MB/sec
0.17% | +| **Types: Literal String, 7 char** | 26MB/sec
0.29% | 8.5MB/sec
0.32% | 0.3MB/sec
0.84% | 2.3MB/sec
1.02% | 23MB/sec
0.15% | 13MB/sec
0.15% | +| **Types: Literal String, 92 char** | 46MB/sec
0.19% | 11MB/sec
0.20% | 0.3MB/sec
0.56% | 12MB/sec
0.92% | 101MB/sec
0.17% | 75MB/sec
0.29% | +| **Types: Literal String, Multiline, 1079 char** | 22MB/sec
0.42% | 6.7MB/sec
0.55% | 0.9MB/sec
0.78% | 44MB/sec
1.00% | 350MB/sec
0.16% | 636MB/sec
0.16% | +| **Types: Basic String, 7 char** | 25MB/sec
0.15% | 7.3MB/sec
0.18% | 0.2MB/sec
0.96% | 2.2MB/sec
1.09% | 14MB/sec
0.16% | 12MB/sec
0.22% | +| **Types: Basic String, 92 char** | 43MB/sec
0.30% | 7.2MB/sec
0.16% | 0.1MB/sec
4.04% | 12MB/sec
1.33% | 71MB/sec
0.19% | 70MB/sec
0.23% | +| **Types: Basic String, 1079 char** | 24MB/sec
0.45% | 5.8MB/sec
0.17% | 0.1MB/sec
3.64% | 44MB/sec
1.05% | 93MB/sec
0.29% | 635MB/sec
0.28% | +| **Types: Table, Inline** | 9.7MB/sec
0.10% | 5.5MB/sec
0.22% | 0.1MB/sec
0.87% | 1.4MB/sec
1.18% | 8.7MB/sec
0.60% | 8.7MB/sec
0.22% | +| **Types: Table** | 7.1MB/sec
0.14% | 5.6MB/sec
0.42% | 0.1MB/sec
0.65% | 1.4MB/sec
1.11% | 7.4MB/sec
0.70% | 18MB/sec
0.20% | +| **Scaling: Array, Inline, 1000 elements** | 40MB/sec
0.21% | 2.4MB/sec
0.19% | 0.1MB/sec
0.35% | 1.6MB/sec
1.02% | 17MB/sec
0.15% | 32MB/sec
0.16% | +| **Scaling: Array, Nested, 1000 deep** | 2MB/sec
0.15% | 1.7MB/sec
0.26% | 0.3MB/sec
0.58% | - | 1.8MB/sec
0.74% | 13MB/sec
0.20% | +| **Scaling: Literal String, 40kb** | 61MB/sec
0.18% | 10MB/sec
0.15% | 3MB/sec
0.84% | 12MB/sec
0.51% | 551MB/sec
0.44% | 19kMB/sec
0.19% | +| **Scaling: Literal String, Multiline, 40kb** | 62MB/sec
0.16% | 5MB/sec
0.45% | 0.2MB/sec
1.70% | 11MB/sec
0.74% | 291MB/sec
0.24% | 21kMB/sec
0.22% | +| **Scaling: Basic String, Multiline, 40kb** | 62MB/sec
0.18% | 5.8MB/sec
0.38% | 2.9MB/sec
0.86% | 11MB/sec
0.41% | 949MB/sec
0.44% | 26kMB/sec
0.16% | +| **Scaling: Basic String, 40kb** | 59MB/sec
0.20% | 6.3MB/sec
0.17% | 0.2MB/sec
1.95% | 12MB/sec
0.44% | 508MB/sec
0.35% | 18kMB/sec
0.15% | +| **Scaling: Table, Inline, 1000 elements** | 28MB/sec
0.12% | 8.2MB/sec
0.19% | 0.3MB/sec
0.89% | 2.3MB/sec
1.14% | 5.3MB/sec
0.24% | 13MB/sec
0.20% | +| **Scaling: Table, Inline, Nested, 1000 deep** | 7.8MB/sec
0.28% | 5MB/sec
0.20% | 0.1MB/sec
0.84% | - | 3.2MB/sec
0.52% | 10MB/sec
0.23% | + +## Tests + +The test suite is maintained at 100% coverage: [![Coverage Status](https://coveralls.io/repos/github/iarna/iarna-toml/badge.svg)](https://coveralls.io/github/iarna/iarna-toml) + +The spec was carefully hand converted into a series of test framework +independent (and mostly language independent) assertions, as pairs of TOML +and YAML files. You can find those files here: +[spec-test](https://github.com/iarna/iarna-toml/blob/latest/test/spec-test/). +A number of examples of invalid Unicode were also written, but are difficult +to make use of in Node.js where Unicode errors are silently hidden. You can +find those here: [spec-test-disabled](https://github.com/iarna/iarna-toml/blob/latest/test/spec-test-disabled/). + +Further tests were written to increase coverage to 100%, these may be more +implementation specific, but they can be found in [coverage](https://github.com/iarna/iarna-toml/blob/latest/test/coverage.js) and +[coverage-error](https://github.com/iarna/iarna-toml/blob/latest/test/coverage-error.js). + +I've also written some quality assurance style tests, which don't contribute +to coverage but do cover scenarios that could easily be problematic for some +implementations can be found in: +[test/qa.js](https://github.com/iarna/iarna-toml/blob/latest/test/qa.js) and +[test/qa-error.js](https://github.com/iarna/iarna-toml/blob/latest/test/qa-error.js). + +All of the official example files from the TOML spec are run through this +parser and compared to the official YAML files when available. 
These files are from the TOML spec as of: +[357a4ba6](https://github.com/toml-lang/toml/tree/357a4ba6782e48ff26e646780bab11c90ed0a7bc) +and specifically are: + +* [github.com/toml-lang/toml/tree/357a4ba6/examples](https://github.com/toml-lang/toml/tree/357a4ba6782e48ff26e646780bab11c90ed0a7bc/examples) +* [github.com/toml-lang/toml/tree/357a4ba6/tests](https://github.com/toml-lang/toml/tree/357a4ba6782e48ff26e646780bab11c90ed0a7bc/tests) + +The stringifier is tested by round-tripping these same files, asserting that +`TOML.parse(sourcefile)` deepEqual +`TOML.parse(TOML.stringify(TOML.parse(sourcefile))`. This is done in +[test/roundtrip-examples.js](https://github.com/iarna/iarna-toml/blob/latest/test/round-tripping.js) +There are also some tests written to complete coverage from stringification in: +[test/stringify.js](https://github.com/iarna/iarna-toml/blob/latest/test/stringify.js) + +Tests for the async and streaming interfaces are in [test/async.js](https://github.com/iarna/iarna-toml/blob/latest/test/async.js) and [test/stream.js](https://github.com/iarna/iarna-toml/blob/latest/test/stream.js) respectively. + +Tests for the parsers debugging mode live in [test/devel.js](https://github.com/iarna/iarna-toml/blob/latest/test/devel.js). + +And finally, many more stringification tests were borrowed from [@othiym23](https://github.com/othiym23)'s +[toml-stream](https://npmjs.com/package/toml-stream) module. They were fetched as of +[b6f1e26b572d49742d49fa6a6d11524d003441fa](https://github.com/othiym23/toml-stream/tree/b6f1e26b572d49742d49fa6a6d11524d003441fa/test) and live in +[test/toml-stream](https://github.com/iarna/iarna-toml/blob/latest/test/toml-stream/). + +## Improvements to make + +* In stringify: + * Any way to produce comments. As a JSON stand-in I'm not too worried + about this. That said, a document orientated fork is something I'd like + to look at eventually… + * Stringification could use some work on its error reporting. 
It reports + _what's_ wrong, but not where in your data structure it was. +* Further optimize the parser: + * There are some debugging assertions left in the main parser, these should be moved to a subclass. + * Make the whole debugging parser thing work as a mixin instead of as a superclass. diff --git a/node_modules/@iarna/toml/index.d.ts b/node_modules/@iarna/toml/index.d.ts new file mode 100755 index 0000000..d37e2b6 --- /dev/null +++ b/node_modules/@iarna/toml/index.d.ts @@ -0,0 +1,57 @@ +import { Transform } from "stream"; + +type JsonArray = boolean[] | number[] | string[] | JsonMap[] | Date[] +type AnyJson = boolean | number | string | JsonMap | Date | JsonArray | JsonArray[] + +interface JsonMap { + [key: string]: AnyJson; +} + +interface ParseOptions { + /** + * The amount text to parser per pass through the event loop. Defaults to 40kb (`40000`). + */ + blocksize: number +} + +interface FuncParse { + /** + * Synchronously parse a TOML string and return an object. + */ + (toml: string): JsonMap + + /** + * Asynchronously parse a TOML string and return a promise of the resulting object. + */ + async (toml: string, options?: ParseOptions): Promise + + /** + * Given a readable stream, parse it as it feeds us data. Return a promise of the resulting object. + */ + stream (readable: NodeJS.ReadableStream): Promise + stream (): Transform +} + +interface FuncStringify { + /** + * Serialize an object as TOML. + * + * If an object `TOML.stringify` is serializing has a `toJSON` method + * then it will call it to transform the object before serializing it. + * This matches the behavior of JSON.stringify. + * + * The one exception to this is that `toJSON` is not called for `Date` objects + * because JSON represents dates as strings and TOML can represent them natively. + * + * `moment` objects are treated the same as native `Date` objects, in this respect. + */ + (obj: JsonMap): string + + /** + * Serialize a value as TOML would. 
This is a fragment and not a complete valid TOML document. + */ + value (any: AnyJson): string +} + +export const parse: FuncParse +export const stringify: FuncStringify diff --git a/node_modules/@iarna/toml/package.json b/node_modules/@iarna/toml/package.json new file mode 100755 index 0000000..71f9e82 --- /dev/null +++ b/node_modules/@iarna/toml/package.json @@ -0,0 +1,82 @@ +{ + "name": "@iarna/toml", + "version": "2.2.5", + "main": "toml.js", + "scripts": { + "test": "tap -J --100 test/*.js test/toml-stream/*.js", + "benchmark": "node benchmark.js && node benchmark-per-file.js && node results2table.js", + "prerelease": "npm t", + "prepack": "rm -f *~", + "postpublish": "git push --follow-tags", + "pretest": "iarna-standard", + "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'", + "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'", + "setup-burntsushi-toml-suite": "[ -d test/burntsushi-toml-test ] || (git clone https://github.com/BurntSushi/toml-test test/burntsushi-toml-test; rimraf test/burntsushi-toml-test/.git/hooks/*); cd test/burntsushi-toml-test; git pull", + "setup-iarna-toml-suite": "[ -d test/spec-test ] || (git clone https://github.com/iarna/toml-spec-tests -b 0.5.0 test/spec-test; rimraf test/spec-test/.git/hooks/*); cd test/spec-test; git pull", + "prepare": "npm run setup-burntsushi-toml-suite && npm run setup-iarna-toml-suite" + }, + "keywords": [ + "toml", + "toml-parser", + "toml-stringifier", + "parser", + "stringifer", + "emitter", + "ini", + "tomlify", + "encoder", + "decoder" + ], + "author": "Rebecca Turner (http://re-becca.org/)", + "license": "ISC", + "description": "Better TOML parsing and stringifying all in that familiar JSON interface.", + "dependencies": {}, + "devDependencies": { + "@iarna/standard": "^2.0.2", + "@ltd/j-toml": "^0.5.107", + "@perl/qx": "^1.1.0", + "@sgarciac/bombadil": 
"^2.3.0", + "ansi": "^0.3.1", + "approximate-number": "^2.0.0", + "benchmark": "^2.1.4", + "fast-toml": "^0.5.4", + "funstream": "^4.2.0", + "glob": "^7.1.6", + "js-yaml": "^3.13.1", + "rimraf": "^3.0.2", + "tap": "^12.0.1", + "toml": "^3.0.0", + "toml-j0.4": "^1.1.1", + "weallbehave": "*", + "weallcontribute": "*" + }, + "files": [ + "toml.js", + "stringify.js", + "parse.js", + "parse-string.js", + "parse-stream.js", + "parse-async.js", + "parse-pretty-error.js", + "lib/parser.js", + "lib/parser-debug.js", + "lib/toml-parser.js", + "lib/create-datetime.js", + "lib/create-date.js", + "lib/create-datetime-float.js", + "lib/create-time.js", + "lib/format-num.js", + "index.d.ts" + ], + "directories": { + "test": "test" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/iarna/iarna-toml.git" + }, + "bugs": { + "url": "https://github.com/iarna/iarna-toml/issues" + }, + "homepage": "https://github.com/iarna/iarna-toml#readme" +} diff --git a/node_modules/@iarna/toml/parse-async.js b/node_modules/@iarna/toml/parse-async.js new file mode 100755 index 0000000..e5ff090 --- /dev/null +++ b/node_modules/@iarna/toml/parse-async.js @@ -0,0 +1,30 @@ +'use strict' +module.exports = parseAsync + +const TOMLParser = require('./lib/toml-parser.js') +const prettyError = require('./parse-pretty-error.js') + +function parseAsync (str, opts) { + if (!opts) opts = {} + const index = 0 + const blocksize = opts.blocksize || 40960 + const parser = new TOMLParser() + return new Promise((resolve, reject) => { + setImmediate(parseAsyncNext, index, blocksize, resolve, reject) + }) + function parseAsyncNext (index, blocksize, resolve, reject) { + if (index >= str.length) { + try { + return resolve(parser.finish()) + } catch (err) { + return reject(prettyError(err, str)) + } + } + try { + parser.parse(str.slice(index, index + blocksize)) + setImmediate(parseAsyncNext, index + blocksize, blocksize, resolve, reject) + } catch (err) { + reject(prettyError(err, str)) + } + } +} 
diff --git a/node_modules/@iarna/toml/parse-pretty-error.js b/node_modules/@iarna/toml/parse-pretty-error.js new file mode 100755 index 0000000..fc0d31f --- /dev/null +++ b/node_modules/@iarna/toml/parse-pretty-error.js @@ -0,0 +1,33 @@ +'use strict' +module.exports = prettyError + +function prettyError (err, buf) { + /* istanbul ignore if */ + if (err.pos == null || err.line == null) return err + let msg = err.message + msg += ` at row ${err.line + 1}, col ${err.col + 1}, pos ${err.pos}:\n` + + /* istanbul ignore else */ + if (buf && buf.split) { + const lines = buf.split(/\n/) + const lineNumWidth = String(Math.min(lines.length, err.line + 3)).length + let linePadding = ' ' + while (linePadding.length < lineNumWidth) linePadding += ' ' + for (let ii = Math.max(0, err.line - 1); ii < Math.min(lines.length, err.line + 2); ++ii) { + let lineNum = String(ii + 1) + if (lineNum.length < lineNumWidth) lineNum = ' ' + lineNum + if (err.line === ii) { + msg += lineNum + '> ' + lines[ii] + '\n' + msg += linePadding + ' ' + for (let hh = 0; hh < err.col; ++hh) { + msg += ' ' + } + msg += '^\n' + } else { + msg += lineNum + ': ' + lines[ii] + '\n' + } + } + } + err.message = msg + '\n' + return err +} diff --git a/node_modules/@iarna/toml/parse-stream.js b/node_modules/@iarna/toml/parse-stream.js new file mode 100755 index 0000000..fb9a644 --- /dev/null +++ b/node_modules/@iarna/toml/parse-stream.js @@ -0,0 +1,80 @@ +'use strict' +module.exports = parseStream + +const stream = require('stream') +const TOMLParser = require('./lib/toml-parser.js') + +function parseStream (stm) { + if (stm) { + return parseReadable(stm) + } else { + return parseTransform(stm) + } +} + +function parseReadable (stm) { + const parser = new TOMLParser() + stm.setEncoding('utf8') + return new Promise((resolve, reject) => { + let readable + let ended = false + let errored = false + function finish () { + ended = true + if (readable) return + try { + resolve(parser.finish()) + } catch (err) { + 
reject(err) + } + } + function error (err) { + errored = true + reject(err) + } + stm.once('end', finish) + stm.once('error', error) + readNext() + + function readNext () { + readable = true + let data + while ((data = stm.read()) !== null) { + try { + parser.parse(data) + } catch (err) { + return error(err) + } + } + readable = false + /* istanbul ignore if */ + if (ended) return finish() + /* istanbul ignore if */ + if (errored) return + stm.once('readable', readNext) + } + }) +} + +function parseTransform () { + const parser = new TOMLParser() + return new stream.Transform({ + objectMode: true, + transform (chunk, encoding, cb) { + try { + parser.parse(chunk.toString(encoding)) + } catch (err) { + this.emit('error', err) + } + cb() + }, + flush (cb) { + try { + this.push(parser.finish()) + } catch (err) { + this.emit('error', err) + } + cb() + } + }) +} diff --git a/node_modules/@iarna/toml/parse-string.js b/node_modules/@iarna/toml/parse-string.js new file mode 100755 index 0000000..84ff7d4 --- /dev/null +++ b/node_modules/@iarna/toml/parse-string.js @@ -0,0 +1,18 @@ +'use strict' +module.exports = parseString + +const TOMLParser = require('./lib/toml-parser.js') +const prettyError = require('./parse-pretty-error.js') + +function parseString (str) { + if (global.Buffer && global.Buffer.isBuffer(str)) { + str = str.toString('utf8') + } + const parser = new TOMLParser() + try { + parser.parse(str) + return parser.finish() + } catch (err) { + throw prettyError(err, str) + } +} diff --git a/node_modules/@iarna/toml/parse.js b/node_modules/@iarna/toml/parse.js new file mode 100755 index 0000000..923b9d3 --- /dev/null +++ b/node_modules/@iarna/toml/parse.js @@ -0,0 +1,5 @@ +'use strict' +module.exports = require('./parse-string.js') +module.exports.async = require('./parse-async.js') +module.exports.stream = require('./parse-stream.js') +module.exports.prettyError = require('./parse-pretty-error.js') diff --git a/node_modules/@iarna/toml/stringify.js 
b/node_modules/@iarna/toml/stringify.js new file mode 100755 index 0000000..958caae --- /dev/null +++ b/node_modules/@iarna/toml/stringify.js @@ -0,0 +1,296 @@ +'use strict' +module.exports = stringify +module.exports.value = stringifyInline + +function stringify (obj) { + if (obj === null) throw typeError('null') + if (obj === void (0)) throw typeError('undefined') + if (typeof obj !== 'object') throw typeError(typeof obj) + + if (typeof obj.toJSON === 'function') obj = obj.toJSON() + if (obj == null) return null + const type = tomlType(obj) + if (type !== 'table') throw typeError(type) + return stringifyObject('', '', obj) +} + +function typeError (type) { + return new Error('Can only stringify objects, not ' + type) +} + +function arrayOneTypeError () { + return new Error("Array values can't have mixed types") +} + +function getInlineKeys (obj) { + return Object.keys(obj).filter(key => isInline(obj[key])) +} +function getComplexKeys (obj) { + return Object.keys(obj).filter(key => !isInline(obj[key])) +} + +function toJSON (obj) { + let nobj = Array.isArray(obj) ? [] : Object.prototype.hasOwnProperty.call(obj, '__proto__') ? {['__proto__']: undefined} : {} + for (let prop of Object.keys(obj)) { + if (obj[prop] && typeof obj[prop].toJSON === 'function' && !('toISOString' in obj[prop])) { + nobj[prop] = obj[prop].toJSON() + } else { + nobj[prop] = obj[prop] + } + } + return nobj +} + +function stringifyObject (prefix, indent, obj) { + obj = toJSON(obj) + var inlineKeys + var complexKeys + inlineKeys = getInlineKeys(obj) + complexKeys = getComplexKeys(obj) + var result = [] + var inlineIndent = indent || '' + inlineKeys.forEach(key => { + var type = tomlType(obj[key]) + if (type !== 'undefined' && type !== 'null') { + result.push(inlineIndent + stringifyKey(key) + ' = ' + stringifyAnyInline(obj[key], true)) + } + }) + if (result.length > 0) result.push('') + var complexIndent = prefix && inlineKeys.length > 0 ? 
indent + ' ' : '' + complexKeys.forEach(key => { + result.push(stringifyComplex(prefix, complexIndent, key, obj[key])) + }) + return result.join('\n') +} + +function isInline (value) { + switch (tomlType(value)) { + case 'undefined': + case 'null': + case 'integer': + case 'nan': + case 'float': + case 'boolean': + case 'string': + case 'datetime': + return true + case 'array': + return value.length === 0 || tomlType(value[0]) !== 'table' + case 'table': + return Object.keys(value).length === 0 + /* istanbul ignore next */ + default: + return false + } +} + +function tomlType (value) { + if (value === undefined) { + return 'undefined' + } else if (value === null) { + return 'null' + /* eslint-disable valid-typeof */ + } else if (typeof value === 'bigint' || (Number.isInteger(value) && !Object.is(value, -0))) { + return 'integer' + } else if (typeof value === 'number') { + return 'float' + } else if (typeof value === 'boolean') { + return 'boolean' + } else if (typeof value === 'string') { + return 'string' + } else if ('toISOString' in value) { + return isNaN(value) ? 
'undefined' : 'datetime' + } else if (Array.isArray(value)) { + return 'array' + } else { + return 'table' + } +} + +function stringifyKey (key) { + var keyStr = String(key) + if (/^[-A-Za-z0-9_]+$/.test(keyStr)) { + return keyStr + } else { + return stringifyBasicString(keyStr) + } +} + +function stringifyBasicString (str) { + return '"' + escapeString(str).replace(/"/g, '\\"') + '"' +} + +function stringifyLiteralString (str) { + return "'" + str + "'" +} + +function numpad (num, str) { + while (str.length < num) str = '0' + str + return str +} + +function escapeString (str) { + return str.replace(/\\/g, '\\\\') + .replace(/[\b]/g, '\\b') + .replace(/\t/g, '\\t') + .replace(/\n/g, '\\n') + .replace(/\f/g, '\\f') + .replace(/\r/g, '\\r') + /* eslint-disable no-control-regex */ + .replace(/([\u0000-\u001f\u007f])/, c => '\\u' + numpad(4, c.codePointAt(0).toString(16))) + /* eslint-enable no-control-regex */ +} + +function stringifyMultilineString (str) { + let escaped = str.split(/\n/).map(str => { + return escapeString(str).replace(/"(?="")/g, '\\"') + }).join('\n') + if (escaped.slice(-1) === '"') escaped += '\\\n' + return '"""\n' + escaped + '"""' +} + +function stringifyAnyInline (value, multilineOk) { + let type = tomlType(value) + if (type === 'string') { + if (multilineOk && /\n/.test(value)) { + type = 'string-multiline' + } else if (!/[\b\t\n\f\r']/.test(value) && /"/.test(value)) { + type = 'string-literal' + } + } + return stringifyInline(value, type) +} + +function stringifyInline (value, type) { + /* istanbul ignore if */ + if (!type) type = tomlType(value) + switch (type) { + case 'string-multiline': + return stringifyMultilineString(value) + case 'string': + return stringifyBasicString(value) + case 'string-literal': + return stringifyLiteralString(value) + case 'integer': + return stringifyInteger(value) + case 'float': + return stringifyFloat(value) + case 'boolean': + return stringifyBoolean(value) + case 'datetime': + return 
stringifyDatetime(value) + case 'array': + return stringifyInlineArray(value.filter(_ => tomlType(_) !== 'null' && tomlType(_) !== 'undefined' && tomlType(_) !== 'nan')) + case 'table': + return stringifyInlineTable(value) + /* istanbul ignore next */ + default: + throw typeError(type) + } +} + +function stringifyInteger (value) { + /* eslint-disable security/detect-unsafe-regex */ + return String(value).replace(/\B(?=(\d{3})+(?!\d))/g, '_') +} + +function stringifyFloat (value) { + if (value === Infinity) { + return 'inf' + } else if (value === -Infinity) { + return '-inf' + } else if (Object.is(value, NaN)) { + return 'nan' + } else if (Object.is(value, -0)) { + return '-0.0' + } + var chunks = String(value).split('.') + var int = chunks[0] + var dec = chunks[1] || 0 + return stringifyInteger(int) + '.' + dec +} + +function stringifyBoolean (value) { + return String(value) +} + +function stringifyDatetime (value) { + return value.toISOString() +} + +function isNumber (type) { + return type === 'float' || type === 'integer' +} +function arrayType (values) { + var contentType = tomlType(values[0]) + if (values.every(_ => tomlType(_) === contentType)) return contentType + // mixed integer/float, emit as floats + if (values.every(_ => isNumber(tomlType(_)))) return 'float' + return 'mixed' +} +function validateArray (values) { + const type = arrayType(values) + if (type === 'mixed') { + throw arrayOneTypeError() + } + return type +} + +function stringifyInlineArray (values) { + values = toJSON(values) + const type = validateArray(values) + var result = '[' + var stringified = values.map(_ => stringifyInline(_, type)) + if (stringified.join(', ').length > 60 || /\n/.test(stringified)) { + result += '\n ' + stringified.join(',\n ') + '\n' + } else { + result += ' ' + stringified.join(', ') + (stringified.length > 0 ? 
' ' : '') + } + return result + ']' +} + +function stringifyInlineTable (value) { + value = toJSON(value) + var result = [] + Object.keys(value).forEach(key => { + result.push(stringifyKey(key) + ' = ' + stringifyAnyInline(value[key], false)) + }) + return '{ ' + result.join(', ') + (result.length > 0 ? ' ' : '') + '}' +} + +function stringifyComplex (prefix, indent, key, value) { + var valueType = tomlType(value) + /* istanbul ignore else */ + if (valueType === 'array') { + return stringifyArrayOfTables(prefix, indent, key, value) + } else if (valueType === 'table') { + return stringifyComplexTable(prefix, indent, key, value) + } else { + throw typeError(valueType) + } +} + +function stringifyArrayOfTables (prefix, indent, key, values) { + values = toJSON(values) + validateArray(values) + var firstValueType = tomlType(values[0]) + /* istanbul ignore if */ + if (firstValueType !== 'table') throw typeError(firstValueType) + var fullKey = prefix + stringifyKey(key) + var result = '' + values.forEach(table => { + if (result.length > 0) result += '\n' + result += indent + '[[' + fullKey + ']]\n' + result += stringifyObject(fullKey + '.', indent, table) + }) + return result +} + +function stringifyComplexTable (prefix, indent, key, value) { + var fullKey = prefix + stringifyKey(key) + var result = '' + if (getInlineKeys(value).length > 0) { + result += indent + '[' + fullKey + ']\n' + } + return result + stringifyObject(fullKey + '.', indent, value) +} diff --git a/node_modules/@iarna/toml/toml.js b/node_modules/@iarna/toml/toml.js new file mode 100755 index 0000000..edca17c --- /dev/null +++ b/node_modules/@iarna/toml/toml.js @@ -0,0 +1,3 @@ +'use strict' +exports.parse = require('./parse.js') +exports.stringify = require('./stringify.js') diff --git a/node_modules/@shikijs/engine-oniguruma/LICENSE b/node_modules/@shikijs/engine-oniguruma/LICENSE new file mode 100644 index 0000000..6a62718 --- /dev/null +++ b/node_modules/@shikijs/engine-oniguruma/LICENSE @@ -0,0 +1,22 
@@ +MIT License + +Copyright (c) 2021 Pine Wu +Copyright (c) 2023 Anthony Fu + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@shikijs/engine-oniguruma/README.md b/node_modules/@shikijs/engine-oniguruma/README.md new file mode 100644 index 0000000..7955dea --- /dev/null +++ b/node_modules/@shikijs/engine-oniguruma/README.md @@ -0,0 +1,9 @@ +# @shikijs/engine-oniguruma + +Engine for Shiki using Oniguruma RegExp engine in WebAssembly. 
+ +[Documentation](https://shiki.style/guide/regex-engines) + +## License + +MIT diff --git a/node_modules/@shikijs/engine-oniguruma/package.json b/node_modules/@shikijs/engine-oniguruma/package.json new file mode 100644 index 0000000..6189931 --- /dev/null +++ b/node_modules/@shikijs/engine-oniguruma/package.json @@ -0,0 +1,59 @@ +{ + "name": "@shikijs/engine-oniguruma", + "type": "module", + "version": "1.29.2", + "description": "Engine for Shiki using Oniguruma RegExp engine in WebAssembly", + "author": "Anthony Fu ", + "license": "MIT", + "homepage": "https://github.com/shikijs/shiki#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/shikijs/shiki.git", + "directory": "packages/engine-oniguruma" + }, + "bugs": "https://github.com/shikijs/shiki/issues", + "keywords": [ + "shiki", + "shiki-engine", + "oniguruma" + ], + "sideEffects": false, + "exports": { + ".": { + "types": "./dist/index.d.mts", + "default": "./dist/index.mjs" + }, + "./wasm-inlined": { + "types": "./dist/wasm-inlined.d.mts", + "default": "./dist/wasm-inlined.mjs" + } + }, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "typesVersions": { + "*": { + "wasm-inlined": [ + "./dist/wasm-inlined.d.mts" + ], + "*": [ + "./dist/*", + "./*" + ] + } + }, + "files": [ + "dist" + ], + "dependencies": { + "@shikijs/vscode-textmate": "^10.0.1", + "@shikijs/types": "1.29.2" + }, + "devDependencies": { + "vscode-oniguruma": "1.7.0" + }, + "scripts": { + "build": "rimraf dist && rollup -c", + "dev": "rollup -cw" + } +} \ No newline at end of file diff --git a/node_modules/@shikijs/types/LICENSE b/node_modules/@shikijs/types/LICENSE new file mode 100644 index 0000000..6a62718 --- /dev/null +++ b/node_modules/@shikijs/types/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2021 Pine Wu +Copyright (c) 2023 Anthony Fu + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated 
documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@shikijs/types/README.md b/node_modules/@shikijs/types/README.md new file mode 100644 index 0000000..371182c --- /dev/null +++ b/node_modules/@shikijs/types/README.md @@ -0,0 +1,7 @@ +# @shikijs/types + +Types for Shiki. 
+ +## License + +MIT diff --git a/node_modules/@shikijs/types/package.json b/node_modules/@shikijs/types/package.json new file mode 100644 index 0000000..9c7fb28 --- /dev/null +++ b/node_modules/@shikijs/types/package.json @@ -0,0 +1,39 @@ +{ + "name": "@shikijs/types", + "type": "module", + "version": "1.29.2", + "description": "Type definitions for Shiki", + "author": "Anthony Fu ", + "license": "MIT", + "homepage": "https://github.com/shikijs/shiki#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/shikijs/shiki.git", + "directory": "packages/types" + }, + "bugs": "https://github.com/shikijs/shiki/issues", + "keywords": [ + "shiki" + ], + "sideEffects": false, + "exports": { + ".": { + "types": "./dist/index.d.mts", + "default": "./dist/index.mjs" + } + }, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist" + ], + "dependencies": { + "@shikijs/vscode-textmate": "^10.0.1", + "@types/hast": "^3.0.4" + }, + "scripts": { + "build": "unbuild", + "dev": "unbuild --stub" + } +} \ No newline at end of file diff --git a/node_modules/@shikijs/vscode-textmate/LICENSE.md b/node_modules/@shikijs/vscode-textmate/LICENSE.md new file mode 100644 index 0000000..5ae193c --- /dev/null +++ b/node_modules/@shikijs/vscode-textmate/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Microsoft Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@shikijs/vscode-textmate/README.md b/node_modules/@shikijs/vscode-textmate/README.md new file mode 100644 index 0000000..28d9f6b --- /dev/null +++ b/node_modules/@shikijs/vscode-textmate/README.md @@ -0,0 +1,9 @@ +## Fork of [`microsoft/vscode-textmate`](https://github.com/microsoft/vscode-textmate) + +Changes make in this fork: + +- Change all `async` operations to `sync`; `onigLib` option now required to be resolved instead of a promise. +- Use `tsup` to bundle the lib, ship as a single file ES module. +- Remove debug flags and some other unnecessary exports. 
+- Convert `EncodedTokenAttributes` from namespace to class, rename to `EncodedTokenMetadata` +- Support RegExp literals in grammar object ([#3](https://github.com/shikijs/vscode-textmate/pull/3)) diff --git a/node_modules/@shikijs/vscode-textmate/package.json b/node_modules/@shikijs/vscode-textmate/package.json new file mode 100644 index 0000000..9d35e9c --- /dev/null +++ b/node_modules/@shikijs/vscode-textmate/package.json @@ -0,0 +1,46 @@ +{ + "name": "@shikijs/vscode-textmate", + "version": "10.0.2", + "type": "module", + "description": "Shiki's fork of `vscode-textmate`", + "author": { + "name": "Microsoft Corporation" + }, + "exports": { + ".": "./dist/index.js" + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/shikijs/vscode-textmate" + }, + "files": [ + "dist" + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/shikijs/vscode-textmate/issues" + }, + "devDependencies": { + "@types/mocha": "^9.1.1", + "@types/node": "^16.18.121", + "bumpp": "^9.9.0", + "mocha": "^9.2.2", + "tsup": "^8.3.5", + "tsx": "^4.19.2", + "typescript": "^5.7.2", + "vscode-oniguruma": "^1.7.0" + }, + "scripts": { + "build": "tsup", + "test": "mocha --ui=tdd ./src/tests/all.test.ts", + "benchmark": "node benchmark/benchmark.js", + "inspect": "tsx src/tests/inspect.ts", + "typecheck": "tsc --noEmit", + "tmconvert": "node scripts/tmconvert.js", + "version": "npm run compile && npm run test", + "postversion": "git push && git push --tags", + "release": "bumpp && pnpm publish" + } +} \ No newline at end of file diff --git a/node_modules/@types/hast/LICENSE b/node_modules/@types/hast/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/hast/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/hast/README.md b/node_modules/@types/hast/README.md new file mode 100644 index 0000000..b548c80 --- /dev/null +++ b/node_modules/@types/hast/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/hast` + +# Summary +This package contains type definitions for hast (https://github.com/syntax-tree/hast). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/hast. + +### Additional Details + * Last updated: Tue, 30 Jan 2024 21:35:45 GMT + * Dependencies: [@types/unist](https://npmjs.com/package/@types/unist) + +# Credits +These definitions were written by [lukeggchapman](https://github.com/lukeggchapman), [Junyoung Choi](https://github.com/rokt33r), [Christian Murphy](https://github.com/ChristianMurphy), and [Remco Haszing](https://github.com/remcohaszing). 
diff --git a/node_modules/@types/hast/index.d.ts b/node_modules/@types/hast/index.d.ts new file mode 100644 index 0000000..122b5b4 --- /dev/null +++ b/node_modules/@types/hast/index.d.ts @@ -0,0 +1,282 @@ +import type { Data as UnistData, Literal as UnistLiteral, Node as UnistNode, Parent as UnistParent } from "unist"; + +// ## Interfaces + +/** + * Info associated with hast nodes by the ecosystem. + * + * This space is guaranteed to never be specified by unist or hast. + * But you can use it in utilities and plugins to store data. + * + * This type can be augmented to register custom data. + * For example: + * + * ```ts + * declare module 'hast' { + * interface Data { + * // `someNode.data.myId` is typed as `number | undefined` + * myId?: number | undefined + * } + * } + * ``` + */ +export interface Data extends UnistData {} + +/** + * Info associated with an element. + */ +export interface Properties { + [PropertyName: string]: boolean | number | string | null | undefined | Array; +} + +// ## Content maps + +/** + * Union of registered hast nodes that can occur in {@link Element}. + * + * To register mote custom hast nodes, add them to {@link ElementContentMap}. + * They will be automatically added here. + */ +export type ElementContent = ElementContentMap[keyof ElementContentMap]; + +/** + * Registry of all hast nodes that can occur as children of {@link Element}. + * + * For a union of all {@link Element} children, see {@link ElementContent}. + */ +export interface ElementContentMap { + comment: Comment; + element: Element; + text: Text; +} + +/** + * Union of registered hast nodes that can occur in {@link Root}. + * + * To register custom hast nodes, add them to {@link RootContentMap}. + * They will be automatically added here. + */ +export type RootContent = RootContentMap[keyof RootContentMap]; + +/** + * Registry of all hast nodes that can occur as children of {@link Root}. + * + * > 👉 **Note**: {@link Root} does not need to be an entire document. 
+ * > it can also be a fragment. + * + * For a union of all {@link Root} children, see {@link RootContent}. + */ +export interface RootContentMap { + comment: Comment; + doctype: Doctype; + element: Element; + text: Text; +} + +// ### Special content types + +/** + * Union of registered hast nodes that can occur in {@link Root}. + * + * @deprecated Use {@link RootContent} instead. + */ +export type Content = RootContent; + +/** + * Union of registered hast literals. + * + * To register custom hast nodes, add them to {@link RootContentMap} and other + * places where relevant. + * They will be automatically added here. + */ +export type Literals = Extract; + +/** + * Union of registered hast nodes. + * + * To register custom hast nodes, add them to {@link RootContentMap} and other + * places where relevant. + * They will be automatically added here. + */ +export type Nodes = Root | RootContent; + +/** + * Union of registered hast parents. + * + * To register custom hast nodes, add them to {@link RootContentMap} and other + * places where relevant. + * They will be automatically added here. + */ +export type Parents = Extract; + +// ## Abstract nodes + +/** + * Abstract hast node. + * + * This interface is supposed to be extended. + * If you can use {@link Literal} or {@link Parent}, you should. + * But for example in HTML, a `Doctype` is neither literal nor parent, but + * still a node. + * + * To register custom hast nodes, add them to {@link RootContentMap} and other + * places where relevant (such as {@link ElementContentMap}). + * + * For a union of all registered hast nodes, see {@link Nodes}. + */ +export interface Node extends UnistNode { + /** + * Info from the ecosystem. + */ + data?: Data | undefined; +} + +/** + * Abstract hast node that contains the smallest possible value. + * + * This interface is supposed to be extended if you make custom hast nodes. + * + * For a union of all registered hast literals, see {@link Literals}. 
+ */ +export interface Literal extends Node { + /** + * Plain-text value. + */ + value: string; +} + +/** + * Abstract hast node that contains other hast nodes (*children*). + * + * This interface is supposed to be extended if you make custom hast nodes. + * + * For a union of all registered hast parents, see {@link Parents}. + */ +export interface Parent extends Node { + /** + * List of children. + */ + children: RootContent[]; +} + +// ## Concrete nodes + +/** + * HTML comment. + */ +export interface Comment extends Literal { + /** + * Node type of HTML comments in hast. + */ + type: "comment"; + /** + * Data associated with the comment. + */ + data?: CommentData | undefined; +} + +/** + * Info associated with hast comments by the ecosystem. + */ +export interface CommentData extends Data {} + +/** + * HTML document type. + */ +export interface Doctype extends UnistNode { + /** + * Node type of HTML document types in hast. + */ + type: "doctype"; + /** + * Data associated with the doctype. + */ + data?: DoctypeData | undefined; +} + +/** + * Info associated with hast doctypes by the ecosystem. + */ +export interface DoctypeData extends Data {} + +/** + * HTML element. + */ +export interface Element extends Parent { + /** + * Node type of elements. + */ + type: "element"; + /** + * Tag name (such as `'body'`) of the element. + */ + tagName: string; + /** + * Info associated with the element. + */ + properties: Properties; + /** + * Children of element. + */ + children: ElementContent[]; + /** + * When the `tagName` field is `'template'`, a `content` field can be + * present. + */ + content?: Root | undefined; + /** + * Data associated with the element. + */ + data?: ElementData | undefined; +} + +/** + * Info associated with hast elements by the ecosystem. + */ +export interface ElementData extends Data {} + +/** + * Document fragment or a whole document. + * + * Should be used as the root of a tree and must not be used as a child. 
+ * + * Can also be used as the value for the content field on a `'template'` element. + */ +export interface Root extends Parent { + /** + * Node type of hast root. + */ + type: "root"; + /** + * Children of root. + */ + children: RootContent[]; + /** + * Data associated with the hast root. + */ + data?: RootData | undefined; +} + +/** + * Info associated with hast root nodes by the ecosystem. + */ +export interface RootData extends Data {} + +/** + * HTML character data (plain text). + */ +export interface Text extends Literal { + /** + * Node type of HTML character data (plain text) in hast. + */ + type: "text"; + /** + * Data associated with the text. + */ + data?: TextData | undefined; +} + +/** + * Info associated with hast texts by the ecosystem. + */ +export interface TextData extends Data {} diff --git a/node_modules/@types/hast/package.json b/node_modules/@types/hast/package.json new file mode 100644 index 0000000..464e3f7 --- /dev/null +++ b/node_modules/@types/hast/package.json @@ -0,0 +1,42 @@ +{ + "name": "@types/hast", + "version": "3.0.4", + "description": "TypeScript definitions for hast", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/hast", + "license": "MIT", + "contributors": [ + { + "name": "lukeggchapman", + "githubUsername": "lukeggchapman", + "url": "https://github.com/lukeggchapman" + }, + { + "name": "Junyoung Choi", + "githubUsername": "rokt33r", + "url": "https://github.com/rokt33r" + }, + { + "name": "Christian Murphy", + "githubUsername": "ChristianMurphy", + "url": "https://github.com/ChristianMurphy" + }, + { + "name": "Remco Haszing", + "githubUsername": "remcohaszing", + "url": "https://github.com/remcohaszing" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/hast" + }, + "scripts": {}, + "dependencies": { + "@types/unist": "*" + }, + "typesPublisherContentHash": 
"3f3f73826d79157c12087f5bb36195319c6f435b9e218fa7a8de88d1cc64d097", + "typeScriptVersion": "4.6" +} \ No newline at end of file diff --git a/node_modules/@types/unist/LICENSE b/node_modules/@types/unist/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/unist/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/unist/README.md b/node_modules/@types/unist/README.md new file mode 100644 index 0000000..3beb9b3 --- /dev/null +++ b/node_modules/@types/unist/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/unist` + +# Summary +This package contains type definitions for unist (https://github.com/syntax-tree/unist). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist. 
+ +### Additional Details + * Last updated: Thu, 15 Aug 2024 02:18:53 GMT + * Dependencies: none + +# Credits +These definitions were written by [bizen241](https://github.com/bizen241), [Jun Lu](https://github.com/lujun2), [Hernan Rajchert](https://github.com/hrajchert), [Titus Wormer](https://github.com/wooorm), [Junyoung Choi](https://github.com/rokt33r), [Ben Moon](https://github.com/GuiltyDolphin), [JounQin](https://github.com/JounQin), and [Remco Haszing](https://github.com/remcohaszing). diff --git a/node_modules/@types/unist/index.d.ts b/node_modules/@types/unist/index.d.ts new file mode 100644 index 0000000..513ddee --- /dev/null +++ b/node_modules/@types/unist/index.d.ts @@ -0,0 +1,119 @@ +// ## Interfaces + +/** + * Info associated with nodes by the ecosystem. + * + * This space is guaranteed to never be specified by unist or specifications + * implementing unist. + * But you can use it in utilities and plugins to store data. + * + * This type can be augmented to register custom data. + * For example: + * + * ```ts + * declare module 'unist' { + * interface Data { + * // `someNode.data.myId` is typed as `number | undefined` + * myId?: number | undefined + * } + * } + * ``` + */ +export interface Data {} + +/** + * One place in a source file. + */ +export interface Point { + /** + * Line in a source file (1-indexed integer). + */ + line: number; + + /** + * Column in a source file (1-indexed integer). + */ + column: number; + /** + * Character in a source file (0-indexed integer). + */ + offset?: number | undefined; +} + +/** + * Position of a node in a source document. + * + * A position is a range between two points. + */ +export interface Position { + /** + * Place of the first character of the parsed source region. + */ + start: Point; + + /** + * Place of the first character after the parsed source region. + */ + end: Point; +} + +// ## Abstract nodes + +/** + * Abstract unist node that contains the smallest possible value. 
+ * + * This interface is supposed to be extended. + * + * For example, in HTML, a `text` node is a leaf that contains text. + */ +export interface Literal extends Node { + /** + * Plain value. + */ + value: unknown; +} + +/** + * Abstract unist node. + * + * The syntactic unit in unist syntax trees are called nodes. + * + * This interface is supposed to be extended. + * If you can use {@link Literal} or {@link Parent}, you should. + * But for example in markdown, a `thematicBreak` (`***`), is neither literal + * nor parent, but still a node. + */ +export interface Node { + /** + * Node type. + */ + type: string; + + /** + * Info from the ecosystem. + */ + data?: Data | undefined; + + /** + * Position of a node in a source document. + * + * Nodes that are generated (not in the original source document) must not + * have a position. + */ + position?: Position | undefined; +} + +/** + * Abstract unist node that contains other nodes (*children*). + * + * This interface is supposed to be extended. + * + * For example, in XML, an element is a parent of different things, such as + * comments, text, and further elements. + */ +export interface Parent extends Node { + /** + * List of children. 
+ */ + children: Node[]; +} diff --git a/node_modules/@types/unist/package.json b/node_modules/@types/unist/package.json new file mode 100644 index 0000000..d2092db --- /dev/null +++ b/node_modules/@types/unist/package.json @@ -0,0 +1,60 @@ +{ + "name": "@types/unist", + "version": "3.0.3", + "description": "TypeScript definitions for unist", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist", + "license": "MIT", + "contributors": [ + { + "name": "bizen241", + "githubUsername": "bizen241", + "url": "https://github.com/bizen241" + }, + { + "name": "Jun Lu", + "githubUsername": "lujun2", + "url": "https://github.com/lujun2" + }, + { + "name": "Hernan Rajchert", + "githubUsername": "hrajchert", + "url": "https://github.com/hrajchert" + }, + { + "name": "Titus Wormer", + "githubUsername": "wooorm", + "url": "https://github.com/wooorm" + }, + { + "name": "Junyoung Choi", + "githubUsername": "rokt33r", + "url": "https://github.com/rokt33r" + }, + { + "name": "Ben Moon", + "githubUsername": "GuiltyDolphin", + "url": "https://github.com/GuiltyDolphin" + }, + { + "name": "JounQin", + "githubUsername": "JounQin", + "url": "https://github.com/JounQin" + }, + { + "name": "Remco Haszing", + "githubUsername": "remcohaszing", + "url": "https://github.com/remcohaszing" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/unist" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "7f3d5ce8d56003f3583a5317f98d444bdc99910c7b486c6b10af4f38694e61fe", + "typeScriptVersion": "4.8" +} \ No newline at end of file diff --git a/node_modules/argparse/CHANGELOG.md b/node_modules/argparse/CHANGELOG.md new file mode 100644 index 0000000..dc39ed6 --- /dev/null +++ b/node_modules/argparse/CHANGELOG.md @@ -0,0 +1,216 @@ +# Changelog + +All notable changes to this project will be documented in this file. 
+ +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + + +## [2.0.1] - 2020-08-29 +### Fixed +- Fix issue with `process.argv` when used with interpreters (`coffee`, `ts-node`, etc.), #150. + + +## [2.0.0] - 2020-08-14 +### Changed +- Full rewrite. Now port from python 3.9.0 & more precise following. + See [doc](./doc) for difference and migration info. +- node.js 10+ required +- Removed most of local docs in favour of original ones. + + +## [1.0.10] - 2018-02-15 +### Fixed +- Use .concat instead of + for arrays, #122. + + +## [1.0.9] - 2016-09-29 +### Changed +- Rerelease after 1.0.8 - deps cleanup. + + +## [1.0.8] - 2016-09-29 +### Changed +- Maintenance (deps bump, fix node 6.5+ tests, coverage report). + + +## [1.0.7] - 2016-03-17 +### Changed +- Teach `addArgument` to accept string arg names. #97, @tomxtobin. + + +## [1.0.6] - 2016-02-06 +### Changed +- Maintenance: moved to eslint & updated CS. + + +## [1.0.5] - 2016-02-05 +### Changed +- Removed lodash dependency to significantly reduce install size. + Thanks to @mourner. + + +## [1.0.4] - 2016-01-17 +### Changed +- Maintenance: lodash update to 4.0.0. + + +## [1.0.3] - 2015-10-27 +### Fixed +- Fix parse `=` in args: `--examplepath="C:\myfolder\env=x64"`. #84, @CatWithApple. + + +## [1.0.2] - 2015-03-22 +### Changed +- Relaxed lodash version dependency. + + +## [1.0.1] - 2015-02-20 +### Changed +- Changed dependencies to be compatible with ancient nodejs. + + +## [1.0.0] - 2015-02-19 +### Changed +- Maintenance release. +- Replaced `underscore` with `lodash`. +- Bumped version to 1.0.0 to better reflect semver meaning. +- HISTORY.md -> CHANGELOG.md + + +## [0.1.16] - 2013-12-01 +### Changed +- Maintenance release. Updated dependencies and docs. 
+ + +## [0.1.15] - 2013-05-13 +### Fixed +- Fixed #55, @trebor89 + + +## [0.1.14] - 2013-05-12 +### Fixed +- Fixed #62, @maxtaco + + +## [0.1.13] - 2013-04-08 +### Changed +- Added `.npmignore` to reduce package size + + +## [0.1.12] - 2013-02-10 +### Fixed +- Fixed conflictHandler (#46), @hpaulj + + +## [0.1.11] - 2013-02-07 +### Added +- Added 70+ tests (ported from python), @hpaulj +- Added conflictHandler, @applepicke +- Added fromfilePrefixChar, @hpaulj + +### Fixed +- Multiple bugfixes, @hpaulj + + +## [0.1.10] - 2012-12-30 +### Added +- Added [mutual exclusion](http://docs.python.org/dev/library/argparse.html#mutual-exclusion) + support, thanks to @hpaulj + +### Fixed +- Fixed options check for `storeConst` & `appendConst` actions, thanks to @hpaulj + + +## [0.1.9] - 2012-12-27 +### Fixed +- Fixed option dest interferens with other options (issue #23), thanks to @hpaulj +- Fixed default value behavior with `*` positionals, thanks to @hpaulj +- Improve `getDefault()` behavior, thanks to @hpaulj +- Improve negative argument parsing, thanks to @hpaulj + + +## [0.1.8] - 2012-12-01 +### Fixed +- Fixed parser parents (issue #19), thanks to @hpaulj +- Fixed negative argument parse (issue #20), thanks to @hpaulj + + +## [0.1.7] - 2012-10-14 +### Fixed +- Fixed 'choices' argument parse (issue #16) +- Fixed stderr output (issue #15) + + +## [0.1.6] - 2012-09-09 +### Fixed +- Fixed check for conflict of options (thanks to @tomxtobin) + + +## [0.1.5] - 2012-09-03 +### Fixed +- Fix parser #setDefaults method (thanks to @tomxtobin) + + +## [0.1.4] - 2012-07-30 +### Fixed +- Fixed pseudo-argument support (thanks to @CGamesPlay) +- Fixed addHelp default (should be true), if not set (thanks to @benblank) + + +## [0.1.3] - 2012-06-27 +### Fixed +- Fixed formatter api name: Formatter -> HelpFormatter + + +## [0.1.2] - 2012-05-29 +### Fixed +- Removed excess whitespace in help +- Fixed error reporting, when parcer with subcommands + called with empty arguments + +### Added +- 
Added basic tests + + +## [0.1.1] - 2012-05-23 +### Fixed +- Fixed line wrapping in help formatter +- Added better error reporting on invalid arguments + + +## [0.1.0] - 2012-05-16 +### Added +- First release. + + +[2.0.1]: https://github.com/nodeca/argparse/compare/2.0.0...2.0.1 +[2.0.0]: https://github.com/nodeca/argparse/compare/1.0.10...2.0.0 +[1.0.10]: https://github.com/nodeca/argparse/compare/1.0.9...1.0.10 +[1.0.9]: https://github.com/nodeca/argparse/compare/1.0.8...1.0.9 +[1.0.8]: https://github.com/nodeca/argparse/compare/1.0.7...1.0.8 +[1.0.7]: https://github.com/nodeca/argparse/compare/1.0.6...1.0.7 +[1.0.6]: https://github.com/nodeca/argparse/compare/1.0.5...1.0.6 +[1.0.5]: https://github.com/nodeca/argparse/compare/1.0.4...1.0.5 +[1.0.4]: https://github.com/nodeca/argparse/compare/1.0.3...1.0.4 +[1.0.3]: https://github.com/nodeca/argparse/compare/1.0.2...1.0.3 +[1.0.2]: https://github.com/nodeca/argparse/compare/1.0.1...1.0.2 +[1.0.1]: https://github.com/nodeca/argparse/compare/1.0.0...1.0.1 +[1.0.0]: https://github.com/nodeca/argparse/compare/0.1.16...1.0.0 +[0.1.16]: https://github.com/nodeca/argparse/compare/0.1.15...0.1.16 +[0.1.15]: https://github.com/nodeca/argparse/compare/0.1.14...0.1.15 +[0.1.14]: https://github.com/nodeca/argparse/compare/0.1.13...0.1.14 +[0.1.13]: https://github.com/nodeca/argparse/compare/0.1.12...0.1.13 +[0.1.12]: https://github.com/nodeca/argparse/compare/0.1.11...0.1.12 +[0.1.11]: https://github.com/nodeca/argparse/compare/0.1.10...0.1.11 +[0.1.10]: https://github.com/nodeca/argparse/compare/0.1.9...0.1.10 +[0.1.9]: https://github.com/nodeca/argparse/compare/0.1.8...0.1.9 +[0.1.8]: https://github.com/nodeca/argparse/compare/0.1.7...0.1.8 +[0.1.7]: https://github.com/nodeca/argparse/compare/0.1.6...0.1.7 +[0.1.6]: https://github.com/nodeca/argparse/compare/0.1.5...0.1.6 +[0.1.5]: https://github.com/nodeca/argparse/compare/0.1.4...0.1.5 +[0.1.4]: https://github.com/nodeca/argparse/compare/0.1.3...0.1.4 +[0.1.3]: 
https://github.com/nodeca/argparse/compare/0.1.2...0.1.3 +[0.1.2]: https://github.com/nodeca/argparse/compare/0.1.1...0.1.2 +[0.1.1]: https://github.com/nodeca/argparse/compare/0.1.0...0.1.1 +[0.1.0]: https://github.com/nodeca/argparse/releases/tag/0.1.0 diff --git a/node_modules/argparse/LICENSE b/node_modules/argparse/LICENSE new file mode 100644 index 0000000..66a3ac8 --- /dev/null +++ b/node_modules/argparse/LICENSE @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? 
(1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. 
This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. 
This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. 
Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. 
+Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. 
+ +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/argparse/README.md b/node_modules/argparse/README.md new file mode 100644 index 0000000..550b5c9 --- /dev/null +++ b/node_modules/argparse/README.md @@ -0,0 +1,84 @@ +argparse +======== + +[![Build Status](https://secure.travis-ci.org/nodeca/argparse.svg?branch=master)](http://travis-ci.org/nodeca/argparse) +[![NPM version](https://img.shields.io/npm/v/argparse.svg)](https://www.npmjs.org/package/argparse) + +CLI arguments parser for node.js, with [sub-commands](https://docs.python.org/3.9/library/argparse.html#sub-commands) support. Port of python's [argparse](http://docs.python.org/dev/library/argparse.html) (version [3.9.0](https://github.com/python/cpython/blob/v3.9.0rc1/Lib/argparse.py)). + +**Difference with original.** + +- JS has no keyword arguments support. + - Pass options instead: `new ArgumentParser({ description: 'example', add_help: true })`. +- JS has no python's types `int`, `float`, ... + - Use string-typed names: `.add_argument('-b', { type: 'int', help: 'help' })`. +- `%r` format specifier uses `require('util').inspect()`. + +More details in [doc](./doc). 
+ + +Example +------- + +`test.js` file: + +```javascript +#!/usr/bin/env node +'use strict'; + +const { ArgumentParser } = require('argparse'); +const { version } = require('./package.json'); + +const parser = new ArgumentParser({ + description: 'Argparse example' +}); + +parser.add_argument('-v', '--version', { action: 'version', version }); +parser.add_argument('-f', '--foo', { help: 'foo bar' }); +parser.add_argument('-b', '--bar', { help: 'bar foo' }); +parser.add_argument('--baz', { help: 'baz bar' }); + +console.dir(parser.parse_args()); +``` + +Display help: + +``` +$ ./test.js -h +usage: test.js [-h] [-v] [-f FOO] [-b BAR] [--baz BAZ] + +Argparse example + +optional arguments: + -h, --help show this help message and exit + -v, --version show program's version number and exit + -f FOO, --foo FOO foo bar + -b BAR, --bar BAR bar foo + --baz BAZ baz bar +``` + +Parse arguments: + +``` +$ ./test.js -f=3 --bar=4 --baz 5 +{ foo: '3', bar: '4', baz: '5' } +``` + + +API docs +-------- + +Since this is a port with minimal divergence, there's no separate documentation. +Use original one instead, with notes about difference. + +1. [Original doc](https://docs.python.org/3.9/library/argparse.html). +2. [Original tutorial](https://docs.python.org/3.9/howto/argparse.html). +3. [Difference with python](./doc). + + +argparse for enterprise +----------------------- + +Available as part of the Tidelift Subscription + +The maintainers of argparse and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. 
[Learn more.](https://tidelift.com/subscription/pkg/npm-argparse?utm_source=npm-argparse&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/argparse/argparse.js b/node_modules/argparse/argparse.js new file mode 100644 index 0000000..2b8c8c6 --- /dev/null +++ b/node_modules/argparse/argparse.js @@ -0,0 +1,3707 @@ +// Port of python's argparse module, version 3.9.0: +// https://github.com/python/cpython/blob/v3.9.0rc1/Lib/argparse.py + +'use strict' + +// Copyright (C) 2010-2020 Python Software Foundation. +// Copyright (C) 2020 argparse.js authors + +/* + * Command-line parsing library + * + * This module is an optparse-inspired command-line parsing library that: + * + * - handles both optional and positional arguments + * - produces highly informative usage messages + * - supports parsers that dispatch to sub-parsers + * + * The following is a simple usage example that sums integers from the + * command-line and writes the result to a file:: + * + * parser = argparse.ArgumentParser( + * description='sum the integers at the command line') + * parser.add_argument( + * 'integers', metavar='int', nargs='+', type=int, + * help='an integer to be summed') + * parser.add_argument( + * '--log', default=sys.stdout, type=argparse.FileType('w'), + * help='the file where the sum should be written') + * args = parser.parse_args() + * args.log.write('%s' % sum(args.integers)) + * args.log.close() + * + * The module contains the following public classes: + * + * - ArgumentParser -- The main entry point for command-line parsing. As the + * example above shows, the add_argument() method is used to populate + * the parser with actions for optional and positional arguments. Then + * the parse_args() method is invoked to convert the args at the + * command-line into an object with attributes. + * + * - ArgumentError -- The exception raised by ArgumentParser objects when + * there are errors with the parser's actions. 
Errors raised while + * parsing the command-line are caught by ArgumentParser and emitted + * as command-line messages. + * + * - FileType -- A factory for defining types of files to be created. As the + * example above shows, instances of FileType are typically passed as + * the type= argument of add_argument() calls. + * + * - Action -- The base class for parser actions. Typically actions are + * selected by passing strings like 'store_true' or 'append_const' to + * the action= argument of add_argument(). However, for greater + * customization of ArgumentParser actions, subclasses of Action may + * be defined and passed as the action= argument. + * + * - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter, + * ArgumentDefaultsHelpFormatter -- Formatter classes which + * may be passed as the formatter_class= argument to the + * ArgumentParser constructor. HelpFormatter is the default, + * RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser + * not to change the formatting for help text, and + * ArgumentDefaultsHelpFormatter adds information about argument defaults + * to the help. + * + * All other classes in this module are considered implementation details. + * (Also note that HelpFormatter and RawDescriptionHelpFormatter are only + * considered public as object names -- the API of the formatter objects is + * still considered an implementation detail.) + */ + +const SUPPRESS = '==SUPPRESS==' + +const OPTIONAL = '?' +const ZERO_OR_MORE = '*' +const ONE_OR_MORE = '+' +const PARSER = 'A...' +const REMAINDER = '...' 
+const _UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args' + + +// ================================== +// Utility functions used for porting +// ================================== +const assert = require('assert') +const util = require('util') +const fs = require('fs') +const sub = require('./lib/sub') +const path = require('path') +const repr = util.inspect + +function get_argv() { + // omit first argument (which is assumed to be interpreter - `node`, `coffee`, `ts-node`, etc.) + return process.argv.slice(1) +} + +function get_terminal_size() { + return { + columns: +process.env.COLUMNS || process.stdout.columns || 80 + } +} + +function hasattr(object, name) { + return Object.prototype.hasOwnProperty.call(object, name) +} + +function getattr(object, name, value) { + return hasattr(object, name) ? object[name] : value +} + +function setattr(object, name, value) { + object[name] = value +} + +function setdefault(object, name, value) { + if (!hasattr(object, name)) object[name] = value + return object[name] +} + +function delattr(object, name) { + delete object[name] +} + +function range(from, to, step=1) { + // range(10) is equivalent to range(0, 10) + if (arguments.length === 1) [ to, from ] = [ from, 0 ] + if (typeof from !== 'number' || typeof to !== 'number' || typeof step !== 'number') { + throw new TypeError('argument cannot be interpreted as an integer') + } + if (step === 0) throw new TypeError('range() arg 3 must not be zero') + + let result = [] + if (step > 0) { + for (let i = from; i < to; i += step) result.push(i) + } else { + for (let i = from; i > to; i += step) result.push(i) + } + return result +} + +function splitlines(str, keepends = false) { + let result + if (!keepends) { + result = str.split(/\r\n|[\n\r\v\f\x1c\x1d\x1e\x85\u2028\u2029]/) + } else { + result = [] + let parts = str.split(/(\r\n|[\n\r\v\f\x1c\x1d\x1e\x85\u2028\u2029])/) + for (let i = 0; i < parts.length; i += 2) { + result.push(parts[i] + (i + 1 < parts.length ? 
parts[i + 1] : '')) + } + } + if (!result[result.length - 1]) result.pop() + return result +} + +function _string_lstrip(string, prefix_chars) { + let idx = 0 + while (idx < string.length && prefix_chars.includes(string[idx])) idx++ + return idx ? string.slice(idx) : string +} + +function _string_split(string, sep, maxsplit) { + let result = string.split(sep) + if (result.length > maxsplit) { + result = result.slice(0, maxsplit).concat([ result.slice(maxsplit).join(sep) ]) + } + return result +} + +function _array_equal(array1, array2) { + if (array1.length !== array2.length) return false + for (let i = 0; i < array1.length; i++) { + if (array1[i] !== array2[i]) return false + } + return true +} + +function _array_remove(array, item) { + let idx = array.indexOf(item) + if (idx === -1) throw new TypeError(sub('%r not in list', item)) + array.splice(idx, 1) +} + +// normalize choices to array; +// this isn't required in python because `in` and `map` operators work with anything, +// but in js dealing with multiple types here is too clunky +function _choices_to_array(choices) { + if (choices === undefined) { + return [] + } else if (Array.isArray(choices)) { + return choices + } else if (choices !== null && typeof choices[Symbol.iterator] === 'function') { + return Array.from(choices) + } else if (typeof choices === 'object' && choices !== null) { + return Object.keys(choices) + } else { + throw new Error(sub('invalid choices value: %r', choices)) + } +} + +// decorator that allows a class to be called without new +function _callable(cls) { + let result = { // object is needed for inferred class name + [cls.name]: function (...args) { + let this_class = new.target === result || !new.target + return Reflect.construct(cls, args, this_class ? cls : new.target) + } + } + result[cls.name].prototype = cls.prototype + // fix default tag for toString, e.g. 
[object Action] instead of [object Object] + cls.prototype[Symbol.toStringTag] = cls.name + return result[cls.name] +} + +function _alias(object, from, to) { + try { + let name = object.constructor.name + Object.defineProperty(object, from, { + value: util.deprecate(object[to], sub('%s.%s() is renamed to %s.%s()', + name, from, name, to)), + enumerable: false + }) + } catch {} +} + +// decorator that allows snake_case class methods to be called with camelCase and vice versa +function _camelcase_alias(_class) { + for (let name of Object.getOwnPropertyNames(_class.prototype)) { + let camelcase = name.replace(/\w_[a-z]/g, s => s[0] + s[2].toUpperCase()) + if (camelcase !== name) _alias(_class.prototype, camelcase, name) + } + return _class +} + +function _to_legacy_name(key) { + key = key.replace(/\w_[a-z]/g, s => s[0] + s[2].toUpperCase()) + if (key === 'default') key = 'defaultValue' + if (key === 'const') key = 'constant' + return key +} + +function _to_new_name(key) { + if (key === 'defaultValue') key = 'default' + if (key === 'constant') key = 'const' + key = key.replace(/[A-Z]/g, c => '_' + c.toLowerCase()) + return key +} + +// parse options +let no_default = Symbol('no_default_value') +function _parse_opts(args, descriptor) { + function get_name() { + let stack = new Error().stack.split('\n') + .map(x => x.match(/^ at (.*) \(.*\)$/)) + .filter(Boolean) + .map(m => m[1]) + .map(fn => fn.match(/[^ .]*$/)[0]) + + if (stack.length && stack[0] === get_name.name) stack.shift() + if (stack.length && stack[0] === _parse_opts.name) stack.shift() + return stack.length ? 
stack[0] : '' + } + + args = Array.from(args) + let kwargs = {} + let result = [] + let last_opt = args.length && args[args.length - 1] + + if (typeof last_opt === 'object' && last_opt !== null && !Array.isArray(last_opt) && + (!last_opt.constructor || last_opt.constructor.name === 'Object')) { + kwargs = Object.assign({}, args.pop()) + } + + // LEGACY (v1 compatibility): camelcase + let renames = [] + for (let key of Object.keys(descriptor)) { + let old_name = _to_legacy_name(key) + if (old_name !== key && (old_name in kwargs)) { + if (key in kwargs) { + // default and defaultValue specified at the same time, happens often in old tests + //throw new TypeError(sub('%s() got multiple values for argument %r', get_name(), key)) + } else { + kwargs[key] = kwargs[old_name] + } + renames.push([ old_name, key ]) + delete kwargs[old_name] + } + } + if (renames.length) { + let name = get_name() + deprecate('camelcase_' + name, sub('%s(): following options are renamed: %s', + name, renames.map(([ a, b ]) => sub('%r -> %r', a, b)))) + } + // end + + let missing_positionals = [] + let positional_count = args.length + + for (let [ key, def ] of Object.entries(descriptor)) { + if (key[0] === '*') { + if (key.length > 0 && key[1] === '*') { + // LEGACY (v1 compatibility): camelcase + let renames = [] + for (let key of Object.keys(kwargs)) { + let new_name = _to_new_name(key) + if (new_name !== key && (key in kwargs)) { + if (new_name in kwargs) { + // default and defaultValue specified at the same time, happens often in old tests + //throw new TypeError(sub('%s() got multiple values for argument %r', get_name(), new_name)) + } else { + kwargs[new_name] = kwargs[key] + } + renames.push([ key, new_name ]) + delete kwargs[key] + } + } + if (renames.length) { + let name = get_name() + deprecate('camelcase_' + name, sub('%s(): following options are renamed: %s', + name, renames.map(([ a, b ]) => sub('%r -> %r', a, b)))) + } + // end + result.push(kwargs) + kwargs = {} + } else { + 
result.push(args) + args = [] + } + } else if (key in kwargs && args.length > 0) { + throw new TypeError(sub('%s() got multiple values for argument %r', get_name(), key)) + } else if (key in kwargs) { + result.push(kwargs[key]) + delete kwargs[key] + } else if (args.length > 0) { + result.push(args.shift()) + } else if (def !== no_default) { + result.push(def) + } else { + missing_positionals.push(key) + } + } + + if (Object.keys(kwargs).length) { + throw new TypeError(sub('%s() got an unexpected keyword argument %r', + get_name(), Object.keys(kwargs)[0])) + } + + if (args.length) { + let from = Object.entries(descriptor).filter(([ k, v ]) => k[0] !== '*' && v !== no_default).length + let to = Object.entries(descriptor).filter(([ k ]) => k[0] !== '*').length + throw new TypeError(sub('%s() takes %s positional argument%s but %s %s given', + get_name(), + from === to ? sub('from %s to %s', from, to) : to, + from === to && to === 1 ? '' : 's', + positional_count, + positional_count === 1 ? 'was' : 'were')) + } + + if (missing_positionals.length) { + let strs = missing_positionals.map(repr) + if (strs.length > 1) strs[strs.length - 1] = 'and ' + strs[strs.length - 1] + let str_joined = strs.join(strs.length === 2 ? '' : ', ') + throw new TypeError(sub('%s() missing %i required positional argument%s: %s', + get_name(), strs.length, strs.length === 1 ? '' : 's', str_joined)) + } + + return result +} + +let _deprecations = {} +function deprecate(id, string) { + _deprecations[id] = _deprecations[id] || util.deprecate(() => {}, string) + _deprecations[id]() +} + + +// ============================= +// Utility functions and classes +// ============================= +function _AttributeHolder(cls = Object) { + /* + * Abstract base class that provides __repr__. + * + * The __repr__ method returns a string in the format:: + * ClassName(attr=name, attr=name, ...) 
+ * The attributes are determined either by a class-level attribute, + * '_kwarg_names', or by inspecting the instance __dict__. + */ + + return class _AttributeHolder extends cls { + [util.inspect.custom]() { + let type_name = this.constructor.name + let arg_strings = [] + let star_args = {} + for (let arg of this._get_args()) { + arg_strings.push(repr(arg)) + } + for (let [ name, value ] of this._get_kwargs()) { + if (/^[a-z_][a-z0-9_$]*$/i.test(name)) { + arg_strings.push(sub('%s=%r', name, value)) + } else { + star_args[name] = value + } + } + if (Object.keys(star_args).length) { + arg_strings.push(sub('**%s', repr(star_args))) + } + return sub('%s(%s)', type_name, arg_strings.join(', ')) + } + + toString() { + return this[util.inspect.custom]() + } + + _get_kwargs() { + return Object.entries(this) + } + + _get_args() { + return [] + } + } +} + + +function _copy_items(items) { + if (items === undefined) { + return [] + } + return items.slice(0) +} + + +// =============== +// Formatting Help +// =============== +const HelpFormatter = _camelcase_alias(_callable(class HelpFormatter { + /* + * Formatter for generating usage messages and argument help strings. + * + * Only the name of this class is considered a public API. All the methods + * provided by the class are considered an implementation detail. 
+ */ + + constructor() { + let [ + prog, + indent_increment, + max_help_position, + width + ] = _parse_opts(arguments, { + prog: no_default, + indent_increment: 2, + max_help_position: 24, + width: undefined + }) + + // default setting for width + if (width === undefined) { + width = get_terminal_size().columns + width -= 2 + } + + this._prog = prog + this._indent_increment = indent_increment + this._max_help_position = Math.min(max_help_position, + Math.max(width - 20, indent_increment * 2)) + this._width = width + + this._current_indent = 0 + this._level = 0 + this._action_max_length = 0 + + this._root_section = this._Section(this, undefined) + this._current_section = this._root_section + + this._whitespace_matcher = /[ \t\n\r\f\v]+/g // equivalent to python /\s+/ with ASCII flag + this._long_break_matcher = /\n\n\n+/g + } + + // =============================== + // Section and indentation methods + // =============================== + _indent() { + this._current_indent += this._indent_increment + this._level += 1 + } + + _dedent() { + this._current_indent -= this._indent_increment + assert(this._current_indent >= 0, 'Indent decreased below 0.') + this._level -= 1 + } + + _add_item(func, args) { + this._current_section.items.push([ func, args ]) + } + + // ======================== + // Message building methods + // ======================== + start_section(heading) { + this._indent() + let section = this._Section(this, this._current_section, heading) + this._add_item(section.format_help.bind(section), []) + this._current_section = section + } + + end_section() { + this._current_section = this._current_section.parent + this._dedent() + } + + add_text(text) { + if (text !== SUPPRESS && text !== undefined) { + this._add_item(this._format_text.bind(this), [text]) + } + } + + add_usage(usage, actions, groups, prefix = undefined) { + if (usage !== SUPPRESS) { + let args = [ usage, actions, groups, prefix ] + this._add_item(this._format_usage.bind(this), args) + } + } + 
+ add_argument(action) { + if (action.help !== SUPPRESS) { + + // find all invocations + let invocations = [this._format_action_invocation(action)] + for (let subaction of this._iter_indented_subactions(action)) { + invocations.push(this._format_action_invocation(subaction)) + } + + // update the maximum item length + let invocation_length = Math.max(...invocations.map(invocation => invocation.length)) + let action_length = invocation_length + this._current_indent + this._action_max_length = Math.max(this._action_max_length, + action_length) + + // add the item to the list + this._add_item(this._format_action.bind(this), [action]) + } + } + + add_arguments(actions) { + for (let action of actions) { + this.add_argument(action) + } + } + + // ======================= + // Help-formatting methods + // ======================= + format_help() { + let help = this._root_section.format_help() + if (help) { + help = help.replace(this._long_break_matcher, '\n\n') + help = help.replace(/^\n+|\n+$/g, '') + '\n' + } + return help + } + + _join_parts(part_strings) { + return part_strings.filter(part => part && part !== SUPPRESS).join('') + } + + _format_usage(usage, actions, groups, prefix) { + if (prefix === undefined) { + prefix = 'usage: ' + } + + // if usage is specified, use that + if (usage !== undefined) { + usage = sub(usage, { prog: this._prog }) + + // if no optionals or positionals are available, usage is just prog + } else if (usage === undefined && !actions.length) { + usage = sub('%(prog)s', { prog: this._prog }) + + // if optionals and positionals are available, calculate usage + } else if (usage === undefined) { + let prog = sub('%(prog)s', { prog: this._prog }) + + // split optionals from positionals + let optionals = [] + let positionals = [] + for (let action of actions) { + if (action.option_strings.length) { + optionals.push(action) + } else { + positionals.push(action) + } + } + + // build full usage string + let action_usage = 
this._format_actions_usage([].concat(optionals).concat(positionals), groups) + usage = [ prog, action_usage ].map(String).join(' ') + + // wrap the usage parts if it's too long + let text_width = this._width - this._current_indent + if (prefix.length + usage.length > text_width) { + + // break usage into wrappable parts + let part_regexp = /\(.*?\)+(?=\s|$)|\[.*?\]+(?=\s|$)|\S+/g + let opt_usage = this._format_actions_usage(optionals, groups) + let pos_usage = this._format_actions_usage(positionals, groups) + let opt_parts = opt_usage.match(part_regexp) || [] + let pos_parts = pos_usage.match(part_regexp) || [] + assert(opt_parts.join(' ') === opt_usage) + assert(pos_parts.join(' ') === pos_usage) + + // helper for wrapping lines + let get_lines = (parts, indent, prefix = undefined) => { + let lines = [] + let line = [] + let line_len + if (prefix !== undefined) { + line_len = prefix.length - 1 + } else { + line_len = indent.length - 1 + } + for (let part of parts) { + if (line_len + 1 + part.length > text_width && line) { + lines.push(indent + line.join(' ')) + line = [] + line_len = indent.length - 1 + } + line.push(part) + line_len += part.length + 1 + } + if (line.length) { + lines.push(indent + line.join(' ')) + } + if (prefix !== undefined) { + lines[0] = lines[0].slice(indent.length) + } + return lines + } + + let lines + + // if prog is short, follow it with optionals or positionals + if (prefix.length + prog.length <= 0.75 * text_width) { + let indent = ' '.repeat(prefix.length + prog.length + 1) + if (opt_parts.length) { + lines = get_lines([prog].concat(opt_parts), indent, prefix) + lines = lines.concat(get_lines(pos_parts, indent)) + } else if (pos_parts.length) { + lines = get_lines([prog].concat(pos_parts), indent, prefix) + } else { + lines = [prog] + } + + // if prog is long, put it on its own line + } else { + let indent = ' '.repeat(prefix.length) + let parts = [].concat(opt_parts).concat(pos_parts) + lines = get_lines(parts, indent) + if 
(lines.length > 1) { + lines = [] + lines = lines.concat(get_lines(opt_parts, indent)) + lines = lines.concat(get_lines(pos_parts, indent)) + } + lines = [prog].concat(lines) + } + + // join lines into usage + usage = lines.join('\n') + } + } + + // prefix with 'usage:' + return sub('%s%s\n\n', prefix, usage) + } + + _format_actions_usage(actions, groups) { + // find group indices and identify actions in groups + let group_actions = new Set() + let inserts = {} + for (let group of groups) { + let start = actions.indexOf(group._group_actions[0]) + if (start === -1) { + continue + } else { + let end = start + group._group_actions.length + if (_array_equal(actions.slice(start, end), group._group_actions)) { + for (let action of group._group_actions) { + group_actions.add(action) + } + if (!group.required) { + if (start in inserts) { + inserts[start] += ' [' + } else { + inserts[start] = '[' + } + if (end in inserts) { + inserts[end] += ']' + } else { + inserts[end] = ']' + } + } else { + if (start in inserts) { + inserts[start] += ' (' + } else { + inserts[start] = '(' + } + if (end in inserts) { + inserts[end] += ')' + } else { + inserts[end] = ')' + } + } + for (let i of range(start + 1, end)) { + inserts[i] = '|' + } + } + } + } + + // collect all actions format strings + let parts = [] + for (let [ i, action ] of Object.entries(actions)) { + + // suppressed arguments are marked with None + // remove | separators for suppressed arguments + if (action.help === SUPPRESS) { + parts.push(undefined) + if (inserts[+i] === '|') { + delete inserts[+i] + } else if (inserts[+i + 1] === '|') { + delete inserts[+i + 1] + } + + // produce all arg strings + } else if (!action.option_strings.length) { + let default_value = this._get_default_metavar_for_positional(action) + let part = this._format_args(action, default_value) + + // if it's in a group, strip the outer [] + if (group_actions.has(action)) { + if (part[0] === '[' && part[part.length - 1] === ']') { + part = 
part.slice(1, -1) + } + } + + // add the action string to the list + parts.push(part) + + // produce the first way to invoke the option in brackets + } else { + let option_string = action.option_strings[0] + let part + + // if the Optional doesn't take a value, format is: + // -s or --long + if (action.nargs === 0) { + part = action.format_usage() + + // if the Optional takes a value, format is: + // -s ARGS or --long ARGS + } else { + let default_value = this._get_default_metavar_for_optional(action) + let args_string = this._format_args(action, default_value) + part = sub('%s %s', option_string, args_string) + } + + // make it look optional if it's not required or in a group + if (!action.required && !group_actions.has(action)) { + part = sub('[%s]', part) + } + + // add the action string to the list + parts.push(part) + } + } + + // insert things at the necessary indices + for (let i of Object.keys(inserts).map(Number).sort((a, b) => b - a)) { + parts.splice(+i, 0, inserts[+i]) + } + + // join all the action items with spaces + let text = parts.filter(Boolean).join(' ') + + // clean up separators for mutually exclusive groups + text = text.replace(/([\[(]) /g, '$1') + text = text.replace(/ ([\])])/g, '$1') + text = text.replace(/[\[(] *[\])]/g, '') + text = text.replace(/\(([^|]*)\)/g, '$1', text) + text = text.trim() + + // return the text + return text + } + + _format_text(text) { + if (text.includes('%(prog)')) { + text = sub(text, { prog: this._prog }) + } + let text_width = Math.max(this._width - this._current_indent, 11) + let indent = ' '.repeat(this._current_indent) + return this._fill_text(text, text_width, indent) + '\n\n' + } + + _format_action(action) { + // determine the required width and the entry label + let help_position = Math.min(this._action_max_length + 2, + this._max_help_position) + let help_width = Math.max(this._width - help_position, 11) + let action_width = help_position - this._current_indent - 2 + let action_header = 
this._format_action_invocation(action) + let indent_first + + // no help; start on same line and add a final newline + if (!action.help) { + let tup = [ this._current_indent, '', action_header ] + action_header = sub('%*s%s\n', ...tup) + + // short action name; start on the same line and pad two spaces + } else if (action_header.length <= action_width) { + let tup = [ this._current_indent, '', action_width, action_header ] + action_header = sub('%*s%-*s ', ...tup) + indent_first = 0 + + // long action name; start on the next line + } else { + let tup = [ this._current_indent, '', action_header ] + action_header = sub('%*s%s\n', ...tup) + indent_first = help_position + } + + // collect the pieces of the action help + let parts = [action_header] + + // if there was help for the action, add lines of help text + if (action.help) { + let help_text = this._expand_help(action) + let help_lines = this._split_lines(help_text, help_width) + parts.push(sub('%*s%s\n', indent_first, '', help_lines[0])) + for (let line of help_lines.slice(1)) { + parts.push(sub('%*s%s\n', help_position, '', line)) + } + + // or add a newline if the description doesn't end with one + } else if (!action_header.endsWith('\n')) { + parts.push('\n') + } + + // if there are any sub-actions, add their help as well + for (let subaction of this._iter_indented_subactions(action)) { + parts.push(this._format_action(subaction)) + } + + // return a single string + return this._join_parts(parts) + } + + _format_action_invocation(action) { + if (!action.option_strings.length) { + let default_value = this._get_default_metavar_for_positional(action) + let metavar = this._metavar_formatter(action, default_value)(1)[0] + return metavar + + } else { + let parts = [] + + // if the Optional doesn't take a value, format is: + // -s, --long + if (action.nargs === 0) { + parts = parts.concat(action.option_strings) + + // if the Optional takes a value, format is: + // -s ARGS, --long ARGS + } else { + let default_value = 
this._get_default_metavar_for_optional(action) + let args_string = this._format_args(action, default_value) + for (let option_string of action.option_strings) { + parts.push(sub('%s %s', option_string, args_string)) + } + } + + return parts.join(', ') + } + } + + _metavar_formatter(action, default_metavar) { + let result + if (action.metavar !== undefined) { + result = action.metavar + } else if (action.choices !== undefined) { + let choice_strs = _choices_to_array(action.choices).map(String) + result = sub('{%s}', choice_strs.join(',')) + } else { + result = default_metavar + } + + function format(tuple_size) { + if (Array.isArray(result)) { + return result + } else { + return Array(tuple_size).fill(result) + } + } + return format + } + + _format_args(action, default_metavar) { + let get_metavar = this._metavar_formatter(action, default_metavar) + let result + if (action.nargs === undefined) { + result = sub('%s', ...get_metavar(1)) + } else if (action.nargs === OPTIONAL) { + result = sub('[%s]', ...get_metavar(1)) + } else if (action.nargs === ZERO_OR_MORE) { + let metavar = get_metavar(1) + if (metavar.length === 2) { + result = sub('[%s [%s ...]]', ...metavar) + } else { + result = sub('[%s ...]', ...metavar) + } + } else if (action.nargs === ONE_OR_MORE) { + result = sub('%s [%s ...]', ...get_metavar(2)) + } else if (action.nargs === REMAINDER) { + result = '...' 
+ } else if (action.nargs === PARSER) { + result = sub('%s ...', ...get_metavar(1)) + } else if (action.nargs === SUPPRESS) { + result = '' + } else { + let formats + try { + formats = range(action.nargs).map(() => '%s') + } catch (err) { + throw new TypeError('invalid nargs value') + } + result = sub(formats.join(' '), ...get_metavar(action.nargs)) + } + return result + } + + _expand_help(action) { + let params = Object.assign({ prog: this._prog }, action) + for (let name of Object.keys(params)) { + if (params[name] === SUPPRESS) { + delete params[name] + } + } + for (let name of Object.keys(params)) { + if (params[name] && params[name].name) { + params[name] = params[name].name + } + } + if (params.choices !== undefined) { + let choices_str = _choices_to_array(params.choices).map(String).join(', ') + params.choices = choices_str + } + // LEGACY (v1 compatibility): camelcase + for (let key of Object.keys(params)) { + let old_name = _to_legacy_name(key) + if (old_name !== key) { + params[old_name] = params[key] + } + } + // end + return sub(this._get_help_string(action), params) + } + + * _iter_indented_subactions(action) { + if (typeof action._get_subactions === 'function') { + this._indent() + yield* action._get_subactions() + this._dedent() + } + } + + _split_lines(text, width) { + text = text.replace(this._whitespace_matcher, ' ').trim() + // The textwrap module is used only for formatting help. + // Delay its import for speeding up the common usage of argparse. 
+ let textwrap = require('./lib/textwrap') + return textwrap.wrap(text, { width }) + } + + _fill_text(text, width, indent) { + text = text.replace(this._whitespace_matcher, ' ').trim() + let textwrap = require('./lib/textwrap') + return textwrap.fill(text, { width, + initial_indent: indent, + subsequent_indent: indent }) + } + + _get_help_string(action) { + return action.help + } + + _get_default_metavar_for_optional(action) { + return action.dest.toUpperCase() + } + + _get_default_metavar_for_positional(action) { + return action.dest + } +})) + +HelpFormatter.prototype._Section = _callable(class _Section { + + constructor(formatter, parent, heading = undefined) { + this.formatter = formatter + this.parent = parent + this.heading = heading + this.items = [] + } + + format_help() { + // format the indented section + if (this.parent !== undefined) { + this.formatter._indent() + } + let item_help = this.formatter._join_parts(this.items.map(([ func, args ]) => func.apply(null, args))) + if (this.parent !== undefined) { + this.formatter._dedent() + } + + // return nothing if the section was empty + if (!item_help) { + return '' + } + + // add the heading if the section was non-empty + let heading + if (this.heading !== SUPPRESS && this.heading !== undefined) { + let current_indent = this.formatter._current_indent + heading = sub('%*s%s:\n', current_indent, '', this.heading) + } else { + heading = '' + } + + // join the section-initial newline, the heading and the help + return this.formatter._join_parts(['\n', heading, item_help, '\n']) + } +}) + + +const RawDescriptionHelpFormatter = _camelcase_alias(_callable(class RawDescriptionHelpFormatter extends HelpFormatter { + /* + * Help message formatter which retains any formatting in descriptions. + * + * Only the name of this class is considered a public API. All the methods + * provided by the class are considered an implementation detail. 
+ */ + + _fill_text(text, width, indent) { + return splitlines(text, true).map(line => indent + line).join('') + } +})) + + +const RawTextHelpFormatter = _camelcase_alias(_callable(class RawTextHelpFormatter extends RawDescriptionHelpFormatter { + /* + * Help message formatter which retains formatting of all help text. + * + * Only the name of this class is considered a public API. All the methods + * provided by the class are considered an implementation detail. + */ + + _split_lines(text/*, width*/) { + return splitlines(text) + } +})) + + +const ArgumentDefaultsHelpFormatter = _camelcase_alias(_callable(class ArgumentDefaultsHelpFormatter extends HelpFormatter { + /* + * Help message formatter which adds default values to argument help. + * + * Only the name of this class is considered a public API. All the methods + * provided by the class are considered an implementation detail. + */ + + _get_help_string(action) { + let help = action.help + // LEGACY (v1 compatibility): additional check for defaultValue needed + if (!action.help.includes('%(default)') && !action.help.includes('%(defaultValue)')) { + if (action.default !== SUPPRESS) { + let defaulting_nargs = [OPTIONAL, ZERO_OR_MORE] + if (action.option_strings.length || defaulting_nargs.includes(action.nargs)) { + help += ' (default: %(default)s)' + } + } + } + return help + } +})) + + +const MetavarTypeHelpFormatter = _camelcase_alias(_callable(class MetavarTypeHelpFormatter extends HelpFormatter { + /* + * Help message formatter which uses the argument 'type' as the default + * metavar value (instead of the argument 'dest') + * + * Only the name of this class is considered a public API. All the methods + * provided by the class are considered an implementation detail. + */ + + _get_default_metavar_for_optional(action) { + return typeof action.type === 'function' ? action.type.name : action.type + } + + _get_default_metavar_for_positional(action) { + return typeof action.type === 'function' ? 
action.type.name : action.type + } +})) + + +// ===================== +// Options and Arguments +// ===================== +function _get_action_name(argument) { + if (argument === undefined) { + return undefined + } else if (argument.option_strings.length) { + return argument.option_strings.join('/') + } else if (![ undefined, SUPPRESS ].includes(argument.metavar)) { + return argument.metavar + } else if (![ undefined, SUPPRESS ].includes(argument.dest)) { + return argument.dest + } else { + return undefined + } +} + + +const ArgumentError = _callable(class ArgumentError extends Error { + /* + * An error from creating or using an argument (optional or positional). + * + * The string value of this exception is the message, augmented with + * information about the argument that caused it. + */ + + constructor(argument, message) { + super() + this.name = 'ArgumentError' + this._argument_name = _get_action_name(argument) + this._message = message + this.message = this.str() + } + + str() { + let format + if (this._argument_name === undefined) { + format = '%(message)s' + } else { + format = 'argument %(argument_name)s: %(message)s' + } + return sub(format, { message: this._message, + argument_name: this._argument_name }) + } +}) + + +const ArgumentTypeError = _callable(class ArgumentTypeError extends Error { + /* + * An error from trying to convert a command line string to a type. + */ + + constructor(message) { + super(message) + this.name = 'ArgumentTypeError' + } +}) + + +// ============== +// Action classes +// ============== +const Action = _camelcase_alias(_callable(class Action extends _AttributeHolder(Function) { + /* + * Information about how to convert command line strings to Python objects. + * + * Action objects are used by an ArgumentParser to represent the information + * needed to parse a single argument from one or more strings from the + * command line. The keyword arguments to the Action constructor are also + * all attributes of Action instances. 
+ * + * Keyword Arguments: + * + * - option_strings -- A list of command-line option strings which + * should be associated with this action. + * + * - dest -- The name of the attribute to hold the created object(s) + * + * - nargs -- The number of command-line arguments that should be + * consumed. By default, one argument will be consumed and a single + * value will be produced. Other values include: + * - N (an integer) consumes N arguments (and produces a list) + * - '?' consumes zero or one arguments + * - '*' consumes zero or more arguments (and produces a list) + * - '+' consumes one or more arguments (and produces a list) + * Note that the difference between the default and nargs=1 is that + * with the default, a single value will be produced, while with + * nargs=1, a list containing a single value will be produced. + * + * - const -- The value to be produced if the option is specified and the + * option uses an action that takes no values. + * + * - default -- The value to be produced if the option is not specified. + * + * - type -- A callable that accepts a single string argument, and + * returns the converted value. The standard Python types str, int, + * float, and complex are useful examples of such callables. If None, + * str is used. + * + * - choices -- A container of values that should be allowed. If not None, + * after a command-line argument has been converted to the appropriate + * type, an exception will be raised if it is not a member of this + * collection. + * + * - required -- True if the action must always be specified at the + * command line. This is only meaningful for optional command-line + * arguments. + * + * - help -- The help string describing the argument. + * + * - metavar -- The name to be used for the option's argument with the + * help string. If None, the 'dest' value will be used as the name. 
+ */ + + constructor() { + let [ + option_strings, + dest, + nargs, + const_value, + default_value, + type, + choices, + required, + help, + metavar + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: no_default, + nargs: undefined, + const: undefined, + default: undefined, + type: undefined, + choices: undefined, + required: false, + help: undefined, + metavar: undefined + }) + + // when this class is called as a function, redirect it to .call() method of itself + super('return arguments.callee.call.apply(arguments.callee, arguments)') + + this.option_strings = option_strings + this.dest = dest + this.nargs = nargs + this.const = const_value + this.default = default_value + this.type = type + this.choices = choices + this.required = required + this.help = help + this.metavar = metavar + } + + _get_kwargs() { + let names = [ + 'option_strings', + 'dest', + 'nargs', + 'const', + 'default', + 'type', + 'choices', + 'help', + 'metavar' + ] + return names.map(name => [ name, getattr(this, name) ]) + } + + format_usage() { + return this.option_strings[0] + } + + call(/*parser, namespace, values, option_string = undefined*/) { + throw new Error('.call() not defined') + } +})) + + +const BooleanOptionalAction = _camelcase_alias(_callable(class BooleanOptionalAction extends Action { + + constructor() { + let [ + option_strings, + dest, + default_value, + type, + choices, + required, + help, + metavar + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: no_default, + default: undefined, + type: undefined, + choices: undefined, + required: false, + help: undefined, + metavar: undefined + }) + + let _option_strings = [] + for (let option_string of option_strings) { + _option_strings.push(option_string) + + if (option_string.startsWith('--')) { + option_string = '--no-' + option_string.slice(2) + _option_strings.push(option_string) + } + } + + if (help !== undefined && default_value !== undefined) { + help += ` (default: ${default_value})` + } 
+ + super({ + option_strings: _option_strings, + dest, + nargs: 0, + default: default_value, + type, + choices, + required, + help, + metavar + }) + } + + call(parser, namespace, values, option_string = undefined) { + if (this.option_strings.includes(option_string)) { + setattr(namespace, this.dest, !option_string.startsWith('--no-')) + } + } + + format_usage() { + return this.option_strings.join(' | ') + } +})) + + +const _StoreAction = _callable(class _StoreAction extends Action { + + constructor() { + let [ + option_strings, + dest, + nargs, + const_value, + default_value, + type, + choices, + required, + help, + metavar + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: no_default, + nargs: undefined, + const: undefined, + default: undefined, + type: undefined, + choices: undefined, + required: false, + help: undefined, + metavar: undefined + }) + + if (nargs === 0) { + throw new TypeError('nargs for store actions must be != 0; if you ' + + 'have nothing to store, actions such as store ' + + 'true or store const may be more appropriate') + } + if (const_value !== undefined && nargs !== OPTIONAL) { + throw new TypeError(sub('nargs must be %r to supply const', OPTIONAL)) + } + super({ + option_strings, + dest, + nargs, + const: const_value, + default: default_value, + type, + choices, + required, + help, + metavar + }) + } + + call(parser, namespace, values/*, option_string = undefined*/) { + setattr(namespace, this.dest, values) + } +}) + + +const _StoreConstAction = _callable(class _StoreConstAction extends Action { + + constructor() { + let [ + option_strings, + dest, + const_value, + default_value, + required, + help + //, metavar + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: no_default, + const: no_default, + default: undefined, + required: false, + help: undefined, + metavar: undefined + }) + + super({ + option_strings, + dest, + nargs: 0, + const: const_value, + default: default_value, + required, + help + }) + } + 
+ call(parser, namespace/*, values, option_string = undefined*/) { + setattr(namespace, this.dest, this.const) + } +}) + + +const _StoreTrueAction = _callable(class _StoreTrueAction extends _StoreConstAction { + + constructor() { + let [ + option_strings, + dest, + default_value, + required, + help + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: no_default, + default: false, + required: false, + help: undefined + }) + + super({ + option_strings, + dest, + const: true, + default: default_value, + required, + help + }) + } +}) + + +const _StoreFalseAction = _callable(class _StoreFalseAction extends _StoreConstAction { + + constructor() { + let [ + option_strings, + dest, + default_value, + required, + help + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: no_default, + default: true, + required: false, + help: undefined + }) + + super({ + option_strings, + dest, + const: false, + default: default_value, + required, + help + }) + } +}) + + +const _AppendAction = _callable(class _AppendAction extends Action { + + constructor() { + let [ + option_strings, + dest, + nargs, + const_value, + default_value, + type, + choices, + required, + help, + metavar + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: no_default, + nargs: undefined, + const: undefined, + default: undefined, + type: undefined, + choices: undefined, + required: false, + help: undefined, + metavar: undefined + }) + + if (nargs === 0) { + throw new TypeError('nargs for append actions must be != 0; if arg ' + + 'strings are not supplying the value to append, ' + + 'the append const action may be more appropriate') + } + if (const_value !== undefined && nargs !== OPTIONAL) { + throw new TypeError(sub('nargs must be %r to supply const', OPTIONAL)) + } + super({ + option_strings, + dest, + nargs, + const: const_value, + default: default_value, + type, + choices, + required, + help, + metavar + }) + } + + call(parser, namespace, values/*, option_string = 
undefined*/) { + let items = getattr(namespace, this.dest, undefined) + items = _copy_items(items) + items.push(values) + setattr(namespace, this.dest, items) + } +}) + + +const _AppendConstAction = _callable(class _AppendConstAction extends Action { + + constructor() { + let [ + option_strings, + dest, + const_value, + default_value, + required, + help, + metavar + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: no_default, + const: no_default, + default: undefined, + required: false, + help: undefined, + metavar: undefined + }) + + super({ + option_strings, + dest, + nargs: 0, + const: const_value, + default: default_value, + required, + help, + metavar + }) + } + + call(parser, namespace/*, values, option_string = undefined*/) { + let items = getattr(namespace, this.dest, undefined) + items = _copy_items(items) + items.push(this.const) + setattr(namespace, this.dest, items) + } +}) + + +const _CountAction = _callable(class _CountAction extends Action { + + constructor() { + let [ + option_strings, + dest, + default_value, + required, + help + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: no_default, + default: undefined, + required: false, + help: undefined + }) + + super({ + option_strings, + dest, + nargs: 0, + default: default_value, + required, + help + }) + } + + call(parser, namespace/*, values, option_string = undefined*/) { + let count = getattr(namespace, this.dest, undefined) + if (count === undefined) { + count = 0 + } + setattr(namespace, this.dest, count + 1) + } +}) + + +const _HelpAction = _callable(class _HelpAction extends Action { + + constructor() { + let [ + option_strings, + dest, + default_value, + help + ] = _parse_opts(arguments, { + option_strings: no_default, + dest: SUPPRESS, + default: SUPPRESS, + help: undefined + }) + + super({ + option_strings, + dest, + default: default_value, + nargs: 0, + help + }) + } + + call(parser/*, namespace, values, option_string = undefined*/) { + 
parser.print_help() + parser.exit() + } +}) + + +const _VersionAction = _callable(class _VersionAction extends Action { + + constructor() { + let [ + option_strings, + version, + dest, + default_value, + help + ] = _parse_opts(arguments, { + option_strings: no_default, + version: undefined, + dest: SUPPRESS, + default: SUPPRESS, + help: "show program's version number and exit" + }) + + super({ + option_strings, + dest, + default: default_value, + nargs: 0, + help + }) + this.version = version + } + + call(parser/*, namespace, values, option_string = undefined*/) { + let version = this.version + if (version === undefined) { + version = parser.version + } + let formatter = parser._get_formatter() + formatter.add_text(version) + parser._print_message(formatter.format_help(), process.stdout) + parser.exit() + } +}) + + +const _SubParsersAction = _camelcase_alias(_callable(class _SubParsersAction extends Action { + + constructor() { + let [ + option_strings, + prog, + parser_class, + dest, + required, + help, + metavar + ] = _parse_opts(arguments, { + option_strings: no_default, + prog: no_default, + parser_class: no_default, + dest: SUPPRESS, + required: false, + help: undefined, + metavar: undefined + }) + + let name_parser_map = {} + + super({ + option_strings, + dest, + nargs: PARSER, + choices: name_parser_map, + required, + help, + metavar + }) + + this._prog_prefix = prog + this._parser_class = parser_class + this._name_parser_map = name_parser_map + this._choices_actions = [] + } + + add_parser() { + let [ + name, + kwargs + ] = _parse_opts(arguments, { + name: no_default, + '**kwargs': no_default + }) + + // set prog from the existing prefix + if (kwargs.prog === undefined) { + kwargs.prog = sub('%s %s', this._prog_prefix, name) + } + + let aliases = getattr(kwargs, 'aliases', []) + delete kwargs.aliases + + // create a pseudo-action to hold the choice help + if ('help' in kwargs) { + let help = kwargs.help + delete kwargs.help + let choice_action = 
this._ChoicesPseudoAction(name, aliases, help) + this._choices_actions.push(choice_action) + } + + // create the parser and add it to the map + let parser = new this._parser_class(kwargs) + this._name_parser_map[name] = parser + + // make parser available under aliases also + for (let alias of aliases) { + this._name_parser_map[alias] = parser + } + + return parser + } + + _get_subactions() { + return this._choices_actions + } + + call(parser, namespace, values/*, option_string = undefined*/) { + let parser_name = values[0] + let arg_strings = values.slice(1) + + // set the parser name if requested + if (this.dest !== SUPPRESS) { + setattr(namespace, this.dest, parser_name) + } + + // select the parser + if (hasattr(this._name_parser_map, parser_name)) { + parser = this._name_parser_map[parser_name] + } else { + let args = {parser_name, + choices: this._name_parser_map.join(', ')} + let msg = sub('unknown parser %(parser_name)r (choices: %(choices)s)', args) + throw new ArgumentError(this, msg) + } + + // parse all the remaining options into the namespace + // store any unrecognized options on the object, so that the top + // level parser can decide what to do with them + + // In case this subparser defines new defaults, we parse them + // in a new namespace object and then update the original + // namespace for the relevant parts. 
+ let subnamespace + [ subnamespace, arg_strings ] = parser.parse_known_args(arg_strings, undefined) + for (let [ key, value ] of Object.entries(subnamespace)) { + setattr(namespace, key, value) + } + + if (arg_strings.length) { + setdefault(namespace, _UNRECOGNIZED_ARGS_ATTR, []) + getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).push(...arg_strings) + } + } +})) + + +_SubParsersAction.prototype._ChoicesPseudoAction = _callable(class _ChoicesPseudoAction extends Action { + constructor(name, aliases, help) { + let metavar = name, dest = name + if (aliases.length) { + metavar += sub(' (%s)', aliases.join(', ')) + } + super({ option_strings: [], dest, help, metavar }) + } +}) + + +const _ExtendAction = _callable(class _ExtendAction extends _AppendAction { + call(parser, namespace, values/*, option_string = undefined*/) { + let items = getattr(namespace, this.dest, undefined) + items = _copy_items(items) + items = items.concat(values) + setattr(namespace, this.dest, items) + } +}) + + +// ============== +// Type classes +// ============== +const FileType = _callable(class FileType extends Function { + /* + * Factory for creating file object types + * + * Instances of FileType are typically passed as type= arguments to the + * ArgumentParser add_argument() method. + * + * Keyword Arguments: + * - mode -- A string indicating how the file is to be opened. Accepts the + * same values as the builtin open() function. + * - bufsize -- The file's desired buffer size. Accepts the same values as + * the builtin open() function. + * - encoding -- The file's encoding. Accepts the same values as the + * builtin open() function. + * - errors -- A string indicating how encoding and decoding errors are to + * be handled. Accepts the same value as the builtin open() function. 
+ */ + + constructor() { + let [ + flags, + encoding, + mode, + autoClose, + emitClose, + start, + end, + highWaterMark, + fs + ] = _parse_opts(arguments, { + flags: 'r', + encoding: undefined, + mode: undefined, // 0o666 + autoClose: undefined, // true + emitClose: undefined, // false + start: undefined, // 0 + end: undefined, // Infinity + highWaterMark: undefined, // 64 * 1024 + fs: undefined + }) + + // when this class is called as a function, redirect it to .call() method of itself + super('return arguments.callee.call.apply(arguments.callee, arguments)') + + Object.defineProperty(this, 'name', { + get() { + return sub('FileType(%r)', flags) + } + }) + this._flags = flags + this._options = {} + if (encoding !== undefined) this._options.encoding = encoding + if (mode !== undefined) this._options.mode = mode + if (autoClose !== undefined) this._options.autoClose = autoClose + if (emitClose !== undefined) this._options.emitClose = emitClose + if (start !== undefined) this._options.start = start + if (end !== undefined) this._options.end = end + if (highWaterMark !== undefined) this._options.highWaterMark = highWaterMark + if (fs !== undefined) this._options.fs = fs + } + + call(string) { + // the special argument "-" means sys.std{in,out} + if (string === '-') { + if (this._flags.includes('r')) { + return process.stdin + } else if (this._flags.includes('w')) { + return process.stdout + } else { + let msg = sub('argument "-" with mode %r', this._flags) + throw new TypeError(msg) + } + } + + // all other arguments are used as file names + let fd + try { + fd = fs.openSync(string, this._flags, this._options.mode) + } catch (e) { + let args = { filename: string, error: e.message } + let message = "can't open '%(filename)s': %(error)s" + throw new ArgumentTypeError(sub(message, args)) + } + + let options = Object.assign({ fd, flags: this._flags }, this._options) + if (this._flags.includes('r')) { + return fs.createReadStream(undefined, options) + } else if 
(this._flags.includes('w')) { + return fs.createWriteStream(undefined, options) + } else { + let msg = sub('argument "%s" with mode %r', string, this._flags) + throw new TypeError(msg) + } + } + + [util.inspect.custom]() { + let args = [ this._flags ] + let kwargs = Object.entries(this._options).map(([ k, v ]) => { + if (k === 'mode') v = { value: v, [util.inspect.custom]() { return '0o' + this.value.toString(8) } } + return [ k, v ] + }) + let args_str = [] + .concat(args.filter(arg => arg !== -1).map(repr)) + .concat(kwargs.filter(([/*kw*/, arg]) => arg !== undefined) + .map(([kw, arg]) => sub('%s=%r', kw, arg))) + .join(', ') + return sub('%s(%s)', this.constructor.name, args_str) + } + + toString() { + return this[util.inspect.custom]() + } +}) + +// =========================== +// Optional and Positional Parsing +// =========================== +const Namespace = _callable(class Namespace extends _AttributeHolder() { + /* + * Simple object for storing attributes. + * + * Implements equality by attribute names and values, and provides a simple + * string representation. 
+ */ + + constructor(options = {}) { + super() + Object.assign(this, options) + } +}) + +// unset string tag to mimic plain object +Namespace.prototype[Symbol.toStringTag] = undefined + + +const _ActionsContainer = _camelcase_alias(_callable(class _ActionsContainer { + + constructor() { + let [ + description, + prefix_chars, + argument_default, + conflict_handler + ] = _parse_opts(arguments, { + description: no_default, + prefix_chars: no_default, + argument_default: no_default, + conflict_handler: no_default + }) + + this.description = description + this.argument_default = argument_default + this.prefix_chars = prefix_chars + this.conflict_handler = conflict_handler + + // set up registries + this._registries = {} + + // register actions + this.register('action', undefined, _StoreAction) + this.register('action', 'store', _StoreAction) + this.register('action', 'store_const', _StoreConstAction) + this.register('action', 'store_true', _StoreTrueAction) + this.register('action', 'store_false', _StoreFalseAction) + this.register('action', 'append', _AppendAction) + this.register('action', 'append_const', _AppendConstAction) + this.register('action', 'count', _CountAction) + this.register('action', 'help', _HelpAction) + this.register('action', 'version', _VersionAction) + this.register('action', 'parsers', _SubParsersAction) + this.register('action', 'extend', _ExtendAction) + // LEGACY (v1 compatibility): camelcase variants + ;[ 'storeConst', 'storeTrue', 'storeFalse', 'appendConst' ].forEach(old_name => { + let new_name = _to_new_name(old_name) + this.register('action', old_name, util.deprecate(this._registry_get('action', new_name), + sub('{action: "%s"} is renamed to {action: "%s"}', old_name, new_name))) + }) + // end + + // raise an exception if the conflict handler is invalid + this._get_handler() + + // action storage + this._actions = [] + this._option_string_actions = {} + + // groups + this._action_groups = [] + this._mutually_exclusive_groups = [] + + // 
defaults storage + this._defaults = {} + + // determines whether an "option" looks like a negative number + this._negative_number_matcher = /^-\d+$|^-\d*\.\d+$/ + + // whether or not there are any optionals that look like negative + // numbers -- uses a list so it can be shared and edited + this._has_negative_number_optionals = [] + } + + // ==================== + // Registration methods + // ==================== + register(registry_name, value, object) { + let registry = setdefault(this._registries, registry_name, {}) + registry[value] = object + } + + _registry_get(registry_name, value, default_value = undefined) { + return getattr(this._registries[registry_name], value, default_value) + } + + // ================================== + // Namespace default accessor methods + // ================================== + set_defaults(kwargs) { + Object.assign(this._defaults, kwargs) + + // if these defaults match any existing arguments, replace + // the previous default on the object with the new one + for (let action of this._actions) { + if (action.dest in kwargs) { + action.default = kwargs[action.dest] + } + } + } + + get_default(dest) { + for (let action of this._actions) { + if (action.dest === dest && action.default !== undefined) { + return action.default + } + } + return this._defaults[dest] + } + + + // ======================= + // Adding argument actions + // ======================= + add_argument() { + /* + * add_argument(dest, ..., name=value, ...) + * add_argument(option_string, option_string, ..., name=value, ...) + */ + let [ + args, + kwargs + ] = _parse_opts(arguments, { + '*args': no_default, + '**kwargs': no_default + }) + // LEGACY (v1 compatibility), old-style add_argument([ args ], { options }) + if (args.length === 1 && Array.isArray(args[0])) { + args = args[0] + deprecate('argument-array', + sub('use add_argument(%(args)s, {...}) instead of add_argument([ %(args)s ], { ... 
})', { + args: args.map(repr).join(', ') + })) + } + // end + + // if no positional args are supplied or only one is supplied and + // it doesn't look like an option string, parse a positional + // argument + let chars = this.prefix_chars + if (!args.length || args.length === 1 && !chars.includes(args[0][0])) { + if (args.length && 'dest' in kwargs) { + throw new TypeError('dest supplied twice for positional argument') + } + kwargs = this._get_positional_kwargs(...args, kwargs) + + // otherwise, we're adding an optional argument + } else { + kwargs = this._get_optional_kwargs(...args, kwargs) + } + + // if no default was supplied, use the parser-level default + if (!('default' in kwargs)) { + let dest = kwargs.dest + if (dest in this._defaults) { + kwargs.default = this._defaults[dest] + } else if (this.argument_default !== undefined) { + kwargs.default = this.argument_default + } + } + + // create the action object, and add it to the parser + let action_class = this._pop_action_class(kwargs) + if (typeof action_class !== 'function') { + throw new TypeError(sub('unknown action "%s"', action_class)) + } + // eslint-disable-next-line new-cap + let action = new action_class(kwargs) + + // raise an error if the action type is not callable + let type_func = this._registry_get('type', action.type, action.type) + if (typeof type_func !== 'function') { + throw new TypeError(sub('%r is not callable', type_func)) + } + + if (type_func === FileType) { + throw new TypeError(sub('%r is a FileType class object, instance of it' + + ' must be passed', type_func)) + } + + // raise an error if the metavar does not match the type + if ('_get_formatter' in this) { + try { + this._get_formatter()._format_args(action, undefined) + } catch (err) { + // check for 'invalid nargs value' is an artifact of TypeError and ValueError in js being the same + if (err instanceof TypeError && err.message !== 'invalid nargs value') { + throw new TypeError('length of metavar tuple does not match 
nargs') + } else { + throw err + } + } + } + + return this._add_action(action) + } + + add_argument_group() { + let group = _ArgumentGroup(this, ...arguments) + this._action_groups.push(group) + return group + } + + add_mutually_exclusive_group() { + // eslint-disable-next-line no-use-before-define + let group = _MutuallyExclusiveGroup(this, ...arguments) + this._mutually_exclusive_groups.push(group) + return group + } + + _add_action(action) { + // resolve any conflicts + this._check_conflict(action) + + // add to actions list + this._actions.push(action) + action.container = this + + // index the action by any option strings it has + for (let option_string of action.option_strings) { + this._option_string_actions[option_string] = action + } + + // set the flag if any option strings look like negative numbers + for (let option_string of action.option_strings) { + if (this._negative_number_matcher.test(option_string)) { + if (!this._has_negative_number_optionals.length) { + this._has_negative_number_optionals.push(true) + } + } + } + + // return the created action + return action + } + + _remove_action(action) { + _array_remove(this._actions, action) + } + + _add_container_actions(container) { + // collect groups by titles + let title_group_map = {} + for (let group of this._action_groups) { + if (group.title in title_group_map) { + let msg = 'cannot merge actions - two groups are named %r' + throw new TypeError(sub(msg, group.title)) + } + title_group_map[group.title] = group + } + + // map each action to its group + let group_map = new Map() + for (let group of container._action_groups) { + + // if a group with the title exists, use that, otherwise + // create a new group matching the container's group + if (!(group.title in title_group_map)) { + title_group_map[group.title] = this.add_argument_group({ + title: group.title, + description: group.description, + conflict_handler: group.conflict_handler + }) + } + + // map the actions to their new group + for (let 
action of group._group_actions) { + group_map.set(action, title_group_map[group.title]) + } + } + + // add container's mutually exclusive groups + // NOTE: if add_mutually_exclusive_group ever gains title= and + // description= then this code will need to be expanded as above + for (let group of container._mutually_exclusive_groups) { + let mutex_group = this.add_mutually_exclusive_group({ + required: group.required + }) + + // map the actions to their new mutex group + for (let action of group._group_actions) { + group_map.set(action, mutex_group) + } + } + + // add all actions to this container or their group + for (let action of container._actions) { + group_map.get(action)._add_action(action) + } + } + + _get_positional_kwargs() { + let [ + dest, + kwargs + ] = _parse_opts(arguments, { + dest: no_default, + '**kwargs': no_default + }) + + // make sure required is not specified + if ('required' in kwargs) { + let msg = "'required' is an invalid argument for positionals" + throw new TypeError(msg) + } + + // mark positional arguments as required if at least one is + // always required + if (![OPTIONAL, ZERO_OR_MORE].includes(kwargs.nargs)) { + kwargs.required = true + } + if (kwargs.nargs === ZERO_OR_MORE && !('default' in kwargs)) { + kwargs.required = true + } + + // return the keyword arguments with no option strings + return Object.assign(kwargs, { dest, option_strings: [] }) + } + + _get_optional_kwargs() { + let [ + args, + kwargs + ] = _parse_opts(arguments, { + '*args': no_default, + '**kwargs': no_default + }) + + // determine short and long option strings + let option_strings = [] + let long_option_strings = [] + let option_string + for (option_string of args) { + // error on strings that don't start with an appropriate prefix + if (!this.prefix_chars.includes(option_string[0])) { + let args = {option: option_string, + prefix_chars: this.prefix_chars} + let msg = 'invalid option string %(option)r: ' + + 'must start with a character %(prefix_chars)r' + 
throw new TypeError(sub(msg, args)) + } + + // strings starting with two prefix characters are long options + option_strings.push(option_string) + if (option_string.length > 1 && this.prefix_chars.includes(option_string[1])) { + long_option_strings.push(option_string) + } + } + + // infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x' + let dest = kwargs.dest + delete kwargs.dest + if (dest === undefined) { + let dest_option_string + if (long_option_strings.length) { + dest_option_string = long_option_strings[0] + } else { + dest_option_string = option_strings[0] + } + dest = _string_lstrip(dest_option_string, this.prefix_chars) + if (!dest) { + let msg = 'dest= is required for options like %r' + throw new TypeError(sub(msg, option_string)) + } + dest = dest.replace(/-/g, '_') + } + + // return the updated keyword arguments + return Object.assign(kwargs, { dest, option_strings }) + } + + _pop_action_class(kwargs, default_value = undefined) { + let action = getattr(kwargs, 'action', default_value) + delete kwargs.action + return this._registry_get('action', action, action) + } + + _get_handler() { + // determine function from conflict handler string + let handler_func_name = sub('_handle_conflict_%s', this.conflict_handler) + if (typeof this[handler_func_name] === 'function') { + return this[handler_func_name] + } else { + let msg = 'invalid conflict_resolution value: %r' + throw new TypeError(sub(msg, this.conflict_handler)) + } + } + + _check_conflict(action) { + + // find all options that conflict with this option + let confl_optionals = [] + for (let option_string of action.option_strings) { + if (hasattr(this._option_string_actions, option_string)) { + let confl_optional = this._option_string_actions[option_string] + confl_optionals.push([ option_string, confl_optional ]) + } + } + + // resolve any conflicts + if (confl_optionals.length) { + let conflict_handler = this._get_handler() + conflict_handler.call(this, action, confl_optionals) + } + } + + 
_handle_conflict_error(action, conflicting_actions) { + let message = conflicting_actions.length === 1 ? + 'conflicting option string: %s' : + 'conflicting option strings: %s' + let conflict_string = conflicting_actions.map(([ option_string/*, action*/ ]) => option_string).join(', ') + throw new ArgumentError(action, sub(message, conflict_string)) + } + + _handle_conflict_resolve(action, conflicting_actions) { + + // remove all conflicting options + for (let [ option_string, action ] of conflicting_actions) { + + // remove the conflicting option + _array_remove(action.option_strings, option_string) + delete this._option_string_actions[option_string] + + // if the option now has no option string, remove it from the + // container holding it + if (!action.option_strings.length) { + action.container._remove_action(action) + } + } + } +})) + + +const _ArgumentGroup = _callable(class _ArgumentGroup extends _ActionsContainer { + + constructor() { + let [ + container, + title, + description, + kwargs + ] = _parse_opts(arguments, { + container: no_default, + title: undefined, + description: undefined, + '**kwargs': no_default + }) + + // add any missing keyword arguments by checking the container + setdefault(kwargs, 'conflict_handler', container.conflict_handler) + setdefault(kwargs, 'prefix_chars', container.prefix_chars) + setdefault(kwargs, 'argument_default', container.argument_default) + super(Object.assign({ description }, kwargs)) + + // group attributes + this.title = title + this._group_actions = [] + + // share most attributes with the container + this._registries = container._registries + this._actions = container._actions + this._option_string_actions = container._option_string_actions + this._defaults = container._defaults + this._has_negative_number_optionals = + container._has_negative_number_optionals + this._mutually_exclusive_groups = container._mutually_exclusive_groups + } + + _add_action(action) { + action = super._add_action(action) + 
this._group_actions.push(action) + return action + } + + _remove_action(action) { + super._remove_action(action) + _array_remove(this._group_actions, action) + } +}) + + +const _MutuallyExclusiveGroup = _callable(class _MutuallyExclusiveGroup extends _ArgumentGroup { + + constructor() { + let [ + container, + required + ] = _parse_opts(arguments, { + container: no_default, + required: false + }) + + super(container) + this.required = required + this._container = container + } + + _add_action(action) { + if (action.required) { + let msg = 'mutually exclusive arguments must be optional' + throw new TypeError(msg) + } + action = this._container._add_action(action) + this._group_actions.push(action) + return action + } + + _remove_action(action) { + this._container._remove_action(action) + _array_remove(this._group_actions, action) + } +}) + + +const ArgumentParser = _camelcase_alias(_callable(class ArgumentParser extends _AttributeHolder(_ActionsContainer) { + /* + * Object for parsing command line strings into Python objects. 
+ * + * Keyword Arguments: + * - prog -- The name of the program (default: sys.argv[0]) + * - usage -- A usage message (default: auto-generated from arguments) + * - description -- A description of what the program does + * - epilog -- Text following the argument descriptions + * - parents -- Parsers whose arguments should be copied into this one + * - formatter_class -- HelpFormatter class for printing help messages + * - prefix_chars -- Characters that prefix optional arguments + * - fromfile_prefix_chars -- Characters that prefix files containing + * additional arguments + * - argument_default -- The default value for all arguments + * - conflict_handler -- String indicating how to handle conflicts + * - add_help -- Add a -h/-help option + * - allow_abbrev -- Allow long options to be abbreviated unambiguously + * - exit_on_error -- Determines whether or not ArgumentParser exits with + * error info when an error occurs + */ + + constructor() { + let [ + prog, + usage, + description, + epilog, + parents, + formatter_class, + prefix_chars, + fromfile_prefix_chars, + argument_default, + conflict_handler, + add_help, + allow_abbrev, + exit_on_error, + debug, // LEGACY (v1 compatibility), debug mode + version // LEGACY (v1 compatibility), version + ] = _parse_opts(arguments, { + prog: undefined, + usage: undefined, + description: undefined, + epilog: undefined, + parents: [], + formatter_class: HelpFormatter, + prefix_chars: '-', + fromfile_prefix_chars: undefined, + argument_default: undefined, + conflict_handler: 'error', + add_help: true, + allow_abbrev: true, + exit_on_error: true, + debug: undefined, // LEGACY (v1 compatibility), debug mode + version: undefined // LEGACY (v1 compatibility), version + }) + + // LEGACY (v1 compatibility) + if (debug !== undefined) { + deprecate('debug', + 'The "debug" argument to ArgumentParser is deprecated. Please ' + + 'override ArgumentParser.exit function instead.' 
+ ) + } + + if (version !== undefined) { + deprecate('version', + 'The "version" argument to ArgumentParser is deprecated. Please use ' + + "add_argument(..., { action: 'version', version: 'N', ... }) instead." + ) + } + // end + + super({ + description, + prefix_chars, + argument_default, + conflict_handler + }) + + // default setting for prog + if (prog === undefined) { + prog = path.basename(get_argv()[0] || '') + } + + this.prog = prog + this.usage = usage + this.epilog = epilog + this.formatter_class = formatter_class + this.fromfile_prefix_chars = fromfile_prefix_chars + this.add_help = add_help + this.allow_abbrev = allow_abbrev + this.exit_on_error = exit_on_error + // LEGACY (v1 compatibility), debug mode + this.debug = debug + // end + + this._positionals = this.add_argument_group('positional arguments') + this._optionals = this.add_argument_group('optional arguments') + this._subparsers = undefined + + // register types + function identity(string) { + return string + } + this.register('type', undefined, identity) + this.register('type', null, identity) + this.register('type', 'auto', identity) + this.register('type', 'int', function (x) { + let result = Number(x) + if (!Number.isInteger(result)) { + throw new TypeError(sub('could not convert string to int: %r', x)) + } + return result + }) + this.register('type', 'float', function (x) { + let result = Number(x) + if (isNaN(result)) { + throw new TypeError(sub('could not convert string to float: %r', x)) + } + return result + }) + this.register('type', 'str', String) + // LEGACY (v1 compatibility): custom types + this.register('type', 'string', + util.deprecate(String, 'use {type:"str"} or {type:String} instead of {type:"string"}')) + // end + + // add help argument if necessary + // (using explicit default to override global argument_default) + let default_prefix = prefix_chars.includes('-') ? 
'-' : prefix_chars[0] + if (this.add_help) { + this.add_argument( + default_prefix + 'h', + default_prefix.repeat(2) + 'help', + { + action: 'help', + default: SUPPRESS, + help: 'show this help message and exit' + } + ) + } + // LEGACY (v1 compatibility), version + if (version) { + this.add_argument( + default_prefix + 'v', + default_prefix.repeat(2) + 'version', + { + action: 'version', + default: SUPPRESS, + version: this.version, + help: "show program's version number and exit" + } + ) + } + // end + + // add parent arguments and defaults + for (let parent of parents) { + this._add_container_actions(parent) + Object.assign(this._defaults, parent._defaults) + } + } + + // ======================= + // Pretty __repr__ methods + // ======================= + _get_kwargs() { + let names = [ + 'prog', + 'usage', + 'description', + 'formatter_class', + 'conflict_handler', + 'add_help' + ] + return names.map(name => [ name, getattr(this, name) ]) + } + + // ================================== + // Optional/Positional adding methods + // ================================== + add_subparsers() { + let [ + kwargs + ] = _parse_opts(arguments, { + '**kwargs': no_default + }) + + if (this._subparsers !== undefined) { + this.error('cannot have multiple subparser arguments') + } + + // add the parser class to the arguments if it's not present + setdefault(kwargs, 'parser_class', this.constructor) + + if ('title' in kwargs || 'description' in kwargs) { + let title = getattr(kwargs, 'title', 'subcommands') + let description = getattr(kwargs, 'description', undefined) + delete kwargs.title + delete kwargs.description + this._subparsers = this.add_argument_group(title, description) + } else { + this._subparsers = this._positionals + } + + // prog defaults to the usage message of this parser, skipping + // optional arguments and with no "usage:" prefix + if (kwargs.prog === undefined) { + let formatter = this._get_formatter() + let positionals = this._get_positional_actions() + let 
groups = this._mutually_exclusive_groups + formatter.add_usage(this.usage, positionals, groups, '') + kwargs.prog = formatter.format_help().trim() + } + + // create the parsers action and add it to the positionals list + let parsers_class = this._pop_action_class(kwargs, 'parsers') + // eslint-disable-next-line new-cap + let action = new parsers_class(Object.assign({ option_strings: [] }, kwargs)) + this._subparsers._add_action(action) + + // return the created parsers action + return action + } + + _add_action(action) { + if (action.option_strings.length) { + this._optionals._add_action(action) + } else { + this._positionals._add_action(action) + } + return action + } + + _get_optional_actions() { + return this._actions.filter(action => action.option_strings.length) + } + + _get_positional_actions() { + return this._actions.filter(action => !action.option_strings.length) + } + + // ===================================== + // Command line argument parsing methods + // ===================================== + parse_args(args = undefined, namespace = undefined) { + let argv + [ args, argv ] = this.parse_known_args(args, namespace) + if (argv && argv.length > 0) { + let msg = 'unrecognized arguments: %s' + this.error(sub(msg, argv.join(' '))) + } + return args + } + + parse_known_args(args = undefined, namespace = undefined) { + if (args === undefined) { + args = get_argv().slice(1) + } + + // default Namespace built from parser defaults + if (namespace === undefined) { + namespace = new Namespace() + } + + // add any action defaults that aren't present + for (let action of this._actions) { + if (action.dest !== SUPPRESS) { + if (!hasattr(namespace, action.dest)) { + if (action.default !== SUPPRESS) { + setattr(namespace, action.dest, action.default) + } + } + } + } + + // add any parser defaults that aren't present + for (let dest of Object.keys(this._defaults)) { + if (!hasattr(namespace, dest)) { + setattr(namespace, dest, this._defaults[dest]) + } + } + + // parse 
the arguments and exit if there are any errors + if (this.exit_on_error) { + try { + [ namespace, args ] = this._parse_known_args(args, namespace) + } catch (err) { + if (err instanceof ArgumentError) { + this.error(err.message) + } else { + throw err + } + } + } else { + [ namespace, args ] = this._parse_known_args(args, namespace) + } + + if (hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR)) { + args = args.concat(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR)) + delattr(namespace, _UNRECOGNIZED_ARGS_ATTR) + } + + return [ namespace, args ] + } + + _parse_known_args(arg_strings, namespace) { + // replace arg strings that are file references + if (this.fromfile_prefix_chars !== undefined) { + arg_strings = this._read_args_from_files(arg_strings) + } + + // map all mutually exclusive arguments to the other arguments + // they can't occur with + let action_conflicts = new Map() + for (let mutex_group of this._mutually_exclusive_groups) { + let group_actions = mutex_group._group_actions + for (let [ i, mutex_action ] of Object.entries(mutex_group._group_actions)) { + let conflicts = action_conflicts.get(mutex_action) || [] + conflicts = conflicts.concat(group_actions.slice(0, +i)) + conflicts = conflicts.concat(group_actions.slice(+i + 1)) + action_conflicts.set(mutex_action, conflicts) + } + } + + // find all option indices, and determine the arg_string_pattern + // which has an 'O' if there is an option at an index, + // an 'A' if there is an argument, or a '-' if there is a '--' + let option_string_indices = {} + let arg_string_pattern_parts = [] + let arg_strings_iter = Object.entries(arg_strings)[Symbol.iterator]() + for (let [ i, arg_string ] of arg_strings_iter) { + + // all args after -- are non-options + if (arg_string === '--') { + arg_string_pattern_parts.push('-') + for ([ i, arg_string ] of arg_strings_iter) { + arg_string_pattern_parts.push('A') + } + + // otherwise, add the arg to the arg strings + // and note the index if it was an option + } else { + let 
option_tuple = this._parse_optional(arg_string) + let pattern + if (option_tuple === undefined) { + pattern = 'A' + } else { + option_string_indices[i] = option_tuple + pattern = 'O' + } + arg_string_pattern_parts.push(pattern) + } + } + + // join the pieces together to form the pattern + let arg_strings_pattern = arg_string_pattern_parts.join('') + + // converts arg strings to the appropriate and then takes the action + let seen_actions = new Set() + let seen_non_default_actions = new Set() + let extras + + let take_action = (action, argument_strings, option_string = undefined) => { + seen_actions.add(action) + let argument_values = this._get_values(action, argument_strings) + + // error if this argument is not allowed with other previously + // seen arguments, assuming that actions that use the default + // value don't really count as "present" + if (argument_values !== action.default) { + seen_non_default_actions.add(action) + for (let conflict_action of action_conflicts.get(action) || []) { + if (seen_non_default_actions.has(conflict_action)) { + let msg = 'not allowed with argument %s' + let action_name = _get_action_name(conflict_action) + throw new ArgumentError(action, sub(msg, action_name)) + } + } + } + + // take the action if we didn't receive a SUPPRESS value + // (e.g. from a default) + if (argument_values !== SUPPRESS) { + action(this, namespace, argument_values, option_string) + } + } + + // function to convert arg_strings into an optional action + let consume_optional = start_index => { + + // get the optional identified at this index + let option_tuple = option_string_indices[start_index] + let [ action, option_string, explicit_arg ] = option_tuple + + // identify additional optionals in the same arg string + // (e.g. 
-xyz is the same as -x -y -z if no args are required) + let action_tuples = [] + let stop + for (;;) { + + // if we found no optional action, skip it + if (action === undefined) { + extras.push(arg_strings[start_index]) + return start_index + 1 + } + + // if there is an explicit argument, try to match the + // optional's string arguments to only this + if (explicit_arg !== undefined) { + let arg_count = this._match_argument(action, 'A') + + // if the action is a single-dash option and takes no + // arguments, try to parse more single-dash options out + // of the tail of the option string + let chars = this.prefix_chars + if (arg_count === 0 && !chars.includes(option_string[1])) { + action_tuples.push([ action, [], option_string ]) + let char = option_string[0] + option_string = char + explicit_arg[0] + let new_explicit_arg = explicit_arg.slice(1) || undefined + let optionals_map = this._option_string_actions + if (hasattr(optionals_map, option_string)) { + action = optionals_map[option_string] + explicit_arg = new_explicit_arg + } else { + let msg = 'ignored explicit argument %r' + throw new ArgumentError(action, sub(msg, explicit_arg)) + } + + // if the action expect exactly one argument, we've + // successfully matched the option; exit the loop + } else if (arg_count === 1) { + stop = start_index + 1 + let args = [ explicit_arg ] + action_tuples.push([ action, args, option_string ]) + break + + // error if a double-dash option did not use the + // explicit argument + } else { + let msg = 'ignored explicit argument %r' + throw new ArgumentError(action, sub(msg, explicit_arg)) + } + + // if there is no explicit argument, try to match the + // optional's string arguments with the following strings + // if successful, exit the loop + } else { + let start = start_index + 1 + let selected_patterns = arg_strings_pattern.slice(start) + let arg_count = this._match_argument(action, selected_patterns) + stop = start + arg_count + let args = arg_strings.slice(start, stop) + 
action_tuples.push([ action, args, option_string ]) + break + } + } + + // add the Optional to the list and return the index at which + // the Optional's string args stopped + assert(action_tuples.length) + for (let [ action, args, option_string ] of action_tuples) { + take_action(action, args, option_string) + } + return stop + } + + // the list of Positionals left to be parsed; this is modified + // by consume_positionals() + let positionals = this._get_positional_actions() + + // function to convert arg_strings into positional actions + let consume_positionals = start_index => { + // match as many Positionals as possible + let selected_pattern = arg_strings_pattern.slice(start_index) + let arg_counts = this._match_arguments_partial(positionals, selected_pattern) + + // slice off the appropriate arg strings for each Positional + // and add the Positional and its args to the list + for (let i = 0; i < positionals.length && i < arg_counts.length; i++) { + let action = positionals[i] + let arg_count = arg_counts[i] + let args = arg_strings.slice(start_index, start_index + arg_count) + start_index += arg_count + take_action(action, args) + } + + // slice off the Positionals that we just parsed and return the + // index at which the Positionals' string args stopped + positionals = positionals.slice(arg_counts.length) + return start_index + } + + // consume Positionals and Optionals alternately, until we have + // passed the last option string + extras = [] + let start_index = 0 + let max_option_string_index = Math.max(-1, ...Object.keys(option_string_indices).map(Number)) + while (start_index <= max_option_string_index) { + + // consume any Positionals preceding the next option + let next_option_string_index = Math.min( + // eslint-disable-next-line no-loop-func + ...Object.keys(option_string_indices).map(Number).filter(index => index >= start_index) + ) + if (start_index !== next_option_string_index) { + let positionals_end_index = consume_positionals(start_index) + 
+ // only try to parse the next optional if we didn't consume + // the option string during the positionals parsing + if (positionals_end_index > start_index) { + start_index = positionals_end_index + continue + } else { + start_index = positionals_end_index + } + } + + // if we consumed all the positionals we could and we're not + // at the index of an option string, there were extra arguments + if (!(start_index in option_string_indices)) { + let strings = arg_strings.slice(start_index, next_option_string_index) + extras = extras.concat(strings) + start_index = next_option_string_index + } + + // consume the next optional and any arguments for it + start_index = consume_optional(start_index) + } + + // consume any positionals following the last Optional + let stop_index = consume_positionals(start_index) + + // if we didn't consume all the argument strings, there were extras + extras = extras.concat(arg_strings.slice(stop_index)) + + // make sure all required actions were present and also convert + // action defaults which were not given as arguments + let required_actions = [] + for (let action of this._actions) { + if (!seen_actions.has(action)) { + if (action.required) { + required_actions.push(_get_action_name(action)) + } else { + // Convert action default now instead of doing it before + // parsing arguments to avoid calling convert functions + // twice (which may fail) if the argument was given, but + // only if it was defined already in the namespace + if (action.default !== undefined && + typeof action.default === 'string' && + hasattr(namespace, action.dest) && + action.default === getattr(namespace, action.dest)) { + setattr(namespace, action.dest, + this._get_value(action, action.default)) + } + } + } + } + + if (required_actions.length) { + this.error(sub('the following arguments are required: %s', + required_actions.join(', '))) + } + + // make sure all required groups had one option present + for (let group of this._mutually_exclusive_groups) { + 
if (group.required) { + let no_actions_used = true + for (let action of group._group_actions) { + if (seen_non_default_actions.has(action)) { + no_actions_used = false + break + } + } + + // if no actions were used, report the error + if (no_actions_used) { + let names = group._group_actions + .filter(action => action.help !== SUPPRESS) + .map(action => _get_action_name(action)) + let msg = 'one of the arguments %s is required' + this.error(sub(msg, names.join(' '))) + } + } + } + + // return the updated namespace and the extra arguments + return [ namespace, extras ] + } + + _read_args_from_files(arg_strings) { + // expand arguments referencing files + let new_arg_strings = [] + for (let arg_string of arg_strings) { + + // for regular arguments, just add them back into the list + if (!arg_string || !this.fromfile_prefix_chars.includes(arg_string[0])) { + new_arg_strings.push(arg_string) + + // replace arguments referencing files with the file content + } else { + try { + let args_file = fs.readFileSync(arg_string.slice(1), 'utf8') + let arg_strings = [] + for (let arg_line of splitlines(args_file)) { + for (let arg of this.convert_arg_line_to_args(arg_line)) { + arg_strings.push(arg) + } + } + arg_strings = this._read_args_from_files(arg_strings) + new_arg_strings = new_arg_strings.concat(arg_strings) + } catch (err) { + this.error(err.message) + } + } + } + + // return the modified argument list + return new_arg_strings + } + + convert_arg_line_to_args(arg_line) { + return [arg_line] + } + + _match_argument(action, arg_strings_pattern) { + // match the pattern for this action to the arg strings + let nargs_pattern = this._get_nargs_pattern(action) + let match = arg_strings_pattern.match(new RegExp('^' + nargs_pattern)) + + // raise an exception if we weren't able to find a match + if (match === null) { + let nargs_errors = { + undefined: 'expected one argument', + [OPTIONAL]: 'expected at most one argument', + [ONE_OR_MORE]: 'expected at least one argument' + } + 
let msg = nargs_errors[action.nargs] + if (msg === undefined) { + msg = sub(action.nargs === 1 ? 'expected %s argument' : 'expected %s arguments', action.nargs) + } + throw new ArgumentError(action, msg) + } + + // return the number of arguments matched + return match[1].length + } + + _match_arguments_partial(actions, arg_strings_pattern) { + // progressively shorten the actions list by slicing off the + // final actions until we find a match + let result = [] + for (let i of range(actions.length, 0, -1)) { + let actions_slice = actions.slice(0, i) + let pattern = actions_slice.map(action => this._get_nargs_pattern(action)).join('') + let match = arg_strings_pattern.match(new RegExp('^' + pattern)) + if (match !== null) { + result = result.concat(match.slice(1).map(string => string.length)) + break + } + } + + // return the list of arg string counts + return result + } + + _parse_optional(arg_string) { + // if it's an empty string, it was meant to be a positional + if (!arg_string) { + return undefined + } + + // if it doesn't start with a prefix, it was meant to be positional + if (!this.prefix_chars.includes(arg_string[0])) { + return undefined + } + + // if the option string is present in the parser, return the action + if (arg_string in this._option_string_actions) { + let action = this._option_string_actions[arg_string] + return [ action, arg_string, undefined ] + } + + // if it's just a single character, it was meant to be positional + if (arg_string.length === 1) { + return undefined + } + + // if the option string before the "=" is present, return the action + if (arg_string.includes('=')) { + let [ option_string, explicit_arg ] = _string_split(arg_string, '=', 1) + if (option_string in this._option_string_actions) { + let action = this._option_string_actions[option_string] + return [ action, option_string, explicit_arg ] + } + } + + // search through all possible prefixes of the option string + // and all actions in the parser for possible interpretations 
+ let option_tuples = this._get_option_tuples(arg_string) + + // if multiple actions match, the option string was ambiguous + if (option_tuples.length > 1) { + let options = option_tuples.map(([ /*action*/, option_string/*, explicit_arg*/ ]) => option_string).join(', ') + let args = {option: arg_string, matches: options} + let msg = 'ambiguous option: %(option)s could match %(matches)s' + this.error(sub(msg, args)) + + // if exactly one action matched, this segmentation is good, + // so return the parsed action + } else if (option_tuples.length === 1) { + let [ option_tuple ] = option_tuples + return option_tuple + } + + // if it was not found as an option, but it looks like a negative + // number, it was meant to be positional + // unless there are negative-number-like options + if (this._negative_number_matcher.test(arg_string)) { + if (!this._has_negative_number_optionals.length) { + return undefined + } + } + + // if it contains a space, it was meant to be a positional + if (arg_string.includes(' ')) { + return undefined + } + + // it was meant to be an optional but there is no such option + // in this parser (though it might be a valid option in a subparser) + return [ undefined, arg_string, undefined ] + } + + _get_option_tuples(option_string) { + let result = [] + + // option strings starting with two prefix characters are only + // split at the '=' + let chars = this.prefix_chars + if (chars.includes(option_string[0]) && chars.includes(option_string[1])) { + if (this.allow_abbrev) { + let option_prefix, explicit_arg + if (option_string.includes('=')) { + [ option_prefix, explicit_arg ] = _string_split(option_string, '=', 1) + } else { + option_prefix = option_string + explicit_arg = undefined + } + for (let option_string of Object.keys(this._option_string_actions)) { + if (option_string.startsWith(option_prefix)) { + let action = this._option_string_actions[option_string] + let tup = [ action, option_string, explicit_arg ] + result.push(tup) + } + } + } + + 
// single character options can be concatenated with their arguments + // but multiple character options always have to have their argument + // separate + } else if (chars.includes(option_string[0]) && !chars.includes(option_string[1])) { + let option_prefix = option_string + let explicit_arg = undefined + let short_option_prefix = option_string.slice(0, 2) + let short_explicit_arg = option_string.slice(2) + + for (let option_string of Object.keys(this._option_string_actions)) { + if (option_string === short_option_prefix) { + let action = this._option_string_actions[option_string] + let tup = [ action, option_string, short_explicit_arg ] + result.push(tup) + } else if (option_string.startsWith(option_prefix)) { + let action = this._option_string_actions[option_string] + let tup = [ action, option_string, explicit_arg ] + result.push(tup) + } + } + + // shouldn't ever get here + } else { + this.error(sub('unexpected option string: %s', option_string)) + } + + // return the collected option tuples + return result + } + + _get_nargs_pattern(action) { + // in all examples below, we have to allow for '--' args + // which are represented as '-' in the pattern + let nargs = action.nargs + let nargs_pattern + + // the default (None) is assumed to be a single argument + if (nargs === undefined) { + nargs_pattern = '(-*A-*)' + + // allow zero or one arguments + } else if (nargs === OPTIONAL) { + nargs_pattern = '(-*A?-*)' + + // allow zero or more arguments + } else if (nargs === ZERO_OR_MORE) { + nargs_pattern = '(-*[A-]*)' + + // allow one or more arguments + } else if (nargs === ONE_OR_MORE) { + nargs_pattern = '(-*A[A-]*)' + + // allow any number of options or arguments + } else if (nargs === REMAINDER) { + nargs_pattern = '([-AO]*)' + + // allow one argument followed by any number of options or arguments + } else if (nargs === PARSER) { + nargs_pattern = '(-*A[-AO]*)' + + // suppress action, like nargs=0 + } else if (nargs === SUPPRESS) { + nargs_pattern = '(-*-*)' + 
+ // all others should be integers + } else { + nargs_pattern = sub('(-*%s-*)', 'A'.repeat(nargs).split('').join('-*')) + } + + // if this is an optional action, -- is not allowed + if (action.option_strings.length) { + nargs_pattern = nargs_pattern.replace(/-\*/g, '') + nargs_pattern = nargs_pattern.replace(/-/g, '') + } + + // return the pattern + return nargs_pattern + } + + // ======================== + // Alt command line argument parsing, allowing free intermix + // ======================== + + parse_intermixed_args(args = undefined, namespace = undefined) { + let argv + [ args, argv ] = this.parse_known_intermixed_args(args, namespace) + if (argv.length) { + let msg = 'unrecognized arguments: %s' + this.error(sub(msg, argv.join(' '))) + } + return args + } + + parse_known_intermixed_args(args = undefined, namespace = undefined) { + // returns a namespace and list of extras + // + // positional can be freely intermixed with optionals. optionals are + // first parsed with all positional arguments deactivated. The 'extras' + // are then parsed. If the parser definition is incompatible with the + // intermixed assumptions (e.g. use of REMAINDER, subparsers) a + // TypeError is raised. + // + // positionals are 'deactivated' by setting nargs and default to + // SUPPRESS. 
This blocks the addition of that positional to the + // namespace + + let extras + let positionals = this._get_positional_actions() + let a = positionals.filter(action => [ PARSER, REMAINDER ].includes(action.nargs)) + if (a.length) { + throw new TypeError(sub('parse_intermixed_args: positional arg' + + ' with nargs=%s', a[0].nargs)) + } + + for (let group of this._mutually_exclusive_groups) { + for (let action of group._group_actions) { + if (positionals.includes(action)) { + throw new TypeError('parse_intermixed_args: positional in' + + ' mutuallyExclusiveGroup') + } + } + } + + let save_usage + try { + save_usage = this.usage + let remaining_args + try { + if (this.usage === undefined) { + // capture the full usage for use in error messages + this.usage = this.format_usage().slice(7) + } + for (let action of positionals) { + // deactivate positionals + action.save_nargs = action.nargs + // action.nargs = 0 + action.nargs = SUPPRESS + action.save_default = action.default + action.default = SUPPRESS + } + [ namespace, remaining_args ] = this.parse_known_args(args, + namespace) + for (let action of positionals) { + // remove the empty positional values from namespace + let attr = getattr(namespace, action.dest) + if (Array.isArray(attr) && attr.length === 0) { + // eslint-disable-next-line no-console + console.warn(sub('Do not expect %s in %s', action.dest, namespace)) + delattr(namespace, action.dest) + } + } + } finally { + // restore nargs and usage before exiting + for (let action of positionals) { + action.nargs = action.save_nargs + action.default = action.save_default + } + } + let optionals = this._get_optional_actions() + try { + // parse positionals. optionals aren't normally required, but + // they could be, so make sure they aren't. 
+ for (let action of optionals) { + action.save_required = action.required + action.required = false + } + for (let group of this._mutually_exclusive_groups) { + group.save_required = group.required + group.required = false + } + [ namespace, extras ] = this.parse_known_args(remaining_args, + namespace) + } finally { + // restore parser values before exiting + for (let action of optionals) { + action.required = action.save_required + } + for (let group of this._mutually_exclusive_groups) { + group.required = group.save_required + } + } + } finally { + this.usage = save_usage + } + return [ namespace, extras ] + } + + // ======================== + // Value conversion methods + // ======================== + _get_values(action, arg_strings) { + // for everything but PARSER, REMAINDER args, strip out first '--' + if (![PARSER, REMAINDER].includes(action.nargs)) { + try { + _array_remove(arg_strings, '--') + } catch (err) {} + } + + let value + // optional argument produces a default when not present + if (!arg_strings.length && action.nargs === OPTIONAL) { + if (action.option_strings.length) { + value = action.const + } else { + value = action.default + } + if (typeof value === 'string') { + value = this._get_value(action, value) + this._check_value(action, value) + } + + // when nargs='*' on a positional, if there were no command-line + // args, use the default if it is anything other than None + } else if (!arg_strings.length && action.nargs === ZERO_OR_MORE && + !action.option_strings.length) { + if (action.default !== undefined) { + value = action.default + } else { + value = arg_strings + } + this._check_value(action, value) + + // single argument or optional argument produces a single value + } else if (arg_strings.length === 1 && [undefined, OPTIONAL].includes(action.nargs)) { + let arg_string = arg_strings[0] + value = this._get_value(action, arg_string) + this._check_value(action, value) + + // REMAINDER arguments convert all values, checking none + } else if 
(action.nargs === REMAINDER) { + value = arg_strings.map(v => this._get_value(action, v)) + + // PARSER arguments convert all values, but check only the first + } else if (action.nargs === PARSER) { + value = arg_strings.map(v => this._get_value(action, v)) + this._check_value(action, value[0]) + + // SUPPRESS argument does not put anything in the namespace + } else if (action.nargs === SUPPRESS) { + value = SUPPRESS + + // all other types of nargs produce a list + } else { + value = arg_strings.map(v => this._get_value(action, v)) + for (let v of value) { + this._check_value(action, v) + } + } + + // return the converted value + return value + } + + _get_value(action, arg_string) { + let type_func = this._registry_get('type', action.type, action.type) + if (typeof type_func !== 'function') { + let msg = '%r is not callable' + throw new ArgumentError(action, sub(msg, type_func)) + } + + // convert the value to the appropriate type + let result + try { + try { + result = type_func(arg_string) + } catch (err) { + // Dear TC39, why would you ever consider making es6 classes not callable? + // We had one universal interface, [[Call]], which worked for anything + // (with familiar this-instanceof guard for classes). Now we have two. 
+ if (err instanceof TypeError && + /Class constructor .* cannot be invoked without 'new'/.test(err.message)) { + // eslint-disable-next-line new-cap + result = new type_func(arg_string) + } else { + throw err + } + } + + } catch (err) { + // ArgumentTypeErrors indicate errors + if (err instanceof ArgumentTypeError) { + //let name = getattr(action.type, 'name', repr(action.type)) + let msg = err.message + throw new ArgumentError(action, msg) + + // TypeErrors or ValueErrors also indicate errors + } else if (err instanceof TypeError) { + let name = getattr(action.type, 'name', repr(action.type)) + let args = {type: name, value: arg_string} + let msg = 'invalid %(type)s value: %(value)r' + throw new ArgumentError(action, sub(msg, args)) + } else { + throw err + } + } + + // return the converted value + return result + } + + _check_value(action, value) { + // converted value must be one of the choices (if specified) + if (action.choices !== undefined && !_choices_to_array(action.choices).includes(value)) { + let args = {value, + choices: _choices_to_array(action.choices).map(repr).join(', ')} + let msg = 'invalid choice: %(value)r (choose from %(choices)s)' + throw new ArgumentError(action, sub(msg, args)) + } + } + + // ======================= + // Help-formatting methods + // ======================= + format_usage() { + let formatter = this._get_formatter() + formatter.add_usage(this.usage, this._actions, + this._mutually_exclusive_groups) + return formatter.format_help() + } + + format_help() { + let formatter = this._get_formatter() + + // usage + formatter.add_usage(this.usage, this._actions, + this._mutually_exclusive_groups) + + // description + formatter.add_text(this.description) + + // positionals, optionals and user-defined groups + for (let action_group of this._action_groups) { + formatter.start_section(action_group.title) + formatter.add_text(action_group.description) + formatter.add_arguments(action_group._group_actions) + formatter.end_section() + } + 
+ // epilog + formatter.add_text(this.epilog) + + // determine help from format above + return formatter.format_help() + } + + _get_formatter() { + // eslint-disable-next-line new-cap + return new this.formatter_class({ prog: this.prog }) + } + + // ===================== + // Help-printing methods + // ===================== + print_usage(file = undefined) { + if (file === undefined) file = process.stdout + this._print_message(this.format_usage(), file) + } + + print_help(file = undefined) { + if (file === undefined) file = process.stdout + this._print_message(this.format_help(), file) + } + + _print_message(message, file = undefined) { + if (message) { + if (file === undefined) file = process.stderr + file.write(message) + } + } + + // =============== + // Exiting methods + // =============== + exit(status = 0, message = undefined) { + if (message) { + this._print_message(message, process.stderr) + } + process.exit(status) + } + + error(message) { + /* + * error(message: string) + * + * Prints a usage message incorporating the message to stderr and + * exits. + * + * If you override this in a subclass, it should not return -- it + * should either exit or raise an exception. 
+ */ + + // LEGACY (v1 compatibility), debug mode + if (this.debug === true) throw new Error(message) + // end + this.print_usage(process.stderr) + let args = {prog: this.prog, message: message} + this.exit(2, sub('%(prog)s: error: %(message)s\n', args)) + } +})) + + +module.exports = { + ArgumentParser, + ArgumentError, + ArgumentTypeError, + BooleanOptionalAction, + FileType, + HelpFormatter, + ArgumentDefaultsHelpFormatter, + RawDescriptionHelpFormatter, + RawTextHelpFormatter, + MetavarTypeHelpFormatter, + Namespace, + Action, + ONE_OR_MORE, + OPTIONAL, + PARSER, + REMAINDER, + SUPPRESS, + ZERO_OR_MORE +} + +// LEGACY (v1 compatibility), Const alias +Object.defineProperty(module.exports, 'Const', { + get() { + let result = {} + Object.entries({ ONE_OR_MORE, OPTIONAL, PARSER, REMAINDER, SUPPRESS, ZERO_OR_MORE }).forEach(([ n, v ]) => { + Object.defineProperty(result, n, { + get() { + deprecate(n, sub('use argparse.%s instead of argparse.Const.%s', n, n)) + return v + } + }) + }) + Object.entries({ _UNRECOGNIZED_ARGS_ATTR }).forEach(([ n, v ]) => { + Object.defineProperty(result, n, { + get() { + deprecate(n, sub('argparse.Const.%s is an internal symbol and will no longer be available', n)) + return v + } + }) + }) + return result + }, + enumerable: false +}) +// end diff --git a/node_modules/argparse/package.json b/node_modules/argparse/package.json new file mode 100644 index 0000000..647d2af --- /dev/null +++ b/node_modules/argparse/package.json @@ -0,0 +1,31 @@ +{ + "name": "argparse", + "description": "CLI arguments parser. 
Native port of python's argparse.", + "version": "2.0.1", + "keywords": [ + "cli", + "parser", + "argparse", + "option", + "args" + ], + "main": "argparse.js", + "files": [ + "argparse.js", + "lib/" + ], + "license": "Python-2.0", + "repository": "nodeca/argparse", + "scripts": { + "lint": "eslint .", + "test": "npm run lint && nyc mocha", + "coverage": "npm run test && nyc report --reporter html" + }, + "devDependencies": { + "@babel/eslint-parser": "^7.11.0", + "@babel/plugin-syntax-class-properties": "^7.10.4", + "eslint": "^7.5.0", + "mocha": "^8.0.1", + "nyc": "^15.1.0" + } +} diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 0000000..cea8b16 --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 0000000..2cdc8e4 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 0000000..d2a48b6 --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `` and ``. Supports regular expressions as well! + +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be 
returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ , ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 0000000..c67a646 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? 
ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 0000000..ce6073e --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/brace-expansion/.github/FUNDING.yml b/node_modules/brace-expansion/.github/FUNDING.yml new file mode 100644 index 0000000..79d1eaf --- /dev/null +++ b/node_modules/brace-expansion/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/brace-expansion" +patreon: juliangruber diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000..de32266 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the 
"Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 0000000..e55c583 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,135 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. 
+ +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. 
+ +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 0000000..4af9dde --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,203 @@ +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m) return [str]; + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + if (/\$$/.test(m.pre)) { + for (var k = 0; k < post.length; k++) { + var expansion = pre+ '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } else { + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = []; + + for (var j = 0; j < n.length; j++) { + N.push.apply(N, expand(n[j], false)); + } + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 0000000..7097d41 --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,46 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "2.0.1", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0" + }, + "devDependencies": { + "@c4312/matcha": "^1.3.1", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + 
"safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/entities/LICENSE b/node_modules/entities/LICENSE new file mode 100644 index 0000000..c464f86 --- /dev/null +++ b/node_modules/entities/LICENSE @@ -0,0 +1,11 @@ +Copyright (c) Felix Böhm +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS, +EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/node_modules/entities/package.json b/node_modules/entities/package.json new file mode 100644 index 0000000..2e857f8 --- /dev/null +++ b/node_modules/entities/package.json @@ -0,0 +1,90 @@ +{ + "name": "entities", + "version": "4.5.0", + "description": "Encode & decode XML and HTML entities with ease & speed", + "author": "Felix Boehm ", + "funding": "https://github.com/fb55/entities?sponsor=1", + "sideEffects": false, + "keywords": [ + "entity", + "decoding", + "encoding", + "html", + "xml", + "html entities" + ], + "directories": { + "lib": "lib/" + }, + "main": "lib/index.js", + "types": "lib/index.d.ts", + "module": "lib/esm/index.js", + "exports": { + ".": { + "require": "./lib/index.js", + "import": "./lib/esm/index.js" + }, + "./lib/decode.js": { + "require": "./lib/decode.js", + "import": "./lib/esm/decode.js" + }, + "./lib/escape.js": { + "require": "./lib/escape.js", + "import": "./lib/esm/escape.js" + } + }, + "files": [ + "lib/**/*" + ], + "engines": { + "node": ">=0.12" + }, + "devDependencies": { + "@types/jest": "^28.1.8", + "@types/node": "^18.15.11", + "@typescript-eslint/eslint-plugin": "^5.58.0", + "@typescript-eslint/parser": "^5.58.0", + "eslint": "^8.38.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-node": "^11.1.0", + "jest": "^28.1.3", + "prettier": "^2.8.7", + "ts-jest": "^28.0.8", + "typedoc": "^0.24.1", + "typescript": "^5.0.4" + }, + "scripts": { + "test": "npm run test:jest && npm run lint", + "test:jest": "jest", + "lint": "npm run lint:es && npm run lint:prettier", + "lint:es": "eslint .", + "lint:prettier": "npm run prettier -- --check", + "format": "npm run format:es && npm run format:prettier", + "format:es": "npm run lint:es -- --fix", + "format:prettier": "npm run prettier -- --write", + "prettier": "prettier '**/*.{ts,md,json,yml}'", + "build": "npm run build:cjs && npm run build:esm", + "build:cjs": "tsc --sourceRoot https://raw.githubusercontent.com/fb55/entities/$(git rev-parse HEAD)/src/", + 
"build:esm": "npm run build:cjs -- --module esnext --target es2019 --outDir lib/esm && echo '{\"type\":\"module\"}' > lib/esm/package.json", + "build:docs": "typedoc --hideGenerator src/index.ts", + "build:trie": "ts-node scripts/write-decode-map.ts", + "build:encode-trie": "ts-node scripts/write-encode-map.ts", + "prepare": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/fb55/entities.git" + }, + "license": "BSD-2-Clause", + "jest": { + "preset": "ts-jest", + "coverageProvider": "v8", + "moduleNameMapper": { + "^(.*)\\.js$": "$1" + } + }, + "prettier": { + "tabWidth": 4, + "proseWrap": "always" + } +} diff --git a/node_modules/entities/readme.md b/node_modules/entities/readme.md new file mode 100644 index 0000000..731d90c --- /dev/null +++ b/node_modules/entities/readme.md @@ -0,0 +1,122 @@ +# entities [![NPM version](https://img.shields.io/npm/v/entities.svg)](https://npmjs.org/package/entities) [![Downloads](https://img.shields.io/npm/dm/entities.svg)](https://npmjs.org/package/entities) [![Node.js CI](https://github.com/fb55/entities/actions/workflows/nodejs-test.yml/badge.svg)](https://github.com/fb55/entities/actions/workflows/nodejs-test.yml) + +Encode & decode HTML & XML entities with ease & speed. + +## Features + +- 😇 Tried and true: `entities` is used by many popular libraries; eg. + [`htmlparser2`](https://github.com/fb55/htmlparser2), the official + [AWS SDK](https://github.com/aws/aws-sdk-js-v3) and + [`commonmark`](https://github.com/commonmark/commonmark.js) use it to + process HTML entities. +- ⚡️ Fast: `entities` is the fastest library for decoding HTML entities (as + of April 2022); see [performance](#performance). +- 🎛 Configurable: Get an output tailored for your needs. You are fine with + UTF8? That'll save you some bytes. Prefer to only have ASCII characters? We + can do that as well! 
+ +## How to… + +### …install `entities` + + npm install entities + +### …use `entities` + +```javascript +const entities = require("entities"); + +// Encoding +entities.escapeUTF8("& ü"); // "&#38; ü" +entities.encodeXML("& ü"); // "&#38; ü" +entities.encodeHTML("& ü"); // "&#38; ü" + +// Decoding +entities.decodeXML("asdf & ÿ ü '"); // "asdf & ÿ ü '" +entities.decodeHTML("asdf & ÿ ü '"); // "asdf & ÿ ü '" +``` + +## Performance + +This is how `entities` compares to other libraries on a very basic benchmark +(see `scripts/benchmark.ts`, for 10,000,000 iterations; **lower is better**): + +| Library | Version | `decode` perf | `encode` perf | `escape` perf | +| -------------- | ------- | ------------- | ------------- | ------------- | +| entities | `3.0.1` | 1.418s | 6.786s | 2.196s | +| html-entities | `2.3.2` | 2.530s | 6.829s | 2.415s | +| he | `1.2.0` | 5.800s | 24.237s | 3.624s | +| parse-entities | `3.0.0` | 9.660s | N/A | N/A | + +--- + +## FAQ + +> What methods should I actually use to encode my documents? + +If your target supports UTF-8, the `escapeUTF8` method is going to be your best +choice. Otherwise, use either `encodeHTML` or `encodeXML` based on whether +you're dealing with an HTML or an XML document. + +You can have a look at the options for the `encode` and `decode` methods to see +everything you can configure. + +> When should I use strict decoding? + +When strict decoding, entities not terminated with a semicolon will be ignored. +This is helpful for decoding entities in legacy environments. + +> Why should I use `entities` instead of alternative modules? + +As of April 2022, `entities` is a bit faster than other modules. Still, this is +not a very differentiated space and other modules can catch up. + +**More importantly**, you might already have `entities` in your dependency graph +(as a dependency of eg. `cheerio`, or `htmlparser2`), and including it directly +might not even increase your bundle size. 
The same is true for other entity +libraries, so have a look through your `node_modules` directory! + +> Does `entities` support tree shaking? + +Yes! `entities` ships as both a CommonJS and a ES module. Note that for best +results, you should not use the `encode` and `decode` functions, as they wrap +around a number of other functions, all of which will remain in the bundle. +Instead, use the functions that you need directly. + +--- + +## Acknowledgements + +This library wouldn't be possible without the work of these individuals. Thanks +to + +- [@mathiasbynens](https://github.com/mathiasbynens) for his explanations + about character encodings, and his library `he`, which was one of the + inspirations for `entities` +- [@inikulin](https://github.com/inikulin) for his work on optimized tries for + decoding HTML entities for the `parse5` project +- [@mdevils](https://github.com/mdevils) for taking on the challenge of + producing a quick entity library with his `html-entities` library. + `entities` would be quite a bit slower if there wasn't any competition. + Right now `entities` is on top, but we'll see how long that lasts! + +--- + +License: BSD-2-Clause + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). Tidelift will +coordinate the fix and disclosure. + +## `entities` for enterprise + +Available as part of the Tidelift Subscription + +The maintainers of `entities` and thousands of other packages are working with +Tidelift to deliver commercial support and maintenance for the open source +dependencies you use to build your applications. Save time, reduce risk, and +improve code health, while paying the maintainers of the exact dependencies you +use. 
+[Learn more.](https://tidelift.com/subscription/pkg/npm-entities?utm_source=npm-entities&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/linkify-it/LICENSE b/node_modules/linkify-it/LICENSE new file mode 100644 index 0000000..67596f5 --- /dev/null +++ b/node_modules/linkify-it/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2015 Vitaly Puzrin. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/linkify-it/README.md b/node_modules/linkify-it/README.md new file mode 100644 index 0000000..7c2d9fb --- /dev/null +++ b/node_modules/linkify-it/README.md @@ -0,0 +1,196 @@ +linkify-it +========== + +[![CI](https://github.com/markdown-it/linkify-it/actions/workflows/ci.yml/badge.svg)](https://github.com/markdown-it/linkify-it/actions/workflows/ci.yml) +[![NPM version](https://img.shields.io/npm/v/linkify-it.svg?style=flat)](https://www.npmjs.org/package/linkify-it) +[![Coverage Status](https://img.shields.io/coveralls/markdown-it/linkify-it/master.svg?style=flat)](https://coveralls.io/r/markdown-it/linkify-it?branch=master) +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/markdown-it/linkify-it) + +> Links recognition library with FULL unicode support. +> Focused on high quality link patterns detection in plain text. + +__[Demo](http://markdown-it.github.io/linkify-it/)__ + +Why it's awesome: + +- Full unicode support, _with astral characters_! +- International domains support. +- Allows rules extension & custom normalizers. + + +Install +------- + +```bash +npm install linkify-it --save +``` + +Browserification is also supported. + + +Usage examples +-------------- + +##### Example 1 + +```js +import linkifyit from 'linkify-it'; +const linkify = linkifyit(); + +// Reload full tlds list & add unofficial `.onion` domain. +linkify + .tlds(require('tlds')) // Reload with full tlds list + .tlds('onion', true) // Add unofficial `.onion` domain + .add('git:', 'http:') // Add `git:` protocol as "alias" + .add('ftp:', null) // Disable `ftp:` protocol + .set({ fuzzyIP: true }); // Enable IPs in fuzzy links (without schema) + +console.log(linkify.test('Site github.com!')); // true + +console.log(linkify.match('Site github.com!')); // [ { + // schema: "", + // index: 5, + // lastIndex: 15, + // raw: "github.com", + // text: "github.com", + // url: "http://github.com", + // } ] +``` + +##### Example 2. 
Add twitter mentions handler + +```js +linkify.add('@', { + validate: function (text, pos, self) { + const tail = text.slice(pos); + + if (!self.re.twitter) { + self.re.twitter = new RegExp( + '^([a-zA-Z0-9_]){1,15}(?!_)(?=$|' + self.re.src_ZPCc + ')' + ); + } + if (self.re.twitter.test(tail)) { + // Linkifier allows punctuation chars before prefix, + // but we additionally disable `@` ("@@mention" is invalid) + if (pos >= 2 && tail[pos - 2] === '@') { + return false; + } + return tail.match(self.re.twitter)[0].length; + } + return 0; + }, + normalize: function (match) { + match.url = 'https://twitter.com/' + match.url.replace(/^@/, ''); + } +}); +``` + + +API +--- + +__[API documentation](http://markdown-it.github.io/linkify-it/doc)__ + +### new LinkifyIt(schemas, options) + +Creates new linkifier instance with optional additional schemas. +Can be called without `new` keyword for convenience. + +By default understands: + +- `http(s)://...` , `ftp://...`, `mailto:...` & `//...` links +- "fuzzy" links and emails (google.com, foo@bar.com). + +`schemas` is an object, where each key/value describes protocol/rule: + +- __key__ - link prefix (usually, protocol name with `:` at the end, `skype:` + for example). `linkify-it` makes sure that prefix is not preceded with + alphanumeric char. +- __value__ - rule to check tail after link prefix + - _String_ - just alias to existing rule + - _Object_ + - _validate_ - either a `RegExp` (start with `^`, and don't include the + link prefix itself), or a validator function which, given arguments + _text_, _pos_, and _self_, returns the length of a match in _text_ + starting at index _pos_. _pos_ is the index right after the link prefix. + _self_ can be used to access the linkify object to cache data. + - _normalize_ - optional function to normalize text & url of matched result + (for example, for twitter mentions). + +`options`: + +- __fuzzyLink__ - recognize URL-s without `http(s)://` head. Default `true`. 
+- __fuzzyIP__ - allow IPs in fuzzy links above. Can conflict with some texts + like version numbers. Default `false`. +- __fuzzyEmail__ - recognize emails without `mailto:` prefix. Default `true`. +- __---__ - set `true` to terminate link with `---` (if it's considered as long dash). + + +### .test(text) + +Searches linkifiable pattern and returns `true` on success or `false` on fail. + + +### .pretest(text) + +Quick check if link MAY BE can exist. Can be used to optimize more expensive +`.test()` calls. Return `false` if link can not be found, `true` - if `.test()` +call needed to know exactly. + + +### .testSchemaAt(text, name, offset) + +Similar to `.test()` but checks only specific protocol tail exactly at given +position. Returns length of found pattern (0 on fail). + + +### .match(text) + +Returns `Array` of found link matches or null if nothing found. + +Each match has: + +- __schema__ - link schema, can be empty for fuzzy links, or `//` for + protocol-neutral links. +- __index__ - offset of matched text +- __lastIndex__ - index of next char after mathch end +- __raw__ - matched text +- __text__ - normalized text +- __url__ - link, generated from matched text + + +### .matchAtStart(text) + +Checks if a match exists at the start of the string. Returns `Match` +(see docs for `match(text)`) or null if no URL is at the start. +Doesn't work with fuzzy links. + + +### .tlds(list[, keepOld]) + +Load (or merge) new tlds list. Those are needed for fuzzy links (without schema) +to avoid false positives. By default: + +- 2-letter root zones are ok. +- biz|com|edu|gov|net|org|pro|web|xxx|aero|asia|coop|info|museum|name|shop|рф are ok. +- encoded (`xn--...`) root zones are ok. + +If that's not enough, you can reload defaults with more detailed zones list. + +### .add(key, value) + +Add a new schema to the schemas object. 
As described in the constructor +definition, `key` is a link prefix (`skype:`, for example), and `value` +is a String to alias to another schema, or an Object with `validate` and +optionally `normalize` definitions. To disable an existing rule, use +`.add(key, null)`. + + +### .set(options) + +Override default options. Missed properties will not be changed. + + +## License + +[MIT](https://github.com/markdown-it/linkify-it/blob/master/LICENSE) diff --git a/node_modules/linkify-it/index.mjs b/node_modules/linkify-it/index.mjs new file mode 100644 index 0000000..f4c8e13 --- /dev/null +++ b/node_modules/linkify-it/index.mjs @@ -0,0 +1,642 @@ +import reFactory from './lib/re.mjs' + +// +// Helpers +// + +// Merge objects +// +function assign (obj /* from1, from2, from3, ... */) { + const sources = Array.prototype.slice.call(arguments, 1) + + sources.forEach(function (source) { + if (!source) { return } + + Object.keys(source).forEach(function (key) { + obj[key] = source[key] + }) + }) + + return obj +} + +function _class (obj) { return Object.prototype.toString.call(obj) } +function isString (obj) { return _class(obj) === '[object String]' } +function isObject (obj) { return _class(obj) === '[object Object]' } +function isRegExp (obj) { return _class(obj) === '[object RegExp]' } +function isFunction (obj) { return _class(obj) === '[object Function]' } + +function escapeRE (str) { return str.replace(/[.?*+^$[\]\\(){}|-]/g, '\\$&') } + +// + +const defaultOptions = { + fuzzyLink: true, + fuzzyEmail: true, + fuzzyIP: false +} + +function isOptionsObj (obj) { + return Object.keys(obj || {}).reduce(function (acc, k) { + /* eslint-disable-next-line no-prototype-builtins */ + return acc || defaultOptions.hasOwnProperty(k) + }, false) +} + +const defaultSchemas = { + 'http:': { + validate: function (text, pos, self) { + const tail = text.slice(pos) + + if (!self.re.http) { + // compile lazily, because "host"-containing variables can change on tlds update. 
+ self.re.http = new RegExp( + '^\\/\\/' + self.re.src_auth + self.re.src_host_port_strict + self.re.src_path, 'i' + ) + } + if (self.re.http.test(tail)) { + return tail.match(self.re.http)[0].length + } + return 0 + } + }, + 'https:': 'http:', + 'ftp:': 'http:', + '//': { + validate: function (text, pos, self) { + const tail = text.slice(pos) + + if (!self.re.no_http) { + // compile lazily, because "host"-containing variables can change on tlds update. + self.re.no_http = new RegExp( + '^' + + self.re.src_auth + + // Don't allow single-level domains, because of false positives like '//test' + // with code comments + '(?:localhost|(?:(?:' + self.re.src_domain + ')\\.)+' + self.re.src_domain_root + ')' + + self.re.src_port + + self.re.src_host_terminator + + self.re.src_path, + + 'i' + ) + } + + if (self.re.no_http.test(tail)) { + // should not be `://` & `///`, that protects from errors in protocol name + if (pos >= 3 && text[pos - 3] === ':') { return 0 } + if (pos >= 3 && text[pos - 3] === '/') { return 0 } + return tail.match(self.re.no_http)[0].length + } + return 0 + } + }, + 'mailto:': { + validate: function (text, pos, self) { + const tail = text.slice(pos) + + if (!self.re.mailto) { + self.re.mailto = new RegExp( + '^' + self.re.src_email_name + '@' + self.re.src_host_strict, 'i' + ) + } + if (self.re.mailto.test(tail)) { + return tail.match(self.re.mailto)[0].length + } + return 0 + } + } +} + +// RE pattern for 2-character tlds (autogenerated by ./support/tlds_2char_gen.js) +/* eslint-disable-next-line max-len */ +const tlds_2ch_src_re = 'a[cdefgilmnoqrstuwxz]|b[abdefghijmnorstvwyz]|c[acdfghiklmnoruvwxyz]|d[ejkmoz]|e[cegrstu]|f[ijkmor]|g[abdefghilmnpqrstuwy]|h[kmnrtu]|i[delmnoqrst]|j[emop]|k[eghimnprwyz]|l[abcikrstuvy]|m[acdeghklmnopqrstuvwxyz]|n[acefgilopruz]|om|p[aefghklmnrstwy]|qa|r[eosuw]|s[abcdeghijklmnortuvxyz]|t[cdfghjklmnortvwz]|u[agksyz]|v[aceginu]|w[fs]|y[et]|z[amw]' + +// DON'T try to make PRs with changes. 
Extend TLDs with LinkifyIt.tlds() instead +const tlds_default = 'biz|com|edu|gov|net|org|pro|web|xxx|aero|asia|coop|info|museum|name|shop|рф'.split('|') + +function resetScanCache (self) { + self.__index__ = -1 + self.__text_cache__ = '' +} + +function createValidator (re) { + return function (text, pos) { + const tail = text.slice(pos) + + if (re.test(tail)) { + return tail.match(re)[0].length + } + return 0 + } +} + +function createNormalizer () { + return function (match, self) { + self.normalize(match) + } +} + +// Schemas compiler. Build regexps. +// +function compile (self) { + // Load & clone RE patterns. + const re = self.re = reFactory(self.__opts__) + + // Define dynamic patterns + const tlds = self.__tlds__.slice() + + self.onCompile() + + if (!self.__tlds_replaced__) { + tlds.push(tlds_2ch_src_re) + } + tlds.push(re.src_xn) + + re.src_tlds = tlds.join('|') + + function untpl (tpl) { return tpl.replace('%TLDS%', re.src_tlds) } + + re.email_fuzzy = RegExp(untpl(re.tpl_email_fuzzy), 'i') + re.link_fuzzy = RegExp(untpl(re.tpl_link_fuzzy), 'i') + re.link_no_ip_fuzzy = RegExp(untpl(re.tpl_link_no_ip_fuzzy), 'i') + re.host_fuzzy_test = RegExp(untpl(re.tpl_host_fuzzy_test), 'i') + + // + // Compile each schema + // + + const aliases = [] + + self.__compiled__ = {} // Reset compiled data + + function schemaError (name, val) { + throw new Error('(LinkifyIt) Invalid schema "' + name + '": ' + val) + } + + Object.keys(self.__schemas__).forEach(function (name) { + const val = self.__schemas__[name] + + // skip disabled methods + if (val === null) { return } + + const compiled = { validate: null, link: null } + + self.__compiled__[name] = compiled + + if (isObject(val)) { + if (isRegExp(val.validate)) { + compiled.validate = createValidator(val.validate) + } else if (isFunction(val.validate)) { + compiled.validate = val.validate + } else { + schemaError(name, val) + } + + if (isFunction(val.normalize)) { + compiled.normalize = val.normalize + } else if 
(!val.normalize) { + compiled.normalize = createNormalizer() + } else { + schemaError(name, val) + } + + return + } + + if (isString(val)) { + aliases.push(name) + return + } + + schemaError(name, val) + }) + + // + // Compile postponed aliases + // + + aliases.forEach(function (alias) { + if (!self.__compiled__[self.__schemas__[alias]]) { + // Silently fail on missed schemas to avoid errons on disable. + // schemaError(alias, self.__schemas__[alias]); + return + } + + self.__compiled__[alias].validate = + self.__compiled__[self.__schemas__[alias]].validate + self.__compiled__[alias].normalize = + self.__compiled__[self.__schemas__[alias]].normalize + }) + + // + // Fake record for guessed links + // + self.__compiled__[''] = { validate: null, normalize: createNormalizer() } + + // + // Build schema condition + // + const slist = Object.keys(self.__compiled__) + .filter(function (name) { + // Filter disabled & fake schemas + return name.length > 0 && self.__compiled__[name] + }) + .map(escapeRE) + .join('|') + // (?!_) cause 1.5x slowdown + self.re.schema_test = RegExp('(^|(?!_)(?:[><\uff5c]|' + re.src_ZPCc + '))(' + slist + ')', 'i') + self.re.schema_search = RegExp('(^|(?!_)(?:[><\uff5c]|' + re.src_ZPCc + '))(' + slist + ')', 'ig') + self.re.schema_at_start = RegExp('^' + self.re.schema_search.source, 'i') + + self.re.pretest = RegExp( + '(' + self.re.schema_test.source + ')|(' + self.re.host_fuzzy_test.source + ')|@', + 'i' + ) + + // + // Cleanup + // + + resetScanCache(self) +} + +/** + * class Match + * + * Match result. Single element of array, returned by [[LinkifyIt#match]] + **/ +function Match (self, shift) { + const start = self.__index__ + const end = self.__last_index__ + const text = self.__text_cache__.slice(start, end) + + /** + * Match#schema -> String + * + * Prefix (protocol) for matched string. + **/ + this.schema = self.__schema__.toLowerCase() + /** + * Match#index -> Number + * + * First position of matched string. 
+ **/ + this.index = start + shift + /** + * Match#lastIndex -> Number + * + * Next position after matched string. + **/ + this.lastIndex = end + shift + /** + * Match#raw -> String + * + * Matched string. + **/ + this.raw = text + /** + * Match#text -> String + * + * Notmalized text of matched string. + **/ + this.text = text + /** + * Match#url -> String + * + * Normalized url of matched string. + **/ + this.url = text +} + +function createMatch (self, shift) { + const match = new Match(self, shift) + + self.__compiled__[match.schema].normalize(match, self) + + return match +} + +/** + * class LinkifyIt + **/ + +/** + * new LinkifyIt(schemas, options) + * - schemas (Object): Optional. Additional schemas to validate (prefix/validator) + * - options (Object): { fuzzyLink|fuzzyEmail|fuzzyIP: true|false } + * + * Creates new linkifier instance with optional additional schemas. + * Can be called without `new` keyword for convenience. + * + * By default understands: + * + * - `http(s)://...` , `ftp://...`, `mailto:...` & `//...` links + * - "fuzzy" links and emails (example.com, foo@bar.com). + * + * `schemas` is an object, where each key/value describes protocol/rule: + * + * - __key__ - link prefix (usually, protocol name with `:` at the end, `skype:` + * for example). `linkify-it` makes shure that prefix is not preceeded with + * alphanumeric char and symbols. Only whitespaces and punctuation allowed. + * - __value__ - rule to check tail after link prefix + * - _String_ - just alias to existing rule + * - _Object_ + * - _validate_ - validator function (should return matched length on success), + * or `RegExp`. + * - _normalize_ - optional function to normalize text & url of matched result + * (for example, for @twitter mentions). + * + * `options`: + * + * - __fuzzyLink__ - recognige URL-s without `http(s):` prefix. Default `true`. + * - __fuzzyIP__ - allow IPs in fuzzy links above. Can conflict with some texts + * like version numbers. Default `false`. 
+ * - __fuzzyEmail__ - recognize emails without `mailto:` prefix. + * + **/ +function LinkifyIt (schemas, options) { + if (!(this instanceof LinkifyIt)) { + return new LinkifyIt(schemas, options) + } + + if (!options) { + if (isOptionsObj(schemas)) { + options = schemas + schemas = {} + } + } + + this.__opts__ = assign({}, defaultOptions, options) + + // Cache last tested result. Used to skip repeating steps on next `match` call. + this.__index__ = -1 + this.__last_index__ = -1 // Next scan position + this.__schema__ = '' + this.__text_cache__ = '' + + this.__schemas__ = assign({}, defaultSchemas, schemas) + this.__compiled__ = {} + + this.__tlds__ = tlds_default + this.__tlds_replaced__ = false + + this.re = {} + + compile(this) +} + +/** chainable + * LinkifyIt#add(schema, definition) + * - schema (String): rule name (fixed pattern prefix) + * - definition (String|RegExp|Object): schema definition + * + * Add new rule definition. See constructor description for details. + **/ +LinkifyIt.prototype.add = function add (schema, definition) { + this.__schemas__[schema] = definition + compile(this) + return this +} + +/** chainable + * LinkifyIt#set(options) + * - options (Object): { fuzzyLink|fuzzyEmail|fuzzyIP: true|false } + * + * Set recognition options for links without schema. + **/ +LinkifyIt.prototype.set = function set (options) { + this.__opts__ = assign(this.__opts__, options) + return this +} + +/** + * LinkifyIt#test(text) -> Boolean + * + * Searches linkifiable pattern and returns `true` on success or `false` on fail. 
+ **/ +LinkifyIt.prototype.test = function test (text) { + // Reset scan cache + this.__text_cache__ = text + this.__index__ = -1 + + if (!text.length) { return false } + + let m, ml, me, len, shift, next, re, tld_pos, at_pos + + // try to scan for link with schema - that's the most simple rule + if (this.re.schema_test.test(text)) { + re = this.re.schema_search + re.lastIndex = 0 + while ((m = re.exec(text)) !== null) { + len = this.testSchemaAt(text, m[2], re.lastIndex) + if (len) { + this.__schema__ = m[2] + this.__index__ = m.index + m[1].length + this.__last_index__ = m.index + m[0].length + len + break + } + } + } + + if (this.__opts__.fuzzyLink && this.__compiled__['http:']) { + // guess schemaless links + tld_pos = text.search(this.re.host_fuzzy_test) + if (tld_pos >= 0) { + // if tld is located after found link - no need to check fuzzy pattern + if (this.__index__ < 0 || tld_pos < this.__index__) { + if ((ml = text.match(this.__opts__.fuzzyIP ? this.re.link_fuzzy : this.re.link_no_ip_fuzzy)) !== null) { + shift = ml.index + ml[1].length + + if (this.__index__ < 0 || shift < this.__index__) { + this.__schema__ = '' + this.__index__ = shift + this.__last_index__ = ml.index + ml[0].length + } + } + } + } + } + + if (this.__opts__.fuzzyEmail && this.__compiled__['mailto:']) { + // guess schemaless emails + at_pos = text.indexOf('@') + if (at_pos >= 0) { + // We can't skip this check, because this cases are possible: + // 192.168.1.1@gmail.com, my.in@example.com + if ((me = text.match(this.re.email_fuzzy)) !== null) { + shift = me.index + me[1].length + next = me.index + me[0].length + + if (this.__index__ < 0 || shift < this.__index__ || + (shift === this.__index__ && next > this.__last_index__)) { + this.__schema__ = 'mailto:' + this.__index__ = shift + this.__last_index__ = next + } + } + } + } + + return this.__index__ >= 0 +} + +/** + * LinkifyIt#pretest(text) -> Boolean + * + * Very quick check, that can give false positives. 
Returns true if link MAY BE + * can exists. Can be used for speed optimization, when you need to check that + * link NOT exists. + **/ +LinkifyIt.prototype.pretest = function pretest (text) { + return this.re.pretest.test(text) +} + +/** + * LinkifyIt#testSchemaAt(text, name, position) -> Number + * - text (String): text to scan + * - name (String): rule (schema) name + * - position (Number): text offset to check from + * + * Similar to [[LinkifyIt#test]] but checks only specific protocol tail exactly + * at given position. Returns length of found pattern (0 on fail). + **/ +LinkifyIt.prototype.testSchemaAt = function testSchemaAt (text, schema, pos) { + // If not supported schema check requested - terminate + if (!this.__compiled__[schema.toLowerCase()]) { + return 0 + } + return this.__compiled__[schema.toLowerCase()].validate(text, pos, this) +} + +/** + * LinkifyIt#match(text) -> Array|null + * + * Returns array of found link descriptions or `null` on fail. We strongly + * recommend to use [[LinkifyIt#test]] first, for best speed. + * + * ##### Result match description + * + * - __schema__ - link schema, can be empty for fuzzy links, or `//` for + * protocol-neutral links. + * - __index__ - offset of matched text + * - __lastIndex__ - index of next char after mathch end + * - __raw__ - matched text + * - __text__ - normalized text + * - __url__ - link, generated from matched text + **/ +LinkifyIt.prototype.match = function match (text) { + const result = [] + let shift = 0 + + // Try to take previous element from cache, if .test() called before + if (this.__index__ >= 0 && this.__text_cache__ === text) { + result.push(createMatch(this, shift)) + shift = this.__last_index__ + } + + // Cut head if cache was used + let tail = shift ? 
text.slice(shift) : text + + // Scan string until end reached + while (this.test(tail)) { + result.push(createMatch(this, shift)) + + tail = tail.slice(this.__last_index__) + shift += this.__last_index__ + } + + if (result.length) { + return result + } + + return null +} + +/** + * LinkifyIt#matchAtStart(text) -> Match|null + * + * Returns fully-formed (not fuzzy) link if it starts at the beginning + * of the string, and null otherwise. + **/ +LinkifyIt.prototype.matchAtStart = function matchAtStart (text) { + // Reset scan cache + this.__text_cache__ = text + this.__index__ = -1 + + if (!text.length) return null + + const m = this.re.schema_at_start.exec(text) + if (!m) return null + + const len = this.testSchemaAt(text, m[2], m[0].length) + if (!len) return null + + this.__schema__ = m[2] + this.__index__ = m.index + m[1].length + this.__last_index__ = m.index + m[0].length + len + + return createMatch(this, 0) +} + +/** chainable + * LinkifyIt#tlds(list [, keepOld]) -> this + * - list (Array): list of tlds + * - keepOld (Boolean): merge with current list if `true` (`false` by default) + * + * Load (or merge) new tlds list. Those are user for fuzzy links (without prefix) + * to avoid false positives. By default this algorythm used: + * + * - hostname with any 2-letter root zones are ok. + * - biz|com|edu|gov|net|org|pro|web|xxx|aero|asia|coop|info|museum|name|shop|рф + * are ok. + * - encoded (`xn--...`) root zones are ok. + * + * If list is replaced, then exact match for 2-chars root zones will be checked. + **/ +LinkifyIt.prototype.tlds = function tlds (list, keepOld) { + list = Array.isArray(list) ? 
list : [list] + + if (!keepOld) { + this.__tlds__ = list.slice() + this.__tlds_replaced__ = true + compile(this) + return this + } + + this.__tlds__ = this.__tlds__.concat(list) + .sort() + .filter(function (el, idx, arr) { + return el !== arr[idx - 1] + }) + .reverse() + + compile(this) + return this +} + +/** + * LinkifyIt#normalize(match) + * + * Default normalizer (if schema does not define it's own). + **/ +LinkifyIt.prototype.normalize = function normalize (match) { + // Do minimal possible changes by default. Need to collect feedback prior + // to move forward https://github.com/markdown-it/linkify-it/issues/1 + + if (!match.schema) { match.url = 'http://' + match.url } + + if (match.schema === 'mailto:' && !/^mailto:/i.test(match.url)) { + match.url = 'mailto:' + match.url + } +} + +/** + * LinkifyIt#onCompile() + * + * Override to modify basic RegExp-s. + **/ +LinkifyIt.prototype.onCompile = function onCompile () { +} + +export default LinkifyIt diff --git a/node_modules/linkify-it/package.json b/node_modules/linkify-it/package.json new file mode 100644 index 0000000..ae3b7c1 --- /dev/null +++ b/node_modules/linkify-it/package.json @@ -0,0 +1,58 @@ +{ + "name": "linkify-it", + "version": "5.0.0", + "description": "Links recognition library with FULL unicode support", + "keywords": [ + "linkify", + "linkifier", + "autolink", + "autolinker" + ], + "repository": "markdown-it/linkify-it", + "main": "build/index.cjs.js", + "module": "index.mjs", + "exports": { + ".": { + "require": "./build/index.cjs.js", + "import": "./index.mjs" + }, + "./*": { + "require": "./*", + "import": "./*" + } + }, + "files": [ + "index.mjs", + "lib/", + "build/" + ], + "license": "MIT", + "scripts": { + "lint": "eslint .", + "test": "npm run lint && npm run build && c8 --exclude build --exclude test -r text -r html -r lcov mocha", + "demo": "npm run lint && node support/build_demo.mjs", + "doc": "node support/build_doc.mjs", + "build": "rollup -c support/rollup.config.mjs", + 
"gh-pages": "npm run demo && npm run doc && shx cp -R doc/ demo/ && gh-pages -d demo -f", + "prepublishOnly": "npm run lint && npm run build && npm run gh-pages" + }, + "dependencies": { + "uc.micro": "^2.0.0" + }, + "devDependencies": { + "@rollup/plugin-node-resolve": "^15.2.3", + "ansi": "^0.3.0", + "benchmark": "^2.1.0", + "c8": "^8.0.1", + "eslint": "^8.54.0", + "eslint-config-standard": "^17.1.0", + "gh-pages": "^6.1.0", + "mdurl": "^2.0.0", + "mocha": "^10.2.0", + "ndoc": "^6.0.0", + "rollup": "^4.6.1", + "shelljs": "^0.8.4", + "shx": "^0.3.2", + "tlds": "^1.166.0" + } +} diff --git a/node_modules/lunr/.eslintrc.json b/node_modules/lunr/.eslintrc.json new file mode 100644 index 0000000..c0aef7f --- /dev/null +++ b/node_modules/lunr/.eslintrc.json @@ -0,0 +1,86 @@ +{ + "env": { + "browser": true, + "node": true + }, + + "globals": { + "lunr": true + }, + + "extends": "eslint:recommended", + + "plugins": [ + "spellcheck" + ], + + "rules": { + "spellcheck/spell-checker": [1, + { + "lang": "en_GB", + "skipWords": [ + "lunr", "val", "param", "idx", "utils", "namespace", "eslint", "latin", + "str", "len", "sqrt", "wildcard", "concat", "metadata", "fn", "params", + "lexeme", "lex", "pos", "typedef", "wildcards", "lexemes", "fns", "stemmer", + "attrs", "tf", "idf", "lookups", "whitelist", "whitelisted", "tokenizer", + "whitespace", "automata", "i", "obj", "anymore", "lexer", "var", "refs", + "serializable", "tis", "twas", "int", "args", "unshift", "plugins", "upsert", + "upserting", "readonly", "baz", "tokenization", "lunrjs", "com", "olivernn", + "github", "js" + ] + } + ], + + "no-constant-condition": [ + "error", + { "checkLoops": false } + ], + + "no-redeclare": "off", + + "dot-location": [ + "error", + "property" + ], + + "no-alert": "error", + "no-caller": "error", + "no-eval": "error", + "no-implied-eval": "error", + "no-extend-native": "error", + "no-implicit-globals": "error", + "no-multi-spaces": "error", + "array-bracket-spacing": "error", + 
"block-spacing": "error", + + "brace-style": [ + "error", + "1tbs", + { "allowSingleLine": true } + ], + + "camelcase": "error", + "comma-dangle": "error", + "comma-spacing": "error", + "comma-style": "error", + "computed-property-spacing": "error", + "func-style": "error", + + "indent": [ + "error", + 2, + { "VariableDeclarator": 2, "SwitchCase": 1 } + ], + + "key-spacing": "error", + "keyword-spacing": "error", + "linebreak-style": "error", + "new-cap": "error", + "no-trailing-spaces": "error", + "no-whitespace-before-property": "error", + "semi": ["error", "never"], + "space-before-function-paren": ["error", "always"], + "space-in-parens": "error", + "space-infix-ops": "error" + } +} diff --git a/node_modules/lunr/.npmignore b/node_modules/lunr/.npmignore new file mode 100644 index 0000000..dee8632 --- /dev/null +++ b/node_modules/lunr/.npmignore @@ -0,0 +1,3 @@ +/node_modules +docs/ +test/env/file_list.json diff --git a/node_modules/lunr/.travis.yml b/node_modules/lunr/.travis.yml new file mode 100644 index 0000000..106e641 --- /dev/null +++ b/node_modules/lunr/.travis.yml @@ -0,0 +1,14 @@ +language: node_js +node_js: + - "node" + - "6" + - "5" + - "4" +script: "make" +addons: + artifacts: + branch: master + paths: + - ./docs + target_paths: /docs + diff --git a/node_modules/lunr/CHANGELOG.md b/node_modules/lunr/CHANGELOG.md new file mode 100644 index 0000000..e2a7ca6 --- /dev/null +++ b/node_modules/lunr/CHANGELOG.md @@ -0,0 +1,270 @@ +# Changelog + +## 2.3.9 + +* Fix bug [#469](https://github.com/olivernn/lunr.js/issues/469) where a union with a complete set returned a non-complete set. Thanks [Bertrand Le Roy](https://github.com/bleroy) for reporting and fixing. + +## 2.3.8 + +* Fix bug [#422](https://github.com/olivernn/lunr.js/issues/422) where a pipline function that returned null was not skipping the token as described in the documentation. 
Thanks [Stephen Cleary](https://github.com/StephenCleary) and [Rob Hoelz](https://github.com/hoelzro) for reporting and investigating. + +## 2.3.7 + +* Fix bug [#417](https://github.com/olivernn/lunr.js/issues/417) where leading white space would cause token position metadata to be reported incorrectly. Thanks [Rob Hoelz](https://github.com/hoelzro) for the fix. + +## 2.3.6 + +* Fix bug [#390](https://github.com/olivernn/lunr.js/issues/390) with fuzzy matching that meant deletions at the end of a word would not match. Thanks [Luca Ongaro](https://github.com/lucaong) for reporting. + +## 2.3.5 + +* Fix bug [#375](https://github.com/olivernn/lunr.js/issues/375) with fuzzy matching that meant insertions at the end of a word would not match. Thanks [Luca Ongaro](https://github.com/lucaong) for reporting and to [Rob Hoelz](https://github.com/hoelzro) for providing a fix. +* Switch to using `Array.isArray` when checking for results from pipeline functions to support `vm.runInContext`, [#381](https://github.com/olivernn/lunr.js/pull/381) thanks [Rob Hoelz](https://github.com/hoelzro). + +## 2.3.4 + +* Ensure that [inverted index is prototype-less](https://github.com/olivernn/lunr.js/pull/378) after serialization, thanks [Rob Hoelz](https://github.com/hoelzro). + +## 2.3.3 + +* Fig bugs [#270](https://github.com/olivernn/lunr.js/issues/270) and [#368](https://github.com/olivernn/lunr.js/issues/368), some wildcard searches over long tokens could be extremely slow, potentially pinning the current thread indefinitely. Thanks [Kyle Spearrin](https://github.com/kspearrin) and [Mohamed Eltuhamy](https://github.com/meltuhamy) for reporting. + +## 2.3.2 + +* Fix bug [#369](https://github.com/olivernn/lunr.js/issues/369) in parsing queries that include either a boost or edit distance modifier followed by a presence modifier on a subsequent term. Thanks [mtdjr](https://github.com/mtdjr) for reporting. 
+ +## 2.3.1 + +* Add workaround for inconsistent browser behaviour [#279](https://github.com/olivernn/lunr.js/issues/279), thanks [Luca Ongaro](https://github.com/lucaong). +* Fix bug in intersect/union of `lunr.Set` [#360](https://github.com/olivernn/lunr.js/issues/360), thanks [Brandon Bethke](https://github.com/brandon-bethke-neudesic) for reporting. + +## 2.3.0 + +* Add support for build time field and document boosts. +* Add support for indexing nested document fields using field extractors. +* Prevent usage of problematic characters in field names, thanks [Stephane Mankowski](https://github.com/miraks31). +* Fix bug when using an array of tokens in a single query term, thanks [Michael Manukyan](https://github.com/mike1808). + +## 2.2.1 + +* Fix bug [#344](https://github.com/olivernn/lunr.js/issues/344) in logic for required terms in multiple fields, thanks [Stephane Mankowski](https://github.com/miraks31). +* Upgrade mocha and fix some test snafus. + +## 2.2.0 + +* Add support for queries with term presence, e.g. required terms and prohibited terms. +* Add support for using the output of `lunr.tokenizer` directly with `lunr.Query#term`. +* Add field name metadata to tokens in build and search pipelines. +* Fix documentation for `lunr.Index` constructor, thanks [Michael Manukyan](https://github.com/mike1808). + +## 2.1.6 + +* Improve pipeline performance for large fields [#329](https://github.com/olivernn/lunr.js/pull/329), thanks [andymcm](https://github.com/andymcm). + +## 2.1.5 + +* Fix bug [#320](https://github.com/olivernn/lunr.js/issues/320) which caused result metadata to be nested under search term instead of field name. Thanks [Jonny Gerig Meyer](https://github.com/jgerigmeyer) for reporting and fixing. + +## 2.1.4 + +* Cache inverse document calculation during build to improve build performance. +* Introduce new method for combining term metadata at search time. +* Improve performance of searches with duplicate search terms. 
+* Tweaks to build process. + +## 2.1.3 + +* Remove private tag from `lunr.Builder#build`, it should be public, thanks [Sean Tan](https://github.com/seantanly). + +## 2.1.2 + +* Fix bug [#282](https://github.com/olivernn/lunr.js/issues/282) which caused metadata stored in the index to be mutated during search, thanks [Andrew Aldridge](https://github.com/i80and). + +## 2.1.1 + +* Fix bug [#280](https://github.com/olivernn/lunr.js/issues/280) in builder where an object with prototype was being used as a Map, thanks [Pete Bacon Darwin](https://github.com/petebacondarwin). + +## 2.1.0 + +* Improve handling of term boosts across multiple fields [#263](https://github.com/olivernn/lunr.js/issues/263) +* Enable escaping of special characters when performing a search [#271](https://github.com/olivernn/lunr.js/issues/271) +* Add ability to programatically include leading and trailing wildcards when performing a query. + +## 2.0.4 + +* Fix bug in IDF calculation that meant the weight for common words was not correctly calculated. + +## 2.0.3 + +* Fix bug [#256](https://github.com/olivernn/lunr.js/issues/256) where duplicate query terms could cause a 'duplicate index' error when building the query vector. Thanks [Bjorn Svensson](https://github.com/bsvensson), [Jason Feng](https://github.com/IYCI), and [et1421](https://github.com/et1421) for reporting and confirming the issue. + +## 2.0.2 + +* Fix bug [#255](https://github.com/olivernn/lunr.js/issues/255) where search queries used a different separator than the tokeniser causing some terms to not be searchable. Thanks [Wes Cossick](https://github.com/WesCossick) for reporting. +* Reduce precision of term scores stored in document vectors to reduce the size of serialised indexes by ~15%, thanks [Qvatra](https://github.com/Qvatra) for the idea. 
+ +## 2.0.1 + +* Fix regression [#254](https://github.com/olivernn/lunr.js/issues/254) where documents containing terms that match properties from Object.prototype cause errors during indexing. Thanks [VonFry](https://github.com/VonFry) for reporting. + +## 2.0.0 + +* Indexes are now immutable, this allows for more space efficient indexes, more advanced searching and better performance. +* Text processing can now attach metadata to tokens the enter the index, this opens up the possibility of highlighting search terms in results. +* More advanced searching including search time field boosts, search by field, fuzzy matching and leading and trailing wildcards. + +## 1.0.0 + +* Deprecate incorrectly spelled lunr.tokenizer.separator. +* No other changes, but bumping to 1.0.0 because it's overdue, and the interfaces are pretty stable now. It also paves the way for 2.0.0... + +## 0.7.2 + +* Fix bug when loading a serialised tokeniser [#226](https://github.com/olivernn/lunr.js/issues/226), thanks [Alex Turpin](https://github.com/alexturpin) for reporting the issue. +* Learn how to spell separator [#223](https://github.com/olivernn/lunr.js/pull/223), thanks [peterennis](https://github.com/peterennis) for helping me learn to spell. + +## 0.7.1 + +* Correctly set the license using the @license doc tag [#217](https://github.com/olivernn/lunr.js/issues/217), thanks [Carlos Araya](https://github.com/caraya). + +## 0.7.0 + +* Make tokenizer a property of the index, allowing for different indexes to use different tokenizers [#205](https://github.com/olivernn/lunr.js/pull/205) and [#21](https://github.com/olivernn/lunr.js/issues/21). +* Fix bug that prevented very large documents from being indexed [#203](https://github.com/olivernn/lunr.js/pull/203), thanks [Daniel Grießhaber](https://github.com/dangrie158). +* Performance improvements when adding documents to the index [#208](https://github.com/olivernn/lunr.js/pull/208), thanks [Dougal Matthews](https://github.com/d0ugal). 
+ +## 0.6.0 + +* Ensure document ref property type is preserved when returning results [#117](https://github.com/olivernn/lunr.js/issues/117), thanks [Kyle Kirby](https://github.com/kkirby). +* Introduce `lunr.generateStopWordFilter` for generating a stop word filter from a provided list of stop words. +* Replace array-like string access with ES3 compatible `String.prototype.charAt` [#186](https://github.com/olivernn/lunr.js/pull/186), thanks [jkellerer](https://github.com/jkellerer). +* Move empty string filtering from `lunr.trimmer` to `lunr.Pipeline.prototype.run` so that empty tokens do not enter the index, regardless of the trimmer being used [#178](https://github.com/olivernn/lunr.js/issues/178), [#177](https://github.com/olivernn/lunr.js/issues/177) and [#174](https://github.com/olivernn/lunr.js/issues/174) +* Allow tokenization of arrays with null and non string elements [#172](https://github.com/olivernn/lunr.js/issues/172). +* Parameterize the seperator used by `lunr.tokenizer`, fixes [#102](https://github.com/olivernn/lunr.js/issues/102). + +## 0.5.12 + +* Implement `lunr.stopWordFilter` with an object instead of using `lunr.SortedSet` [#170](https://github.com/olivernn/lunr.js/pull/170), resulting in a performance boost for the text processing pipeline, thanks to [Brian Vaughn](https://github.com/bvaughn). +* Ensure that `lunr.trimmer` does not introduce empty tokens into the index, [#166](https://github.com/olivernn/lunr.js/pull/166), thanks to [janeisklar](https://github.com/janeisklar) + +## 0.5.11 + +* Fix [bug](https://github.com/olivernn/lunr.js/issues/162) when using the unminified build of lunr in some project builds, thanks [Alessio Michelini](https://github.com/darkmavis1980) + +## 0.5.10 + +* Fix bug in IDF calculation, thanks to [weixsong](https://github.com/weixsong) for discovering the issue. +* Documentation fixes [#111](https://github.com/olivernn/lunr.js/pull/111) thanks [Chris Van](https://github.com/cvan). 
+* Remove version from bower.json as it is not needed [#160](https://github.com/olivernn/lunr.js/pull/160), thanks [Kevin Kirsche](https://github.com/kkirsche) +* Fix link to augment.js on the home page [#159](https://github.com/olivernn/lunr.js/issues/159), thanks [Gábor Nádai](https://github.com/mefiblogger) + +## 0.5.9 + +* Remove recursion from SortedSet#indexOf and SortedSet#locationFor to gain small performance gains in Index#search and Index#add +* Fix incorrect handling of non existant functions when adding/removing from a Pipeline [#146](https://github.com/olivernn/lunr.js/issues/146) thanks to [weixsong](https://github.com/weixsong) + +## 0.5.8 + +* Fix typo when referencing Martin Porter's home page http://tartarus.org/~martin/ [#132](https://github.com/olivernn/lunr.js/pull/132) thanks [James Aylett](https://github.com/jaylett) +* Performance improvement for tokenizer [#139](https://github.com/olivernn/lunr.js/pull/139) thanks [Arun Srinivasan](https://github.com/satchmorun) +* Fix vector magnitude caching bug :flushed: [#142](https://github.com/olivernn/lunr.js/pull/142) thanks [Richard Poole](https://github.com/richardpoole) +* Fix vector insertion bug that prevented lesser ordered nodes to be inserted into a vector [#143](https://github.com/olivernn/lunr.js/pull/143) thanks [Richard Poole](https://github.com/richardpoole) +* Fix inefficient use of arguments in SortedSet add method, thanks to [Max Nordlund](https://github.com/maxnordlund). 
+* Fix deprecated use of path.exists in test server [#141](https://github.com/olivernn/lunr.js/pull/141) thanks [wei song](https://github.com/weixsong) + +## 0.5.7 + +* Performance improvement for stemmer [#124](https://github.com/olivernn/lunr.js/pull/124) thanks [Tony Jacobs](https://github.com/tony-jacobs) + +## 0.5.6 + +* Performance improvement when add documents to the index [#114](https://github.com/olivernn/lunr.js/pull/114) thanks [Alex Holmes](https://github.com/alex2) + +## 0.5.5 + +* Fix bug in tokenizer introduced in 0.5.4 [#101](https://github.com/olivernn/lunr.js/pull/101) thanks [Nolan Lawson](https://github.com/nolanlawson) + +## 0.5.4 + +* Tokenizer also splits on hyphens [#98](https://github.com/olivernn/lunr.js/pull/98/files) thanks [Nolan Lawson](https://github.com/nolanlawson) + +## 0.5.3 + +* Correctly stem words ending with the letter 'y' [#84](https://github.com/olivernn/lunr.js/pull/84) thanks [Mihai Valentin](https://github.com/MihaiValentin) +* Improve build tools and dev dependency installation [#78](https://github.com/olivernn/lunr.js/pull/78) thanks [Ben Pickles](https://github.com/benpickles) + +## 0.5.2 + +* Use npm they said, it'll be easy they said. + +## 0.5.1 + +* Because [npm issues](https://github.com/olivernn/lunr.js/issues/77) :( + +## 0.5.0 + +* Add plugin support to enable i18n and other extensions to lunr. +* Add AMD support [#72](https://github.com/olivernn/lunr.js/issues/72) thanks [lnwdr](https://github.com/lnwdr). +* lunr.Vector now implemented using linked lists for better performance especially in indexes with large numbers of unique tokens. +* Build system clean up. + +## 0.4.5 + +* Fix performance regression introduced in 0.4.4 by fixing #64. + +## 0.4.4 + +* Fix bug [#64](https://github.com/olivernn/lunr.js/issues/64) idf cache should handle tokens with the same name as object properties, thanks [gitgrimbo](https://github.com/gitgrimbo). 
+* Intersperse source files with a semicolon as part of the build process, fixes [#61](https://github.com/olivernn/lunr.js/issues/61), thanks [shyndman](https://github.com/shyndman). + +## 0.4.3 + +* Fix bug [#49](https://github.com/olivernn/lunr.js/issues/49) tokenizer should handle null and undefined as arguments, thanks [jona](https://github.com/jona). + +## 0.4.2 + +* Fix bug [#47](https://github.com/olivernn/lunr.js/issues/47) tokenizer converts its input to a string before trying to split it into tokens, thanks [mikhailkozlov](https://github.com/mikhailkozlov). + +## 0.4.1 + +* Fix bug [#41](https://github.com/olivernn/lunr.js/issues/41) that caused issues when indexing mixed case tags, thanks [Aptary](https://github.com/Aptary) + +## 0.4.0 + +* Add index mutation events ('add', 'update' and 'remove'). +* Performance improvements to searching. +* Penalise non-exact matches so exact matches are better ranked than expanded matches. + +## 0.3.3 + +* Fix bug [#32](https://github.com/olivernn/lunr.js/pull/32) which prevented lunr being used where a `console` object is not present, thanks [Tony Marklove](https://github.com/jjbananas) and [wyuenho](https://github.com/wyuenho) + +## 0.3.2 + +* Fix bug [#27](https://github.com/olivernn/lunr.js/pull/27) when trying to calculate tf with empty fields, thanks [Gambhiro](https://github.com/gambhiro) + +## 0.3.1 + +* Fix bug [#24](https://github.com/olivernn/lunr.js/pull/24) that caused an error when trying to remove a non-existant document from the index, thanks [Jesús Leganés Combarro](https://github.com/piranna) + +## 0.3.0 + +* Implement [JSON serialisation](https://github.com/olivernn/lunr.js/pull/14), allows indexes to be loaded and dumped, thanks [ssured](https://github.com/ssured). +* Performance improvements to searching and indexing. +* Fix bug [#15](https://github.com/olivernn/lunr.js/pull/15) with tokeniser that added stray empty white space to the index, thanks [ssured](https://github.com/ssured). 
+ +## 0.2.3 + +* Fix issue with searching for a term not in the index [#12](https://github.com/olivernn/lunr.js/issues/12), thanks [mcnerthney](https://github.com/mcnerthney) and [makoto](https://github.com/makoto) + +## 0.2.2 + +* Boost exact term matches so they are better ranked than expanded term matches, fixes [#10](https://github.com/olivernn/lunr.js/issues/10), thanks [ssured](https://github.com/ssured) + +## 0.2.1 + +* Changes to the build process. +* Add component.json and package.json +* Add phantomjs test runner +* Remove redundant attributes +* Many [spelling corrections](https://github.com/olivernn/lunr.js/pull/8), thanks [Pascal Borreli](https://github.com/pborreli) diff --git a/node_modules/lunr/CNAME b/node_modules/lunr/CNAME new file mode 100644 index 0000000..d162cd8 --- /dev/null +++ b/node_modules/lunr/CNAME @@ -0,0 +1 @@ +lunrjs.com diff --git a/node_modules/lunr/CONTRIBUTING.md b/node_modules/lunr/CONTRIBUTING.md new file mode 100644 index 0000000..2f4441f --- /dev/null +++ b/node_modules/lunr/CONTRIBUTING.md @@ -0,0 +1,20 @@ +Contributions are very welcome. To make the process as easy as possible please follow these steps: + +* Open an issue detailing the bug you've found, or the feature you wish to add. Simplified working examples using something like [jsFiddle](http://jsfiddle.net) make it easier to diagnose your problem. +* Add tests for your code (so I don't accidentally break it in the future). +* Don't change version numbers or make new builds as part of your changes. +* Don't change the built versions of the library; only make changes to code in the `lib` directory. + +# Developer Dependencies + +A JavaScript runtime is required for building the library. + +Run the tests (using PhantomJS): + + make test + +The tests can also be run in the browser by starting the test server: + + make server + +This will start a server on port 3000, the tests are then available at `/test`. 
diff --git a/node_modules/lunr/LICENSE b/node_modules/lunr/LICENSE new file mode 100644 index 0000000..e6e4e21 --- /dev/null +++ b/node_modules/lunr/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2013 by Oliver Nightingale + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/lunr/Makefile b/node_modules/lunr/Makefile new file mode 100644 index 0000000..37c7ce7 --- /dev/null +++ b/node_modules/lunr/Makefile @@ -0,0 +1,90 @@ + +SRC = lib/lunr.js \ + lib/utils.js \ + lib/field_ref.js \ + lib/set.js \ + lib/idf.js \ + lib/token.js \ + lib/tokenizer.js \ + lib/pipeline.js \ + lib/vector.js \ + lib/stemmer.js \ + lib/stop_word_filter.js \ + lib/trimmer.js \ + lib/token_set.js \ + lib/token_set_builder.js \ + lib/index.js \ + lib/builder.js \ + lib/match_data.js \ + lib/query.js \ + lib/query_parse_error.js \ + lib/query_lexer.js \ + lib/query_parser.js \ + +YEAR = $(shell date +%Y) +VERSION = $(shell cat VERSION) + +NODE ?= $(shell which node) +NPM ?= $(shell which npm) +UGLIFYJS ?= ./node_modules/.bin/uglifyjs +MOCHA ?= ./node_modules/.bin/mocha +MUSTACHE ?= ./node_modules/.bin/mustache +ESLINT ?= ./node_modules/.bin/eslint +JSDOC ?= ./node_modules/.bin/jsdoc +NODE_STATIC ?= ./node_modules/.bin/static + +all: test lint docs +release: lunr.js lunr.min.js bower.json package.json component.json docs + +lunr.js: $(SRC) + cat build/wrapper_start $^ build/wrapper_end | \ + sed "s/@YEAR/${YEAR}/" | \ + sed "s/@VERSION/${VERSION}/" > $@ + +lunr.min.js: lunr.js + ${UGLIFYJS} --compress --mangle --comments < $< > $@ + +%.json: build/%.json.template + cat $< | sed "s/@VERSION/${VERSION}/" > $@ + +size: lunr.min.js + @gzip -c lunr.min.js | wc -c + +server: test/index.html + ${NODE_STATIC} -a 0.0.0.0 -H '{"Cache-Control": "no-cache, must-revalidate"}' + +lint: $(SRC) + ${ESLINT} $^ + +perf/*_perf.js: + ${NODE} -r ./perf/perf_helper.js $@ + +benchmark: perf/*_perf.js + +test: node_modules lunr.js + ${MOCHA} test/*.js -u tdd -r test/test_helper.js -R dot -C + +test/inspect: node_modules lunr.js + ${MOCHA} test/*.js -u tdd -r test/test_helper.js -R dot -C --inspect-brk=0.0.0.0:9292 + +test/env/file_list.json: $(wildcard test/*test.js) + ${NODE} -p 'JSON.stringify({test_files: process.argv.slice(1)})' $^ > $@ + +test/index.html: 
test/env/file_list.json test/env/index.mustache + ${MUSTACHE} $^ > $@ + +docs: $(SRC) + ${JSDOC} -R README.md -d docs -c build/jsdoc.conf.json $^ + +clean: + rm -f lunr{.min,}.js + rm -rf docs + rm *.json + +reset: + git checkout lunr.* *.json + +node_modules: package.json + ${NPM} -s install + +.PHONY: test clean docs reset perf/*_perf.js test/inspect diff --git a/node_modules/lunr/README.md b/node_modules/lunr/README.md new file mode 100644 index 0000000..ea7ace6 --- /dev/null +++ b/node_modules/lunr/README.md @@ -0,0 +1,78 @@ +# Lunr.js + +[![Join the chat at https://gitter.im/olivernn/lunr.js](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/olivernn/lunr.js?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) + +[![Build Status](https://travis-ci.org/olivernn/lunr.js.svg?branch=master)](https://travis-ci.org/olivernn/lunr.js) + +A bit like Solr, but much smaller and not as bright. + +## Example + +A very simple search index can be created using the following: + +```javascript +var idx = lunr(function () { + this.field('title') + this.field('body') + + this.add({ + "title": "Twelfth-Night", + "body": "If music be the food of love, play on: Give me excess of it…", + "author": "William Shakespeare", + "id": "1" + }) +}) +``` + +Then searching is as simple as: + +```javascript +idx.search("love") +``` + +This returns a list of matching documents with a score of how closely they match the search query as well as any associated metadata about the match: + +```javascript +[ + { + "ref": "1", + "score": 0.3535533905932737, + "matchData": { + "metadata": { + "love": { + "body": {} + } + } + } + } +] +``` + +[API documentation](https://lunrjs.com/docs/index.html) is available, as well as a [full working example](https://olivernn.github.io/moonwalkers/). + +## Description + +Lunr.js is a small, full-text search library for use in the browser. 
It indexes JSON documents and provides a simple search interface for retrieving documents that best match text queries. + +## Why + +For web applications with all their data already sitting in the client, it makes sense to be able to search that data on the client too. It saves adding extra, compacted services on the server. A local search index will be quicker, there is no network overhead, and will remain available and usable even without a network connection. + +## Installation + +Simply include the lunr.js source file in the page that you want to use it. Lunr.js is supported in all modern browsers. + +Alternatively an npm package is also available `npm install lunr`. + +Browsers that do not support ES5 will require a JavaScript shim for Lunr to work. You can either use [Augment.js](https://github.com/olivernn/augment.js), [ES5-Shim](https://github.com/kriskowal/es5-shim) or any library that patches old browsers to provide an ES5 compatible JavaScript environment. + +## Features + +* Full text search support for 14 languages +* Boost terms at query time or boost entire documents at index time +* Scope searches to specific fields +* Fuzzy term matching with wildcards or edit distance + +## Contributing + +See the [`CONTRIBUTING.md` file](CONTRIBUTING.md). 
diff --git a/node_modules/lunr/VERSION b/node_modules/lunr/VERSION new file mode 100644 index 0000000..d63fa57 --- /dev/null +++ b/node_modules/lunr/VERSION @@ -0,0 +1 @@ +2.3.9 \ No newline at end of file diff --git a/node_modules/lunr/bower.json b/node_modules/lunr/bower.json new file mode 100644 index 0000000..16d6d24 --- /dev/null +++ b/node_modules/lunr/bower.json @@ -0,0 +1,11 @@ +{ + "name": "lunr.js", + "version": "2.3.9", + "main": "lunr.js", + "ignore": [ + "tests/", + "perf/", + "build/", + "docs/" + ] +} diff --git a/node_modules/lunr/component.json b/node_modules/lunr/component.json new file mode 100644 index 0000000..418b729 --- /dev/null +++ b/node_modules/lunr/component.json @@ -0,0 +1,9 @@ +{ + "name": "lunr", + "repo": "olivernn/lunr.js", + "version": "2.3.9", + "description": "Simple full-text search in your browser.", + "license": "MIT", + "main": "lunr.js", + "scripts": ["lunr.js"] +} diff --git a/node_modules/lunr/index.html b/node_modules/lunr/index.html new file mode 100644 index 0000000..0c55ecf --- /dev/null +++ b/node_modules/lunr/index.html @@ -0,0 +1,305 @@ + + + + lunr.js - A bit like Solr, but much smaller and not as bright + + + + + +
+
+

lunr.js

+

Simple full-text search in your browser

+
+ +
+ +
+
+
+

Get Started

+
+ +

Open your browser's developer tools on this page to follow along.

+ +

Set up an index for your notes:

+
+  var index = lunr(function () {
+    this.field('title', {boost: 10})
+    this.field('body')
+    this.ref('id')
+  })
+ +

Add documents to your index

+
+  index.add({
+    id: 1,
+    title: 'Foo',
+    body: 'Foo foo foo!'
+  })
+
+  index.add({
+    id: 2,
+    title: 'Bar',
+    body: 'Bar bar bar!'
+  })
+ +

Search your documents

+
+  index.search('foo')
+
+ +
+
+

About

+
+ +

lunr.js is a simple full text search engine for your client side applications. It is designed to be small, yet full featured, enabling you to provide a great search experience without the need for external, server side, search services.

+ +

lunr.js has no external dependencies, although it does require a modern browser with ES5 support. In older browsers you can use an ES5 shim, such as augment.js, to provide any missing JavaScript functionality.

+
+ + +
+ +
+
+
+

Pipeline

+
+ +

Every document and search query that enters lunr is passed through a text processing pipeline. The pipeline is simply a stack of functions that perform some processing on the text. Pipeline functions act on the text one token at a time, and what they return is passed to the next function in the pipeline.

+ +

By default lunr adds a stop word filter and stemmer to the pipeline. You can also add your own processors or remove the default ones depending on your requirements. The stemmer currently used is an English language stemmer, which could be replaced with a non-English language stemmer if required, or a Metaphoning processor could be added.

+ + +
+  var index = lunr(function () {
+    this.pipeline.add(function (token, tokenIndex, tokens) {
+      // text processing in here
+    })
+
+    this.pipeline.after(lunr.stopWordFilter, function (token, tokenIndex, tokens) {
+      // text processing in here
+    })
+  })
+        
+ +

Functions in the pipeline are called with three arguments: the current token being processed; the index of that token in the array of tokens, and the whole list of tokens part of the document being processed. This enables simple unigram processing of tokens as well as more sophisticated n-gram processing.

+ +

The function should return the processed version of the text, which will in turn be passed to the next function in the pipeline. Returning undefined will prevent any further processing of the token, and that token will not make it to the index.

+
+
+ +
+
+
+

Tokenization

+
+ +

Tokenization is how lunr converts documents and searches into individual tokens, ready to be run through the text processing pipeline and entered or looked up in the index.

+ +

The default tokenizer included with lunr is designed to handle general english text well, although application, or language specific tokenizers can be used instead.

+
+ +
+
+

Stemming

+
+ +

Stemming increases the recall of the search index by reducing related words down to their stem, so that non-exact search terms still match relevant documents. For example 'search', 'searching' and 'searched' all get reduced to the stem 'search'.

+ +

lunr automatically includes a stemmer based on Martin Porter's algorithms.

+
+ +
+
+

Stop words

+
+ +

Stop words are words that are very common and are not useful in differentiating between documents. These are automatically removed by lunr. This helps to reduce the size of the index and improve search speed and accuracy.

+ +

The default stop word filter contains a large list of very common words in English. For best results a corpus specific stop word filter can also be added to the pipeline. The search algorithm already penalises more common words, but preventing them from entering the index at all can be very beneficial for both space and speed performance.

+
+ +
+ +
+ + + + + + diff --git a/node_modules/lunr/lunr.js b/node_modules/lunr/lunr.js new file mode 100644 index 0000000..6aa370f --- /dev/null +++ b/node_modules/lunr/lunr.js @@ -0,0 +1,3475 @@ +/** + * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9 + * Copyright (C) 2020 Oliver Nightingale + * @license MIT + */ + +;(function(){ + +/** + * A convenience function for configuring and constructing + * a new lunr Index. + * + * A lunr.Builder instance is created and the pipeline setup + * with a trimmer, stop word filter and stemmer. + * + * This builder object is yielded to the configuration function + * that is passed as a parameter, allowing the list of fields + * and other builder parameters to be customised. + * + * All documents _must_ be added within the passed config function. + * + * @example + * var idx = lunr(function () { + * this.field('title') + * this.field('body') + * this.ref('id') + * + * documents.forEach(function (doc) { + * this.add(doc) + * }, this) + * }) + * + * @see {@link lunr.Builder} + * @see {@link lunr.Pipeline} + * @see {@link lunr.trimmer} + * @see {@link lunr.stopWordFilter} + * @see {@link lunr.stemmer} + * @namespace {function} lunr + */ +var lunr = function (config) { + var builder = new lunr.Builder + + builder.pipeline.add( + lunr.trimmer, + lunr.stopWordFilter, + lunr.stemmer + ) + + builder.searchPipeline.add( + lunr.stemmer + ) + + config.call(builder, builder) + return builder.build() +} + +lunr.version = "2.3.9" +/*! + * lunr.utils + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A namespace containing utils for the rest of the lunr library + * @namespace lunr.utils + */ +lunr.utils = {} + +/** + * Print a warning message to the console. + * + * @param {String} message The message to be printed. 
+ * @memberOf lunr.utils + * @function + */ +lunr.utils.warn = (function (global) { + /* eslint-disable no-console */ + return function (message) { + if (global.console && console.warn) { + console.warn(message) + } + } + /* eslint-enable no-console */ +})(this) + +/** + * Convert an object to a string. + * + * In the case of `null` and `undefined` the function returns + * the empty string, in all other cases the result of calling + * `toString` on the passed object is returned. + * + * @param {Any} obj The object to convert to a string. + * @return {String} string representation of the passed object. + * @memberOf lunr.utils + */ +lunr.utils.asString = function (obj) { + if (obj === void 0 || obj === null) { + return "" + } else { + return obj.toString() + } +} + +/** + * Clones an object. + * + * Will create a copy of an existing object such that any mutations + * on the copy cannot affect the original. + * + * Only shallow objects are supported, passing a nested object to this + * function will cause a TypeError. + * + * Objects with primitives, and arrays of primitives are supported. + * + * @param {Object} obj The object to clone. + * @return {Object} a clone of the passed object. + * @throws {TypeError} when a nested object is passed. 
+ * @memberOf Utils + */ +lunr.utils.clone = function (obj) { + if (obj === null || obj === undefined) { + return obj + } + + var clone = Object.create(null), + keys = Object.keys(obj) + + for (var i = 0; i < keys.length; i++) { + var key = keys[i], + val = obj[key] + + if (Array.isArray(val)) { + clone[key] = val.slice() + continue + } + + if (typeof val === 'string' || + typeof val === 'number' || + typeof val === 'boolean') { + clone[key] = val + continue + } + + throw new TypeError("clone is not deep and does not support nested objects") + } + + return clone +} +lunr.FieldRef = function (docRef, fieldName, stringValue) { + this.docRef = docRef + this.fieldName = fieldName + this._stringValue = stringValue +} + +lunr.FieldRef.joiner = "/" + +lunr.FieldRef.fromString = function (s) { + var n = s.indexOf(lunr.FieldRef.joiner) + + if (n === -1) { + throw "malformed field ref string" + } + + var fieldRef = s.slice(0, n), + docRef = s.slice(n + 1) + + return new lunr.FieldRef (docRef, fieldRef, s) +} + +lunr.FieldRef.prototype.toString = function () { + if (this._stringValue == undefined) { + this._stringValue = this.fieldName + lunr.FieldRef.joiner + this.docRef + } + + return this._stringValue +} +/*! + * lunr.Set + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A lunr set. + * + * @constructor + */ +lunr.Set = function (elements) { + this.elements = Object.create(null) + + if (elements) { + this.length = elements.length + + for (var i = 0; i < this.length; i++) { + this.elements[elements[i]] = true + } + } else { + this.length = 0 + } +} + +/** + * A complete set that contains all elements. + * + * @static + * @readonly + * @type {lunr.Set} + */ +lunr.Set.complete = { + intersect: function (other) { + return other + }, + + union: function () { + return this + }, + + contains: function () { + return true + } +} + +/** + * An empty set that contains no elements. 
+ * + * @static + * @readonly + * @type {lunr.Set} + */ +lunr.Set.empty = { + intersect: function () { + return this + }, + + union: function (other) { + return other + }, + + contains: function () { + return false + } +} + +/** + * Returns true if this set contains the specified object. + * + * @param {object} object - Object whose presence in this set is to be tested. + * @returns {boolean} - True if this set contains the specified object. + */ +lunr.Set.prototype.contains = function (object) { + return !!this.elements[object] +} + +/** + * Returns a new set containing only the elements that are present in both + * this set and the specified set. + * + * @param {lunr.Set} other - set to intersect with this set. + * @returns {lunr.Set} a new set that is the intersection of this and the specified set. + */ + +lunr.Set.prototype.intersect = function (other) { + var a, b, elements, intersection = [] + + if (other === lunr.Set.complete) { + return this + } + + if (other === lunr.Set.empty) { + return other + } + + if (this.length < other.length) { + a = this + b = other + } else { + a = other + b = this + } + + elements = Object.keys(a.elements) + + for (var i = 0; i < elements.length; i++) { + var element = elements[i] + if (element in b.elements) { + intersection.push(element) + } + } + + return new lunr.Set (intersection) +} + +/** + * Returns a new set combining the elements of this and the specified set. + * + * @param {lunr.Set} other - set to union with this set. + * @return {lunr.Set} a new set that is the union of this and the specified set. + */ + +lunr.Set.prototype.union = function (other) { + if (other === lunr.Set.complete) { + return lunr.Set.complete + } + + if (other === lunr.Set.empty) { + return this + } + + return new lunr.Set(Object.keys(this.elements).concat(Object.keys(other.elements))) +} +/** + * A function to calculate the inverse document frequency for + * a posting. 
This is shared between the builder and the index + * + * @private + * @param {object} posting - The posting for a given term + * @param {number} documentCount - The total number of documents. + */ +lunr.idf = function (posting, documentCount) { + var documentsWithTerm = 0 + + for (var fieldName in posting) { + if (fieldName == '_index') continue // Ignore the term index, its not a field + documentsWithTerm += Object.keys(posting[fieldName]).length + } + + var x = (documentCount - documentsWithTerm + 0.5) / (documentsWithTerm + 0.5) + + return Math.log(1 + Math.abs(x)) +} + +/** + * A token wraps a string representation of a token + * as it is passed through the text processing pipeline. + * + * @constructor + * @param {string} [str=''] - The string token being wrapped. + * @param {object} [metadata={}] - Metadata associated with this token. + */ +lunr.Token = function (str, metadata) { + this.str = str || "" + this.metadata = metadata || {} +} + +/** + * Returns the token string that is being wrapped by this object. + * + * @returns {string} + */ +lunr.Token.prototype.toString = function () { + return this.str +} + +/** + * A token update function is used when updating or optionally + * when cloning a token. + * + * @callback lunr.Token~updateFunction + * @param {string} str - The string representation of the token. + * @param {Object} metadata - All metadata associated with this token. + */ + +/** + * Applies the given function to the wrapped string token. + * + * @example + * token.update(function (str, metadata) { + * return str.toUpperCase() + * }) + * + * @param {lunr.Token~updateFunction} fn - A function to apply to the token string. + * @returns {lunr.Token} + */ +lunr.Token.prototype.update = function (fn) { + this.str = fn(this.str, this.metadata) + return this +} + +/** + * Creates a clone of this token. Optionally a function can be + * applied to the cloned token. 
+ * + * @param {lunr.Token~updateFunction} [fn] - An optional function to apply to the cloned token. + * @returns {lunr.Token} + */ +lunr.Token.prototype.clone = function (fn) { + fn = fn || function (s) { return s } + return new lunr.Token (fn(this.str, this.metadata), this.metadata) +} +/*! + * lunr.tokenizer + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A function for splitting a string into tokens ready to be inserted into + * the search index. Uses `lunr.tokenizer.separator` to split strings, change + * the value of this property to change how strings are split into tokens. + * + * This tokenizer will convert its parameter to a string by calling `toString` and + * then will split this string on the character in `lunr.tokenizer.separator`. + * Arrays will have their elements converted to strings and wrapped in a lunr.Token. + * + * Optional metadata can be passed to the tokenizer, this metadata will be cloned and + * added as metadata to every token that is created from the object to be tokenized. 
+ * + * @static + * @param {?(string|object|object[])} obj - The object to convert into tokens + * @param {?object} metadata - Optional metadata to associate with every token + * @returns {lunr.Token[]} + * @see {@link lunr.Pipeline} + */ +lunr.tokenizer = function (obj, metadata) { + if (obj == null || obj == undefined) { + return [] + } + + if (Array.isArray(obj)) { + return obj.map(function (t) { + return new lunr.Token( + lunr.utils.asString(t).toLowerCase(), + lunr.utils.clone(metadata) + ) + }) + } + + var str = obj.toString().toLowerCase(), + len = str.length, + tokens = [] + + for (var sliceEnd = 0, sliceStart = 0; sliceEnd <= len; sliceEnd++) { + var char = str.charAt(sliceEnd), + sliceLength = sliceEnd - sliceStart + + if ((char.match(lunr.tokenizer.separator) || sliceEnd == len)) { + + if (sliceLength > 0) { + var tokenMetadata = lunr.utils.clone(metadata) || {} + tokenMetadata["position"] = [sliceStart, sliceLength] + tokenMetadata["index"] = tokens.length + + tokens.push( + new lunr.Token ( + str.slice(sliceStart, sliceEnd), + tokenMetadata + ) + ) + } + + sliceStart = sliceEnd + 1 + } + + } + + return tokens +} + +/** + * The separator used to split a string into tokens. Override this property to change the behaviour of + * `lunr.tokenizer` behaviour when tokenizing strings. By default this splits on whitespace and hyphens. + * + * @static + * @see lunr.tokenizer + */ +lunr.tokenizer.separator = /[\s\-]+/ +/*! + * lunr.Pipeline + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * lunr.Pipelines maintain an ordered list of functions to be applied to all + * tokens in documents entering the search index and queries being ran against + * the index. + * + * An instance of lunr.Index created with the lunr shortcut will contain a + * pipeline with a stop word filter and an English language stemmer. Extra + * functions can be added before or after either of these functions or these + * default functions can be removed. 
+ * + * When run the pipeline will call each function in turn, passing a token, the + * index of that token in the original list of all tokens and finally a list of + * all the original tokens. + * + * The output of functions in the pipeline will be passed to the next function + * in the pipeline. To exclude a token from entering the index the function + * should return undefined, the rest of the pipeline will not be called with + * this token. + * + * For serialisation of pipelines to work, all functions used in an instance of + * a pipeline should be registered with lunr.Pipeline. Registered functions can + * then be loaded. If trying to load a serialised pipeline that uses functions + * that are not registered an error will be thrown. + * + * If not planning on serialising the pipeline then registering pipeline functions + * is not necessary. + * + * @constructor + */ +lunr.Pipeline = function () { + this._stack = [] +} + +lunr.Pipeline.registeredFunctions = Object.create(null) + +/** + * A pipeline function maps lunr.Token to lunr.Token. A lunr.Token contains the token + * string as well as all known metadata. A pipeline function can mutate the token string + * or mutate (or add) metadata for a given token. + * + * A pipeline function can indicate that the passed token should be discarded by returning + * null, undefined or an empty string. This token will not be passed to any downstream pipeline + * functions and will not be added to the index. + * + * Multiple tokens can be returned by returning an array of tokens. Each token will be passed + * to any downstream pipeline functions and all will returned tokens will be added to the index. + * + * Any number of pipeline functions may be chained together using a lunr.Pipeline. + * + * @interface lunr.PipelineFunction + * @param {lunr.Token} token - A token from the document being processed. + * @param {number} i - The index of this token in the complete list of tokens for this document/field. 
+ * @param {lunr.Token[]} tokens - All tokens for this document/field. + * @returns {(?lunr.Token|lunr.Token[])} + */ + +/** + * Register a function with the pipeline. + * + * Functions that are used in the pipeline should be registered if the pipeline + * needs to be serialised, or a serialised pipeline needs to be loaded. + * + * Registering a function does not add it to a pipeline, functions must still be + * added to instances of the pipeline for them to be used when running a pipeline. + * + * @param {lunr.PipelineFunction} fn - The function to check for. + * @param {String} label - The label to register this function with + */ +lunr.Pipeline.registerFunction = function (fn, label) { + if (label in this.registeredFunctions) { + lunr.utils.warn('Overwriting existing registered function: ' + label) + } + + fn.label = label + lunr.Pipeline.registeredFunctions[fn.label] = fn +} + +/** + * Warns if the function is not registered as a Pipeline function. + * + * @param {lunr.PipelineFunction} fn - The function to check for. + * @private + */ +lunr.Pipeline.warnIfFunctionNotRegistered = function (fn) { + var isRegistered = fn.label && (fn.label in this.registeredFunctions) + + if (!isRegistered) { + lunr.utils.warn('Function is not registered with pipeline. This may cause problems when serialising the index.\n', fn) + } +} + +/** + * Loads a previously serialised pipeline. + * + * All functions to be loaded must already be registered with lunr.Pipeline. + * If any function from the serialised data has not been registered then an + * error will be thrown. + * + * @param {Object} serialised - The serialised pipeline to load. 
+ * @returns {lunr.Pipeline} + */ +lunr.Pipeline.load = function (serialised) { + var pipeline = new lunr.Pipeline + + serialised.forEach(function (fnName) { + var fn = lunr.Pipeline.registeredFunctions[fnName] + + if (fn) { + pipeline.add(fn) + } else { + throw new Error('Cannot load unregistered function: ' + fnName) + } + }) + + return pipeline +} + +/** + * Adds new functions to the end of the pipeline. + * + * Logs a warning if the function has not been registered. + * + * @param {lunr.PipelineFunction[]} functions - Any number of functions to add to the pipeline. + */ +lunr.Pipeline.prototype.add = function () { + var fns = Array.prototype.slice.call(arguments) + + fns.forEach(function (fn) { + lunr.Pipeline.warnIfFunctionNotRegistered(fn) + this._stack.push(fn) + }, this) +} + +/** + * Adds a single function after a function that already exists in the + * pipeline. + * + * Logs a warning if the function has not been registered. + * + * @param {lunr.PipelineFunction} existingFn - A function that already exists in the pipeline. + * @param {lunr.PipelineFunction} newFn - The new function to add to the pipeline. + */ +lunr.Pipeline.prototype.after = function (existingFn, newFn) { + lunr.Pipeline.warnIfFunctionNotRegistered(newFn) + + var pos = this._stack.indexOf(existingFn) + if (pos == -1) { + throw new Error('Cannot find existingFn') + } + + pos = pos + 1 + this._stack.splice(pos, 0, newFn) +} + +/** + * Adds a single function before a function that already exists in the + * pipeline. + * + * Logs a warning if the function has not been registered. + * + * @param {lunr.PipelineFunction} existingFn - A function that already exists in the pipeline. + * @param {lunr.PipelineFunction} newFn - The new function to add to the pipeline. 
+ */ +lunr.Pipeline.prototype.before = function (existingFn, newFn) { + lunr.Pipeline.warnIfFunctionNotRegistered(newFn) + + var pos = this._stack.indexOf(existingFn) + if (pos == -1) { + throw new Error('Cannot find existingFn') + } + + this._stack.splice(pos, 0, newFn) +} + +/** + * Removes a function from the pipeline. + * + * @param {lunr.PipelineFunction} fn The function to remove from the pipeline. + */ +lunr.Pipeline.prototype.remove = function (fn) { + var pos = this._stack.indexOf(fn) + if (pos == -1) { + return + } + + this._stack.splice(pos, 1) +} + +/** + * Runs the current list of functions that make up the pipeline against the + * passed tokens. + * + * @param {Array} tokens The tokens to run through the pipeline. + * @returns {Array} + */ +lunr.Pipeline.prototype.run = function (tokens) { + var stackLength = this._stack.length + + for (var i = 0; i < stackLength; i++) { + var fn = this._stack[i] + var memo = [] + + for (var j = 0; j < tokens.length; j++) { + var result = fn(tokens[j], j, tokens) + + if (result === null || result === void 0 || result === '') continue + + if (Array.isArray(result)) { + for (var k = 0; k < result.length; k++) { + memo.push(result[k]) + } + } else { + memo.push(result) + } + } + + tokens = memo + } + + return tokens +} + +/** + * Convenience method for passing a string through a pipeline and getting + * strings out. This method takes care of wrapping the passed string in a + * token and mapping the resulting tokens back to strings. + * + * @param {string} str - The string to pass through the pipeline. + * @param {?object} metadata - Optional metadata to associate with the token + * passed to the pipeline. + * @returns {string[]} + */ +lunr.Pipeline.prototype.runString = function (str, metadata) { + var token = new lunr.Token (str, metadata) + + return this.run([token]).map(function (t) { + return t.toString() + }) +} + +/** + * Resets the pipeline by removing any existing processors. 
+ * + */ +lunr.Pipeline.prototype.reset = function () { + this._stack = [] +} + +/** + * Returns a representation of the pipeline ready for serialisation. + * + * Logs a warning if the function has not been registered. + * + * @returns {Array} + */ +lunr.Pipeline.prototype.toJSON = function () { + return this._stack.map(function (fn) { + lunr.Pipeline.warnIfFunctionNotRegistered(fn) + + return fn.label + }) +} +/*! + * lunr.Vector + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A vector is used to construct the vector space of documents and queries. These + * vectors support operations to determine the similarity between two documents or + * a document and a query. + * + * Normally no parameters are required for initializing a vector, but in the case of + * loading a previously dumped vector the raw elements can be provided to the constructor. + * + * For performance reasons vectors are implemented with a flat array, where an elements + * index is immediately followed by its value. E.g. [index, value, index, value]. This + * allows the underlying array to be as sparse as possible and still offer decent + * performance when being used for vector calculations. + * + * @constructor + * @param {Number[]} [elements] - The flat list of element index and element value pairs. + */ +lunr.Vector = function (elements) { + this._magnitude = 0 + this.elements = elements || [] +} + + +/** + * Calculates the position within the vector to insert a given index. + * + * This is used internally by insert and upsert. If there are duplicate indexes then + * the position is returned as if the value for that index were to be updated, but it + * is the callers responsibility to check whether there is a duplicate at that index + * + * @param {Number} insertIdx - The index at which the element should be inserted. 
+ * @returns {Number} + */ +lunr.Vector.prototype.positionForIndex = function (index) { + // For an empty vector the tuple can be inserted at the beginning + if (this.elements.length == 0) { + return 0 + } + + var start = 0, + end = this.elements.length / 2, + sliceLength = end - start, + pivotPoint = Math.floor(sliceLength / 2), + pivotIndex = this.elements[pivotPoint * 2] + + while (sliceLength > 1) { + if (pivotIndex < index) { + start = pivotPoint + } + + if (pivotIndex > index) { + end = pivotPoint + } + + if (pivotIndex == index) { + break + } + + sliceLength = end - start + pivotPoint = start + Math.floor(sliceLength / 2) + pivotIndex = this.elements[pivotPoint * 2] + } + + if (pivotIndex == index) { + return pivotPoint * 2 + } + + if (pivotIndex > index) { + return pivotPoint * 2 + } + + if (pivotIndex < index) { + return (pivotPoint + 1) * 2 + } +} + +/** + * Inserts an element at an index within the vector. + * + * Does not allow duplicates, will throw an error if there is already an entry + * for this index. + * + * @param {Number} insertIdx - The index at which the element should be inserted. + * @param {Number} val - The value to be inserted into the vector. + */ +lunr.Vector.prototype.insert = function (insertIdx, val) { + this.upsert(insertIdx, val, function () { + throw "duplicate index" + }) +} + +/** + * Inserts or updates an existing index within the vector. + * + * @param {Number} insertIdx - The index at which the element should be inserted. + * @param {Number} val - The value to be inserted into the vector. 
+ * @param {function} fn - A function that is called for updates, the existing value and the + * requested value are passed as arguments + */ +lunr.Vector.prototype.upsert = function (insertIdx, val, fn) { + this._magnitude = 0 + var position = this.positionForIndex(insertIdx) + + if (this.elements[position] == insertIdx) { + this.elements[position + 1] = fn(this.elements[position + 1], val) + } else { + this.elements.splice(position, 0, insertIdx, val) + } +} + +/** + * Calculates the magnitude of this vector. + * + * @returns {Number} + */ +lunr.Vector.prototype.magnitude = function () { + if (this._magnitude) return this._magnitude + + var sumOfSquares = 0, + elementsLength = this.elements.length + + for (var i = 1; i < elementsLength; i += 2) { + var val = this.elements[i] + sumOfSquares += val * val + } + + return this._magnitude = Math.sqrt(sumOfSquares) +} + +/** + * Calculates the dot product of this vector and another vector. + * + * @param {lunr.Vector} otherVector - The vector to compute the dot product with. + * @returns {Number} + */ +lunr.Vector.prototype.dot = function (otherVector) { + var dotProduct = 0, + a = this.elements, b = otherVector.elements, + aLen = a.length, bLen = b.length, + aVal = 0, bVal = 0, + i = 0, j = 0 + + while (i < aLen && j < bLen) { + aVal = a[i], bVal = b[j] + if (aVal < bVal) { + i += 2 + } else if (aVal > bVal) { + j += 2 + } else if (aVal == bVal) { + dotProduct += a[i + 1] * b[j + 1] + i += 2 + j += 2 + } + } + + return dotProduct +} + +/** + * Calculates the similarity between this vector and another vector. + * + * @param {lunr.Vector} otherVector - The other vector to calculate the + * similarity with. + * @returns {Number} + */ +lunr.Vector.prototype.similarity = function (otherVector) { + return this.dot(otherVector) / this.magnitude() || 0 +} + +/** + * Converts the vector to an array of the elements within the vector. 
+ * + * @returns {Number[]} + */ +lunr.Vector.prototype.toArray = function () { + var output = new Array (this.elements.length / 2) + + for (var i = 1, j = 0; i < this.elements.length; i += 2, j++) { + output[j] = this.elements[i] + } + + return output +} + +/** + * A JSON serializable representation of the vector. + * + * @returns {Number[]} + */ +lunr.Vector.prototype.toJSON = function () { + return this.elements +} +/* eslint-disable */ +/*! + * lunr.stemmer + * Copyright (C) 2020 Oliver Nightingale + * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt + */ + +/** + * lunr.stemmer is an english language stemmer, this is a JavaScript + * implementation of the PorterStemmer taken from http://tartarus.org/~martin + * + * @static + * @implements {lunr.PipelineFunction} + * @param {lunr.Token} token - The string to stem + * @returns {lunr.Token} + * @see {@link lunr.Pipeline} + * @function + */ +lunr.stemmer = (function(){ + var step2list = { + "ational" : "ate", + "tional" : "tion", + "enci" : "ence", + "anci" : "ance", + "izer" : "ize", + "bli" : "ble", + "alli" : "al", + "entli" : "ent", + "eli" : "e", + "ousli" : "ous", + "ization" : "ize", + "ation" : "ate", + "ator" : "ate", + "alism" : "al", + "iveness" : "ive", + "fulness" : "ful", + "ousness" : "ous", + "aliti" : "al", + "iviti" : "ive", + "biliti" : "ble", + "logi" : "log" + }, + + step3list = { + "icate" : "ic", + "ative" : "", + "alize" : "al", + "iciti" : "ic", + "ical" : "ic", + "ful" : "", + "ness" : "" + }, + + c = "[^aeiou]", // consonant + v = "[aeiouy]", // vowel + C = c + "[^aeiouy]*", // consonant sequence + V = v + "[aeiou]*", // vowel sequence + + mgr0 = "^(" + C + ")?" + V + C, // [C]VC... is m>0 + meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$", // [C]VC[V] is m=1 + mgr1 = "^(" + C + ")?" + V + C + V + C, // [C]VCVC... is m>1 + s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + var re_mgr0 = new RegExp(mgr0); + var re_mgr1 = new RegExp(mgr1); + var re_meq1 = new RegExp(meq1); + var re_s_v = new RegExp(s_v); + + var re_1a = /^(.+?)(ss|i)es$/; + var re2_1a = /^(.+?)([^s])s$/; + var re_1b = /^(.+?)eed$/; + var re2_1b = /^(.+?)(ed|ing)$/; + var re_1b_2 = /.$/; + var re2_1b_2 = /(at|bl|iz)$/; + var re3_1b_2 = new RegExp("([^aeiouylsz])\\1$"); + var re4_1b_2 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + + var re_1c = /^(.+?[^aeiou])y$/; + var re_2 = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + + var re_3 = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + + var re_4 = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + var re2_4 = /^(.+?)(s|t)(ion)$/; + + var re_5 = /^(.+?)e$/; + var re_5_1 = /ll$/; + var re3_5 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + + var porterStemmer = function porterStemmer(w) { + var stem, + suffix, + firstch, + re, + re2, + re3, + re4; + + if (w.length < 3) { return w; } + + firstch = w.substr(0,1); + if (firstch == "y") { + w = firstch.toUpperCase() + w.substr(1); + } + + // Step 1a + re = re_1a + re2 = re2_1a; + + if (re.test(w)) { w = w.replace(re,"$1$2"); } + else if (re2.test(w)) { w = w.replace(re2,"$1$2"); } + + // Step 1b + re = re_1b; + re2 = re2_1b; + if (re.test(w)) { + var fp = re.exec(w); + re = re_mgr0; + if (re.test(fp[1])) { + re = re_1b_2; + w = w.replace(re,""); + } + } else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = re_s_v; + if (re2.test(stem)) { + w = stem; + re2 = re2_1b_2; + re3 = re3_1b_2; + re4 = re4_1b_2; + if (re2.test(w)) { w = w + "e"; } + else if (re3.test(w)) { re = re_1b_2; w = w.replace(re,""); } + else if (re4.test(w)) { w = w + "e"; } + } + } + + // Step 1c - replace suffix y or Y by i if preceded by a non-vowel which is not the first letter of the word (so cry -> cri, by -> by, say -> say) + re = 
re_1c; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + w = stem + "i"; + } + + // Step 2 + re = re_2; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = re_mgr0; + if (re.test(stem)) { + w = stem + step2list[suffix]; + } + } + + // Step 3 + re = re_3; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = re_mgr0; + if (re.test(stem)) { + w = stem + step3list[suffix]; + } + } + + // Step 4 + re = re_4; + re2 = re2_4; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = re_mgr1; + if (re.test(stem)) { + w = stem; + } + } else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = re_mgr1; + if (re2.test(stem)) { + w = stem; + } + } + + // Step 5 + re = re_5; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = re_mgr1; + re2 = re_meq1; + re3 = re3_5; + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) { + w = stem; + } + } + + re = re_5_1; + re2 = re_mgr1; + if (re.test(w) && re2.test(w)) { + re = re_1b_2; + w = w.replace(re,""); + } + + // and turn initial Y back to y + + if (firstch == "y") { + w = firstch.toLowerCase() + w.substr(1); + } + + return w; + }; + + return function (token) { + return token.update(porterStemmer); + } +})(); + +lunr.Pipeline.registerFunction(lunr.stemmer, 'stemmer') +/*! + * lunr.stopWordFilter + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * lunr.generateStopWordFilter builds a stopWordFilter function from the provided + * list of stop words. + * + * The built in lunr.stopWordFilter is built using this generator and can be used + * to generate custom stopWordFilters for applications or non English languages. 
+ * + * @function + * @param {Array} token The token to pass through the filter + * @returns {lunr.PipelineFunction} + * @see lunr.Pipeline + * @see lunr.stopWordFilter + */ +lunr.generateStopWordFilter = function (stopWords) { + var words = stopWords.reduce(function (memo, stopWord) { + memo[stopWord] = stopWord + return memo + }, {}) + + return function (token) { + if (token && words[token.toString()] !== token.toString()) return token + } +} + +/** + * lunr.stopWordFilter is an English language stop word list filter, any words + * contained in the list will not be passed through the filter. + * + * This is intended to be used in the Pipeline. If the token does not pass the + * filter then undefined will be returned. + * + * @function + * @implements {lunr.PipelineFunction} + * @params {lunr.Token} token - A token to check for being a stop word. + * @returns {lunr.Token} + * @see {@link lunr.Pipeline} + */ +lunr.stopWordFilter = lunr.generateStopWordFilter([ + 'a', + 'able', + 'about', + 'across', + 'after', + 'all', + 'almost', + 'also', + 'am', + 'among', + 'an', + 'and', + 'any', + 'are', + 'as', + 'at', + 'be', + 'because', + 'been', + 'but', + 'by', + 'can', + 'cannot', + 'could', + 'dear', + 'did', + 'do', + 'does', + 'either', + 'else', + 'ever', + 'every', + 'for', + 'from', + 'get', + 'got', + 'had', + 'has', + 'have', + 'he', + 'her', + 'hers', + 'him', + 'his', + 'how', + 'however', + 'i', + 'if', + 'in', + 'into', + 'is', + 'it', + 'its', + 'just', + 'least', + 'let', + 'like', + 'likely', + 'may', + 'me', + 'might', + 'most', + 'must', + 'my', + 'neither', + 'no', + 'nor', + 'not', + 'of', + 'off', + 'often', + 'on', + 'only', + 'or', + 'other', + 'our', + 'own', + 'rather', + 'said', + 'say', + 'says', + 'she', + 'should', + 'since', + 'so', + 'some', + 'than', + 'that', + 'the', + 'their', + 'them', + 'then', + 'there', + 'these', + 'they', + 'this', + 'tis', + 'to', + 'too', + 'twas', + 'us', + 'wants', + 'was', + 'we', + 'were', + 'what', + 
'when', + 'where', + 'which', + 'while', + 'who', + 'whom', + 'why', + 'will', + 'with', + 'would', + 'yet', + 'you', + 'your' +]) + +lunr.Pipeline.registerFunction(lunr.stopWordFilter, 'stopWordFilter') +/*! + * lunr.trimmer + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * lunr.trimmer is a pipeline function for trimming non word + * characters from the beginning and end of tokens before they + * enter the index. + * + * This implementation may not work correctly for non latin + * characters and should either be removed or adapted for use + * with languages with non-latin characters. + * + * @static + * @implements {lunr.PipelineFunction} + * @param {lunr.Token} token The token to pass through the filter + * @returns {lunr.Token} + * @see lunr.Pipeline + */ +lunr.trimmer = function (token) { + return token.update(function (s) { + return s.replace(/^\W+/, '').replace(/\W+$/, '') + }) +} + +lunr.Pipeline.registerFunction(lunr.trimmer, 'trimmer') +/*! + * lunr.TokenSet + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * A token set is used to store the unique list of all tokens + * within an index. Token sets are also used to represent an + * incoming query to the index, this query token set and index + * token set are then intersected to find which tokens to look + * up in the inverted index. + * + * A token set can hold multiple tokens, as in the case of the + * index token set, or it can hold a single token as in the + * case of a simple query token set. + * + * Additionally token sets are used to perform wildcard matching. + * Leading, contained and trailing wildcards are supported, and + * from this edit distance matching can also be provided. + * + * Token sets are implemented as a minimal finite state automata, + * where both common prefixes and suffixes are shared between tokens. + * This helps to reduce the space used for storing the token set. 
+ * + * @constructor + */ +lunr.TokenSet = function () { + this.final = false + this.edges = {} + this.id = lunr.TokenSet._nextId + lunr.TokenSet._nextId += 1 +} + +/** + * Keeps track of the next, auto increment, identifier to assign + * to a new tokenSet. + * + * TokenSets require a unique identifier to be correctly minimised. + * + * @private + */ +lunr.TokenSet._nextId = 1 + +/** + * Creates a TokenSet instance from the given sorted array of words. + * + * @param {String[]} arr - A sorted array of strings to create the set from. + * @returns {lunr.TokenSet} + * @throws Will throw an error if the input array is not sorted. + */ +lunr.TokenSet.fromArray = function (arr) { + var builder = new lunr.TokenSet.Builder + + for (var i = 0, len = arr.length; i < len; i++) { + builder.insert(arr[i]) + } + + builder.finish() + return builder.root +} + +/** + * Creates a token set from a query clause. + * + * @private + * @param {Object} clause - A single clause from lunr.Query. + * @param {string} clause.term - The query clause term. + * @param {number} [clause.editDistance] - The optional edit distance for the term. + * @returns {lunr.TokenSet} + */ +lunr.TokenSet.fromClause = function (clause) { + if ('editDistance' in clause) { + return lunr.TokenSet.fromFuzzyString(clause.term, clause.editDistance) + } else { + return lunr.TokenSet.fromString(clause.term) + } +} + +/** + * Creates a token set representing a single string with a specified + * edit distance. + * + * Insertions, deletions, substitutions and transpositions are each + * treated as an edit distance of 1. + * + * Increasing the allowed edit distance will have a dramatic impact + * on the performance of both creating and intersecting these TokenSets. + * It is advised to keep the edit distance less than 3. + * + * @param {string} str - The string to create the token set from. + * @param {number} editDistance - The allowed edit distance to match. 
+ * @returns {lunr.Vector} + */ +lunr.TokenSet.fromFuzzyString = function (str, editDistance) { + var root = new lunr.TokenSet + + var stack = [{ + node: root, + editsRemaining: editDistance, + str: str + }] + + while (stack.length) { + var frame = stack.pop() + + // no edit + if (frame.str.length > 0) { + var char = frame.str.charAt(0), + noEditNode + + if (char in frame.node.edges) { + noEditNode = frame.node.edges[char] + } else { + noEditNode = new lunr.TokenSet + frame.node.edges[char] = noEditNode + } + + if (frame.str.length == 1) { + noEditNode.final = true + } + + stack.push({ + node: noEditNode, + editsRemaining: frame.editsRemaining, + str: frame.str.slice(1) + }) + } + + if (frame.editsRemaining == 0) { + continue + } + + // insertion + if ("*" in frame.node.edges) { + var insertionNode = frame.node.edges["*"] + } else { + var insertionNode = new lunr.TokenSet + frame.node.edges["*"] = insertionNode + } + + if (frame.str.length == 0) { + insertionNode.final = true + } + + stack.push({ + node: insertionNode, + editsRemaining: frame.editsRemaining - 1, + str: frame.str + }) + + // deletion + // can only do a deletion if we have enough edits remaining + // and if there are characters left to delete in the string + if (frame.str.length > 1) { + stack.push({ + node: frame.node, + editsRemaining: frame.editsRemaining - 1, + str: frame.str.slice(1) + }) + } + + // deletion + // just removing the last character from the str + if (frame.str.length == 1) { + frame.node.final = true + } + + // substitution + // can only do a substitution if we have enough edits remaining + // and if there are characters left to substitute + if (frame.str.length >= 1) { + if ("*" in frame.node.edges) { + var substitutionNode = frame.node.edges["*"] + } else { + var substitutionNode = new lunr.TokenSet + frame.node.edges["*"] = substitutionNode + } + + if (frame.str.length == 1) { + substitutionNode.final = true + } + + stack.push({ + node: substitutionNode, + editsRemaining: 
frame.editsRemaining - 1, + str: frame.str.slice(1) + }) + } + + // transposition + // can only do a transposition if there are edits remaining + // and there are enough characters to transpose + if (frame.str.length > 1) { + var charA = frame.str.charAt(0), + charB = frame.str.charAt(1), + transposeNode + + if (charB in frame.node.edges) { + transposeNode = frame.node.edges[charB] + } else { + transposeNode = new lunr.TokenSet + frame.node.edges[charB] = transposeNode + } + + if (frame.str.length == 1) { + transposeNode.final = true + } + + stack.push({ + node: transposeNode, + editsRemaining: frame.editsRemaining - 1, + str: charA + frame.str.slice(2) + }) + } + } + + return root +} + +/** + * Creates a TokenSet from a string. + * + * The string may contain one or more wildcard characters (*) + * that will allow wildcard matching when intersecting with + * another TokenSet. + * + * @param {string} str - The string to create a TokenSet from. + * @returns {lunr.TokenSet} + */ +lunr.TokenSet.fromString = function (str) { + var node = new lunr.TokenSet, + root = node + + /* + * Iterates through all characters within the passed string + * appending a node for each character. + * + * When a wildcard character is found then a self + * referencing edge is introduced to continually match + * any number of any characters. + */ + for (var i = 0, len = str.length; i < len; i++) { + var char = str[i], + final = (i == len - 1) + + if (char == "*") { + node.edges[char] = node + node.final = final + + } else { + var next = new lunr.TokenSet + next.final = final + + node.edges[char] = next + node = next + } + } + + return root +} + +/** + * Converts this TokenSet into an array of strings + * contained within the TokenSet. + * + * This is not intended to be used on a TokenSet that + * contains wildcards, in these cases the results are + * undefined and are likely to cause an infinite loop. 
+ * + * @returns {string[]} + */ +lunr.TokenSet.prototype.toArray = function () { + var words = [] + + var stack = [{ + prefix: "", + node: this + }] + + while (stack.length) { + var frame = stack.pop(), + edges = Object.keys(frame.node.edges), + len = edges.length + + if (frame.node.final) { + /* In Safari, at this point the prefix is sometimes corrupted, see: + * https://github.com/olivernn/lunr.js/issues/279 Calling any + * String.prototype method forces Safari to "cast" this string to what + * it's supposed to be, fixing the bug. */ + frame.prefix.charAt(0) + words.push(frame.prefix) + } + + for (var i = 0; i < len; i++) { + var edge = edges[i] + + stack.push({ + prefix: frame.prefix.concat(edge), + node: frame.node.edges[edge] + }) + } + } + + return words +} + +/** + * Generates a string representation of a TokenSet. + * + * This is intended to allow TokenSets to be used as keys + * in objects, largely to aid the construction and minimisation + * of a TokenSet. As such it is not designed to be a human + * friendly representation of the TokenSet. + * + * @returns {string} + */ +lunr.TokenSet.prototype.toString = function () { + // NOTE: Using Object.keys here as this.edges is very likely + // to enter 'hash-mode' with many keys being added + // + // avoiding a for-in loop here as it leads to the function + // being de-optimised (at least in V8). From some simple + // benchmarks the performance is comparable, but allowing + // V8 to optimize may mean easy performance wins in the future. + + if (this._str) { + return this._str + } + + var str = this.final ? '1' : '0', + labels = Object.keys(this.edges).sort(), + len = labels.length + + for (var i = 0; i < len; i++) { + var label = labels[i], + node = this.edges[label] + + str = str + label + node.id + } + + return str +} + +/** + * Returns a new TokenSet that is the intersection of + * this TokenSet and the passed TokenSet. 
+ * + * This intersection will take into account any wildcards + * contained within the TokenSet. + * + * @param {lunr.TokenSet} b - An other TokenSet to intersect with. + * @returns {lunr.TokenSet} + */ +lunr.TokenSet.prototype.intersect = function (b) { + var output = new lunr.TokenSet, + frame = undefined + + var stack = [{ + qNode: b, + output: output, + node: this + }] + + while (stack.length) { + frame = stack.pop() + + // NOTE: As with the #toString method, we are using + // Object.keys and a for loop instead of a for-in loop + // as both of these objects enter 'hash' mode, causing + // the function to be de-optimised in V8 + var qEdges = Object.keys(frame.qNode.edges), + qLen = qEdges.length, + nEdges = Object.keys(frame.node.edges), + nLen = nEdges.length + + for (var q = 0; q < qLen; q++) { + var qEdge = qEdges[q] + + for (var n = 0; n < nLen; n++) { + var nEdge = nEdges[n] + + if (nEdge == qEdge || qEdge == '*') { + var node = frame.node.edges[nEdge], + qNode = frame.qNode.edges[qEdge], + final = node.final && qNode.final, + next = undefined + + if (nEdge in frame.output.edges) { + // an edge already exists for this character + // no need to create a new node, just set the finality + // bit unless this node is already final + next = frame.output.edges[nEdge] + next.final = next.final || final + + } else { + // no edge exists yet, must create one + // set the finality bit and insert it + // into the output + next = new lunr.TokenSet + next.final = final + frame.output.edges[nEdge] = next + } + + stack.push({ + qNode: qNode, + output: next, + node: node + }) + } + } + } + } + + return output +} +lunr.TokenSet.Builder = function () { + this.previousWord = "" + this.root = new lunr.TokenSet + this.uncheckedNodes = [] + this.minimizedNodes = {} +} + +lunr.TokenSet.Builder.prototype.insert = function (word) { + var node, + commonPrefix = 0 + + if (word < this.previousWord) { + throw new Error ("Out of order word insertion") + } + + for (var i = 0; i < 
word.length && i < this.previousWord.length; i++) { + if (word[i] != this.previousWord[i]) break + commonPrefix++ + } + + this.minimize(commonPrefix) + + if (this.uncheckedNodes.length == 0) { + node = this.root + } else { + node = this.uncheckedNodes[this.uncheckedNodes.length - 1].child + } + + for (var i = commonPrefix; i < word.length; i++) { + var nextNode = new lunr.TokenSet, + char = word[i] + + node.edges[char] = nextNode + + this.uncheckedNodes.push({ + parent: node, + char: char, + child: nextNode + }) + + node = nextNode + } + + node.final = true + this.previousWord = word +} + +lunr.TokenSet.Builder.prototype.finish = function () { + this.minimize(0) +} + +lunr.TokenSet.Builder.prototype.minimize = function (downTo) { + for (var i = this.uncheckedNodes.length - 1; i >= downTo; i--) { + var node = this.uncheckedNodes[i], + childKey = node.child.toString() + + if (childKey in this.minimizedNodes) { + node.parent.edges[node.char] = this.minimizedNodes[childKey] + } else { + // Cache the key for this node since + // we know it can't change anymore + node.child._str = childKey + + this.minimizedNodes[childKey] = node.child + } + + this.uncheckedNodes.pop() + } +} +/*! + * lunr.Index + * Copyright (C) 2020 Oliver Nightingale + */ + +/** + * An index contains the built index of all documents and provides a query interface + * to the index. + * + * Usually instances of lunr.Index will not be created using this constructor, instead + * lunr.Builder should be used to construct new indexes, or lunr.Index.load should be + * used to load previously built and serialized indexes. + * + * @constructor + * @param {Object} attrs - The attributes of the built search index. + * @param {Object} attrs.invertedIndex - An index of term/field to document reference. + * @param {Object} attrs.fieldVectors - Field vectors + * @param {lunr.TokenSet} attrs.tokenSet - An set of all corpus tokens. + * @param {string[]} attrs.fields - The names of indexed document fields. 
+ * @param {lunr.Pipeline} attrs.pipeline - The pipeline to use for search terms. + */ +lunr.Index = function (attrs) { + this.invertedIndex = attrs.invertedIndex + this.fieldVectors = attrs.fieldVectors + this.tokenSet = attrs.tokenSet + this.fields = attrs.fields + this.pipeline = attrs.pipeline +} + +/** + * A result contains details of a document matching a search query. + * @typedef {Object} lunr.Index~Result + * @property {string} ref - The reference of the document this result represents. + * @property {number} score - A number between 0 and 1 representing how similar this document is to the query. + * @property {lunr.MatchData} matchData - Contains metadata about this match including which term(s) caused the match. + */ + +/** + * Although lunr provides the ability to create queries using lunr.Query, it also provides a simple + * query language which itself is parsed into an instance of lunr.Query. + * + * For programmatically building queries it is advised to directly use lunr.Query, the query language + * is best used for human entered text rather than program generated text. + * + * At its simplest queries can just be a single term, e.g. `hello`, multiple terms are also supported + * and will be combined with OR, e.g `hello world` will match documents that contain either 'hello' + * or 'world', though those that contain both will rank higher in the results. + * + * Wildcards can be included in terms to match one or more unspecified characters, these wildcards can + * be inserted anywhere within the term, and more than one wildcard can exist in a single term. Adding + * wildcards will increase the number of documents that will be found but can also have a negative + * impact on query performance, especially with wildcards at the beginning of a term. + * + * Terms can be restricted to specific fields, e.g. `title:hello`, only documents with the term + * hello in the title field will match this query. 
Using a field not present in the index will lead + * to an error being thrown. + * + * Modifiers can also be added to terms, lunr supports edit distance and boost modifiers on terms. A term + * boost will make documents matching that term score higher, e.g. `foo^5`. Edit distance is also supported + * to provide fuzzy matching, e.g. 'hello~2' will match documents with hello with an edit distance of 2. + * Avoid large values for edit distance to improve query performance. + * + * Each term also supports a presence modifier. By default a term's presence in document is optional, however + * this can be changed to either required or prohibited. For a term's presence to be required in a document the + * term should be prefixed with a '+', e.g. `+foo bar` is a search for documents that must contain 'foo' and + * optionally contain 'bar'. Conversely a leading '-' sets the terms presence to prohibited, i.e. it must not + * appear in a document, e.g. `-foo bar` is a search for documents that do not contain 'foo' but may contain 'bar'. + * + * To escape special characters the backslash character '\' can be used, this allows searches to include + * characters that would normally be considered modifiers, e.g. `foo\~2` will search for a term "foo~2" instead + * of attempting to apply a boost of 2 to the search term "foo". + * + * @typedef {string} lunr.Index~QueryString + * @example Simple single term query + * hello + * @example Multiple term query + * hello world + * @example term scoped to a field + * title:hello + * @example term with a boost of 10 + * hello^10 + * @example term with an edit distance of 2 + * hello~2 + * @example terms with presence modifiers + * -foo +bar baz + */ + +/** + * Performs a search against the index using lunr query syntax. + * + * Results will be returned sorted by their score, the most relevant results + * will be returned first. 
For details on how the score is calculated, please see + * the {@link https://lunrjs.com/guides/searching.html#scoring|guide}. + * + * For more programmatic querying use lunr.Index#query. + * + * @param {lunr.Index~QueryString} queryString - A string containing a lunr query. + * @throws {lunr.QueryParseError} If the passed query string cannot be parsed. + * @returns {lunr.Index~Result[]} + */ +lunr.Index.prototype.search = function (queryString) { + return this.query(function (query) { + var parser = new lunr.QueryParser(queryString, query) + parser.parse() + }) +} + +/** + * A query builder callback provides a query object to be used to express + * the query to perform on the index. + * + * @callback lunr.Index~queryBuilder + * @param {lunr.Query} query - The query object to build up. + * @this lunr.Query + */ + +/** + * Performs a query against the index using the yielded lunr.Query object. + * + * If performing programmatic queries against the index, this method is preferred + * over lunr.Index#search so as to avoid the additional query parsing overhead. + * + * A query object is yielded to the supplied function which should be used to + * express the query to be run against the index. + * + * Note that although this function takes a callback parameter it is _not_ an + * asynchronous operation, the callback is just yielded a query object to be + * customized. + * + * @param {lunr.Index~queryBuilder} fn - A function that is used to build the query. 
+ * @returns {lunr.Index~Result[]} + */ +lunr.Index.prototype.query = function (fn) { + // for each query clause + // * process terms + // * expand terms from token set + // * find matching documents and metadata + // * get document vectors + // * score documents + + var query = new lunr.Query(this.fields), + matchingFields = Object.create(null), + queryVectors = Object.create(null), + termFieldCache = Object.create(null), + requiredMatches = Object.create(null), + prohibitedMatches = Object.create(null) + + /* + * To support field level boosts a query vector is created per + * field. An empty vector is eagerly created to support negated + * queries. + */ + for (var i = 0; i < this.fields.length; i++) { + queryVectors[this.fields[i]] = new lunr.Vector + } + + fn.call(query, query) + + for (var i = 0; i < query.clauses.length; i++) { + /* + * Unless the pipeline has been disabled for this term, which is + * the case for terms with wildcards, we need to pass the clause + * term through the search pipeline. A pipeline returns an array + * of processed terms. Pipeline functions may expand the passed + * term, which means we may end up performing multiple index lookups + * for a single query term. + */ + var clause = query.clauses[i], + terms = null, + clauseMatches = lunr.Set.empty + + if (clause.usePipeline) { + terms = this.pipeline.runString(clause.term, { + fields: clause.fields + }) + } else { + terms = [clause.term] + } + + for (var m = 0; m < terms.length; m++) { + var term = terms[m] + + /* + * Each term returned from the pipeline needs to use the same query + * clause object, e.g. the same boost and or edit distance. The + * simplest way to do this is to re-use the clause object but mutate + * its term property. 
+ */ + clause.term = term + + /* + * From the term in the clause we create a token set which will then + * be used to intersect the indexes token set to get a list of terms + * to lookup in the inverted index + */ + var termTokenSet = lunr.TokenSet.fromClause(clause), + expandedTerms = this.tokenSet.intersect(termTokenSet).toArray() + + /* + * If a term marked as required does not exist in the tokenSet it is + * impossible for the search to return any matches. We set all the field + * scoped required matches set to empty and stop examining any further + * clauses. + */ + if (expandedTerms.length === 0 && clause.presence === lunr.Query.presence.REQUIRED) { + for (var k = 0; k < clause.fields.length; k++) { + var field = clause.fields[k] + requiredMatches[field] = lunr.Set.empty + } + + break + } + + for (var j = 0; j < expandedTerms.length; j++) { + /* + * For each term get the posting and termIndex, this is required for + * building the query vector. + */ + var expandedTerm = expandedTerms[j], + posting = this.invertedIndex[expandedTerm], + termIndex = posting._index + + for (var k = 0; k < clause.fields.length; k++) { + /* + * For each field that this query term is scoped by (by default + * all fields are in scope) we need to get all the document refs + * that have this term in that field. + * + * The posting is the entry in the invertedIndex for the matching + * term from above. + */ + var field = clause.fields[k], + fieldPosting = posting[field], + matchingDocumentRefs = Object.keys(fieldPosting), + termField = expandedTerm + "/" + field, + matchingDocumentsSet = new lunr.Set(matchingDocumentRefs) + + /* + * if the presence of this term is required ensure that the matching + * documents are added to the set of required matches for this clause. 
+ * + */ + if (clause.presence == lunr.Query.presence.REQUIRED) { + clauseMatches = clauseMatches.union(matchingDocumentsSet) + + if (requiredMatches[field] === undefined) { + requiredMatches[field] = lunr.Set.complete + } + } + + /* + * if the presence of this term is prohibited ensure that the matching + * documents are added to the set of prohibited matches for this field, + * creating that set if it does not yet exist. + */ + if (clause.presence == lunr.Query.presence.PROHIBITED) { + if (prohibitedMatches[field] === undefined) { + prohibitedMatches[field] = lunr.Set.empty + } + + prohibitedMatches[field] = prohibitedMatches[field].union(matchingDocumentsSet) + + /* + * Prohibited matches should not be part of the query vector used for + * similarity scoring and no metadata should be extracted so we continue + * to the next field + */ + continue + } + + /* + * The query field vector is populated using the termIndex found for + * the term and a unit value with the appropriate boost applied. + * Using upsert because there could already be an entry in the vector + * for the term we are working with. In that case we just add the scores + * together. 
+ */ + queryVectors[field].upsert(termIndex, clause.boost, function (a, b) { return a + b }) + + /** + * If we've already seen this term, field combo then we've already collected + * the matching documents and metadata, no need to go through all that again + */ + if (termFieldCache[termField]) { + continue + } + + for (var l = 0; l < matchingDocumentRefs.length; l++) { + /* + * All metadata for this term/field/document triple + * are then extracted and collected into an instance + * of lunr.MatchData ready to be returned in the query + * results + */ + var matchingDocumentRef = matchingDocumentRefs[l], + matchingFieldRef = new lunr.FieldRef (matchingDocumentRef, field), + metadata = fieldPosting[matchingDocumentRef], + fieldMatch + + if ((fieldMatch = matchingFields[matchingFieldRef]) === undefined) { + matchingFields[matchingFieldRef] = new lunr.MatchData (expandedTerm, field, metadata) + } else { + fieldMatch.add(expandedTerm, field, metadata) + } + + } + + termFieldCache[termField] = true + } + } + } + + /** + * If the presence was required we need to update the requiredMatches field sets. + * We do this after all fields for the term have collected their matches because + * the clause terms presence is required in _any_ of the fields not _all_ of the + * fields. 
+ */ + if (clause.presence === lunr.Query.presence.REQUIRED) { + for (var k = 0; k < clause.fields.length; k++) { + var field = clause.fields[k] + requiredMatches[field] = requiredMatches[field].intersect(clauseMatches) + } + } + } + + /** + * Need to combine the field scoped required and prohibited + * matching documents into a global set of required and prohibited + * matches + */ + var allRequiredMatches = lunr.Set.complete, + allProhibitedMatches = lunr.Set.empty + + for (var i = 0; i < this.fields.length; i++) { + var field = this.fields[i] + + if (requiredMatches[field]) { + allRequiredMatches = allRequiredMatches.intersect(requiredMatches[field]) + } + + if (prohibitedMatches[field]) { + allProhibitedMatches = allProhibitedMatches.union(prohibitedMatches[field]) + } + } + + var matchingFieldRefs = Object.keys(matchingFields), + results = [], + matches = Object.create(null) + + /* + * If the query is negated (contains only prohibited terms) + * we need to get _all_ fieldRefs currently existing in the + * index. This is only done when we know that the query is + * entirely prohibited terms to avoid any cost of getting all + * fieldRefs unnecessarily. + * + * Additionally, blank MatchData must be created to correctly + * populate the results. + */ + if (query.isNegated()) { + matchingFieldRefs = Object.keys(this.fieldVectors) + + for (var i = 0; i < matchingFieldRefs.length; i++) { + var matchingFieldRef = matchingFieldRefs[i] + var fieldRef = lunr.FieldRef.fromString(matchingFieldRef) + matchingFields[matchingFieldRef] = new lunr.MatchData + } + } + + for (var i = 0; i < matchingFieldRefs.length; i++) { + /* + * Currently we have document fields that match the query, but we + * need to return documents. The matchData and scores are combined + * from multiple fields belonging to the same document. + * + * Scores are calculated by field, using the query vectors created + * above, and combined into a final document score using addition. 
+ */ + var fieldRef = lunr.FieldRef.fromString(matchingFieldRefs[i]), + docRef = fieldRef.docRef + + if (!allRequiredMatches.contains(docRef)) { + continue + } + + if (allProhibitedMatches.contains(docRef)) { + continue + } + + var fieldVector = this.fieldVectors[fieldRef], + score = queryVectors[fieldRef.fieldName].similarity(fieldVector), + docMatch + + if ((docMatch = matches[docRef]) !== undefined) { + docMatch.score += score + docMatch.matchData.combine(matchingFields[fieldRef]) + } else { + var match = { + ref: docRef, + score: score, + matchData: matchingFields[fieldRef] + } + matches[docRef] = match + results.push(match) + } + } + + /* + * Sort the results objects by score, highest first. + */ + return results.sort(function (a, b) { + return b.score - a.score + }) +} + +/** + * Prepares the index for JSON serialization. + * + * The schema for this JSON blob will be described in a + * separate JSON schema file. + * + * @returns {Object} + */ +lunr.Index.prototype.toJSON = function () { + var invertedIndex = Object.keys(this.invertedIndex) + .sort() + .map(function (term) { + return [term, this.invertedIndex[term]] + }, this) + + var fieldVectors = Object.keys(this.fieldVectors) + .map(function (ref) { + return [ref, this.fieldVectors[ref].toJSON()] + }, this) + + return { + version: lunr.version, + fields: this.fields, + fieldVectors: fieldVectors, + invertedIndex: invertedIndex, + pipeline: this.pipeline.toJSON() + } +} + +/** + * Loads a previously serialized lunr.Index + * + * @param {Object} serializedIndex - A previously serialized lunr.Index + * @returns {lunr.Index} + */ +lunr.Index.load = function (serializedIndex) { + var attrs = {}, + fieldVectors = {}, + serializedVectors = serializedIndex.fieldVectors, + invertedIndex = Object.create(null), + serializedInvertedIndex = serializedIndex.invertedIndex, + tokenSetBuilder = new lunr.TokenSet.Builder, + pipeline = lunr.Pipeline.load(serializedIndex.pipeline) + + if (serializedIndex.version != 
lunr.version) {
    lunr.utils.warn("Version mismatch when loading serialised index. Current version of lunr '" + lunr.version + "' does not match serialized index '" + serializedIndex.version + "'")
  }

  // Rehydrate each serialized [ref, elements] tuple back into a lunr.Vector.
  for (var i = 0; i < serializedVectors.length; i++) {
    var tuple = serializedVectors[i],
        ref = tuple[0],
        elements = tuple[1]

    fieldVectors[ref] = new lunr.Vector(elements)
  }

  // Rebuild the inverted index and, in the same pass, feed every term into
  // the token set builder so the prefix/suffix automaton can be recreated.
  for (var i = 0; i < serializedInvertedIndex.length; i++) {
    var tuple = serializedInvertedIndex[i],
        term = tuple[0],
        posting = tuple[1]

    tokenSetBuilder.insert(term)
    invertedIndex[term] = posting
  }

  tokenSetBuilder.finish()

  attrs.fields = serializedIndex.fields

  attrs.fieldVectors = fieldVectors
  attrs.invertedIndex = invertedIndex
  attrs.tokenSet = tokenSetBuilder.root
  attrs.pipeline = pipeline

  return new lunr.Index(attrs)
}
/*!
 * lunr.Builder
 * Copyright (C) 2020 Oliver Nightingale
 */

/**
 * lunr.Builder performs indexing on a set of documents and
 * returns instances of lunr.Index ready for querying.
 *
 * All configuration of the index is done via the builder, the
 * fields to index, the document reference, the text processing
 * pipeline and document scoring parameters are all set on the
 * builder before indexing.
 *
 * @constructor
 * @property {string} _ref - Internal reference to the document reference field.
 * @property {string[]} _fields - Internal reference to the document fields to index.
 * @property {object} invertedIndex - The inverted index maps terms to document fields.
 * @property {object} fieldTermFrequencies - Keeps track of term frequencies per document field.
 * @property {object} fieldLengths - Keeps track of the length of each document field added to the index.
 * @property {lunr.tokenizer} tokenizer - Function for splitting strings into tokens for indexing.
 * @property {lunr.Pipeline} pipeline - The pipeline performs text processing on tokens before indexing.
 * @property {lunr.Pipeline} searchPipeline - A pipeline for processing search terms before querying the index.
 * @property {number} documentCount - Keeps track of the total number of documents indexed.
 * @property {number} _b - A parameter to control field length normalization, setting this to 0 disables normalization, 1 fully normalizes field lengths, the default value is 0.75.
 * @property {number} _k1 - A parameter to control how quickly an increase in term frequency results in term frequency saturation, the default value is 1.2.
 * @property {number} termIndex - A counter incremented for each unique term, used to identify a terms position in the vector space.
 * @property {array} metadataWhitelist - A list of metadata keys that have been whitelisted for entry in the index.
 */
lunr.Builder = function () {
  this._ref = "id"
  this._fields = Object.create(null)
  this._documents = Object.create(null)
  this.invertedIndex = Object.create(null)
  this.fieldTermFrequencies = {}
  this.fieldLengths = {}
  this.tokenizer = lunr.tokenizer
  this.pipeline = new lunr.Pipeline
  this.searchPipeline = new lunr.Pipeline
  this.documentCount = 0
  this._b = 0.75
  this._k1 = 1.2
  this.termIndex = 0
  this.metadataWhitelist = []
}

/**
 * Sets the document field used as the document reference. Every document must have this field.
 * The type of this field in the document should be a string, if it is not a string it will be
 * coerced into a string by calling toString.
 *
 * The default ref is 'id'.
 *
 * The ref should _not_ be changed during indexing, it should be set before any documents are
 * added to the index. Changing it during indexing can lead to inconsistent results.
 *
 * @param {string} ref - The name of the reference field in the document.
 */
lunr.Builder.prototype.ref = function (ref) {
  this._ref = ref
}

/**
 * A function that is used to extract a field from a document.
 *
 * Lunr expects a field to be at the top level of a document, if however the field
 * is deeply nested within a document an extractor function can be used to extract
 * the right field for indexing.
 *
 * @callback fieldExtractor
 * @param {object} doc - The document being added to the index.
 * @returns {?(string|object|object[])} obj - The object that will be indexed for this field.
 * @example Extracting a nested field
 * function (doc) { return doc.nested.field }
 */

/**
 * Adds a field to the list of document fields that will be indexed. Every document being
 * indexed should have this field. Null values for this field in indexed documents will
 * not cause errors but will limit the chance of that document being retrieved by searches.
 *
 * All fields should be added before adding documents to the index. Adding fields after
 * a document has been indexed will have no effect on already indexed documents.
 *
 * Fields can be boosted at build time. This allows terms within that field to have more
 * importance when ranking search results. Use a field boost to specify that matches within
 * one field are more important than other fields.
 *
 * @param {string} fieldName - The name of a field to index in all documents.
 * @param {object} attributes - Optional attributes associated with this field.
 * @param {number} [attributes.boost=1] - Boost applied to all terms within this field.
 * @param {fieldExtractor} [attributes.extractor] - Function to extract a field from a document.
 * @throws {RangeError} fieldName cannot contain unsupported characters '/'
 */
lunr.Builder.prototype.field = function (fieldName, attributes) {
  // '/' is reserved as the separator in term/field posting keys, so it
  // cannot appear inside a field name.
  if (/\//.test(fieldName)) {
    throw new RangeError ("Field '" + fieldName + "' contains illegal character '/'")
  }

  this._fields[fieldName] = attributes || {}
}

/**
 * A parameter to tune the amount of field length normalisation that is applied when
 * calculating relevance scores. 
A value of 0 will completely disable any normalisation + * and a value of 1 will fully normalise field lengths. The default is 0.75. Values of b + * will be clamped to the range 0 - 1. + * + * @param {number} number - The value to set for this tuning parameter. + */ +lunr.Builder.prototype.b = function (number) { + if (number < 0) { + this._b = 0 + } else if (number > 1) { + this._b = 1 + } else { + this._b = number + } +} + +/** + * A parameter that controls the speed at which a rise in term frequency results in term + * frequency saturation. The default value is 1.2. Setting this to a higher value will give + * slower saturation levels, a lower value will result in quicker saturation. + * + * @param {number} number - The value to set for this tuning parameter. + */ +lunr.Builder.prototype.k1 = function (number) { + this._k1 = number +} + +/** + * Adds a document to the index. + * + * Before adding fields to the index the index should have been fully setup, with the document + * ref and all fields to index already having been specified. + * + * The document must have a field name as specified by the ref (by default this is 'id') and + * it should have all fields defined for indexing, though null or undefined values will not + * cause errors. + * + * Entire documents can be boosted at build time. Applying a boost to a document indicates that + * this document should rank higher in search results than other documents. + * + * @param {object} doc - The document to add to the index. + * @param {object} attributes - Optional attributes associated with this document. + * @param {number} [attributes.boost=1] - Boost applied to all terms within this document. 
+ */ +lunr.Builder.prototype.add = function (doc, attributes) { + var docRef = doc[this._ref], + fields = Object.keys(this._fields) + + this._documents[docRef] = attributes || {} + this.documentCount += 1 + + for (var i = 0; i < fields.length; i++) { + var fieldName = fields[i], + extractor = this._fields[fieldName].extractor, + field = extractor ? extractor(doc) : doc[fieldName], + tokens = this.tokenizer(field, { + fields: [fieldName] + }), + terms = this.pipeline.run(tokens), + fieldRef = new lunr.FieldRef (docRef, fieldName), + fieldTerms = Object.create(null) + + this.fieldTermFrequencies[fieldRef] = fieldTerms + this.fieldLengths[fieldRef] = 0 + + // store the length of this field for this document + this.fieldLengths[fieldRef] += terms.length + + // calculate term frequencies for this field + for (var j = 0; j < terms.length; j++) { + var term = terms[j] + + if (fieldTerms[term] == undefined) { + fieldTerms[term] = 0 + } + + fieldTerms[term] += 1 + + // add to inverted index + // create an initial posting if one doesn't exist + if (this.invertedIndex[term] == undefined) { + var posting = Object.create(null) + posting["_index"] = this.termIndex + this.termIndex += 1 + + for (var k = 0; k < fields.length; k++) { + posting[fields[k]] = Object.create(null) + } + + this.invertedIndex[term] = posting + } + + // add an entry for this term/fieldName/docRef to the invertedIndex + if (this.invertedIndex[term][fieldName][docRef] == undefined) { + this.invertedIndex[term][fieldName][docRef] = Object.create(null) + } + + // store all whitelisted metadata about this token in the + // inverted index + for (var l = 0; l < this.metadataWhitelist.length; l++) { + var metadataKey = this.metadataWhitelist[l], + metadata = term.metadata[metadataKey] + + if (this.invertedIndex[term][fieldName][docRef][metadataKey] == undefined) { + this.invertedIndex[term][fieldName][docRef][metadataKey] = [] + } + + this.invertedIndex[term][fieldName][docRef][metadataKey].push(metadata) + } + } 
+ + } +} + +/** + * Calculates the average document length for this index + * + * @private + */ +lunr.Builder.prototype.calculateAverageFieldLengths = function () { + + var fieldRefs = Object.keys(this.fieldLengths), + numberOfFields = fieldRefs.length, + accumulator = {}, + documentsWithField = {} + + for (var i = 0; i < numberOfFields; i++) { + var fieldRef = lunr.FieldRef.fromString(fieldRefs[i]), + field = fieldRef.fieldName + + documentsWithField[field] || (documentsWithField[field] = 0) + documentsWithField[field] += 1 + + accumulator[field] || (accumulator[field] = 0) + accumulator[field] += this.fieldLengths[fieldRef] + } + + var fields = Object.keys(this._fields) + + for (var i = 0; i < fields.length; i++) { + var fieldName = fields[i] + accumulator[fieldName] = accumulator[fieldName] / documentsWithField[fieldName] + } + + this.averageFieldLength = accumulator +} + +/** + * Builds a vector space model of every document using lunr.Vector + * + * @private + */ +lunr.Builder.prototype.createFieldVectors = function () { + var fieldVectors = {}, + fieldRefs = Object.keys(this.fieldTermFrequencies), + fieldRefsLength = fieldRefs.length, + termIdfCache = Object.create(null) + + for (var i = 0; i < fieldRefsLength; i++) { + var fieldRef = lunr.FieldRef.fromString(fieldRefs[i]), + fieldName = fieldRef.fieldName, + fieldLength = this.fieldLengths[fieldRef], + fieldVector = new lunr.Vector, + termFrequencies = this.fieldTermFrequencies[fieldRef], + terms = Object.keys(termFrequencies), + termsLength = terms.length + + + var fieldBoost = this._fields[fieldName].boost || 1, + docBoost = this._documents[fieldRef.docRef].boost || 1 + + for (var j = 0; j < termsLength; j++) { + var term = terms[j], + tf = termFrequencies[term], + termIndex = this.invertedIndex[term]._index, + idf, score, scoreWithPrecision + + if (termIdfCache[term] === undefined) { + idf = lunr.idf(this.invertedIndex[term], this.documentCount) + termIdfCache[term] = idf + } else { + idf = 
termIdfCache[term] + } + + score = idf * ((this._k1 + 1) * tf) / (this._k1 * (1 - this._b + this._b * (fieldLength / this.averageFieldLength[fieldName])) + tf) + score *= fieldBoost + score *= docBoost + scoreWithPrecision = Math.round(score * 1000) / 1000 + // Converts 1.23456789 to 1.234. + // Reducing the precision so that the vectors take up less + // space when serialised. Doing it now so that they behave + // the same before and after serialisation. Also, this is + // the fastest approach to reducing a number's precision in + // JavaScript. + + fieldVector.insert(termIndex, scoreWithPrecision) + } + + fieldVectors[fieldRef] = fieldVector + } + + this.fieldVectors = fieldVectors +} + +/** + * Creates a token set of all tokens in the index using lunr.TokenSet + * + * @private + */ +lunr.Builder.prototype.createTokenSet = function () { + this.tokenSet = lunr.TokenSet.fromArray( + Object.keys(this.invertedIndex).sort() + ) +} + +/** + * Builds the index, creating an instance of lunr.Index. + * + * This completes the indexing process and should only be called + * once all documents have been added to the index. + * + * @returns {lunr.Index} + */ +lunr.Builder.prototype.build = function () { + this.calculateAverageFieldLengths() + this.createFieldVectors() + this.createTokenSet() + + return new lunr.Index({ + invertedIndex: this.invertedIndex, + fieldVectors: this.fieldVectors, + tokenSet: this.tokenSet, + fields: Object.keys(this._fields), + pipeline: this.searchPipeline + }) +} + +/** + * Applies a plugin to the index builder. + * + * A plugin is a function that is called with the index builder as its context. + * Plugins can be used to customise or extend the behaviour of the index + * in some way. A plugin is just a function, that encapsulated the custom + * behaviour that should be applied when building the index. + * + * The plugin function will be called with the index builder as its argument, additional + * arguments can also be passed when calling use. 
The function will be called + * with the index builder as its context. + * + * @param {Function} plugin The plugin to apply. + */ +lunr.Builder.prototype.use = function (fn) { + var args = Array.prototype.slice.call(arguments, 1) + args.unshift(this) + fn.apply(this, args) +} +/** + * Contains and collects metadata about a matching document. + * A single instance of lunr.MatchData is returned as part of every + * lunr.Index~Result. + * + * @constructor + * @param {string} term - The term this match data is associated with + * @param {string} field - The field in which the term was found + * @param {object} metadata - The metadata recorded about this term in this field + * @property {object} metadata - A cloned collection of metadata associated with this document. + * @see {@link lunr.Index~Result} + */ +lunr.MatchData = function (term, field, metadata) { + var clonedMetadata = Object.create(null), + metadataKeys = Object.keys(metadata || {}) + + // Cloning the metadata to prevent the original + // being mutated during match data combination. + // Metadata is kept in an array within the inverted + // index so cloning the data can be done with + // Array#slice + for (var i = 0; i < metadataKeys.length; i++) { + var key = metadataKeys[i] + clonedMetadata[key] = metadata[key].slice() + } + + this.metadata = Object.create(null) + + if (term !== undefined) { + this.metadata[term] = Object.create(null) + this.metadata[term][field] = clonedMetadata + } +} + +/** + * An instance of lunr.MatchData will be created for every term that matches a + * document. However only one instance is required in a lunr.Index~Result. This + * method combines metadata from another instance of lunr.MatchData with this + * objects metadata. + * + * @param {lunr.MatchData} otherMatchData - Another instance of match data to merge with this one. 
+ * @see {@link lunr.Index~Result} + */ +lunr.MatchData.prototype.combine = function (otherMatchData) { + var terms = Object.keys(otherMatchData.metadata) + + for (var i = 0; i < terms.length; i++) { + var term = terms[i], + fields = Object.keys(otherMatchData.metadata[term]) + + if (this.metadata[term] == undefined) { + this.metadata[term] = Object.create(null) + } + + for (var j = 0; j < fields.length; j++) { + var field = fields[j], + keys = Object.keys(otherMatchData.metadata[term][field]) + + if (this.metadata[term][field] == undefined) { + this.metadata[term][field] = Object.create(null) + } + + for (var k = 0; k < keys.length; k++) { + var key = keys[k] + + if (this.metadata[term][field][key] == undefined) { + this.metadata[term][field][key] = otherMatchData.metadata[term][field][key] + } else { + this.metadata[term][field][key] = this.metadata[term][field][key].concat(otherMatchData.metadata[term][field][key]) + } + + } + } + } +} + +/** + * Add metadata for a term/field pair to this instance of match data. 
+ * + * @param {string} term - The term this match data is associated with + * @param {string} field - The field in which the term was found + * @param {object} metadata - The metadata recorded about this term in this field + */ +lunr.MatchData.prototype.add = function (term, field, metadata) { + if (!(term in this.metadata)) { + this.metadata[term] = Object.create(null) + this.metadata[term][field] = metadata + return + } + + if (!(field in this.metadata[term])) { + this.metadata[term][field] = metadata + return + } + + var metadataKeys = Object.keys(metadata) + + for (var i = 0; i < metadataKeys.length; i++) { + var key = metadataKeys[i] + + if (key in this.metadata[term][field]) { + this.metadata[term][field][key] = this.metadata[term][field][key].concat(metadata[key]) + } else { + this.metadata[term][field][key] = metadata[key] + } + } +} +/** + * A lunr.Query provides a programmatic way of defining queries to be performed + * against a {@link lunr.Index}. + * + * Prefer constructing a lunr.Query using the {@link lunr.Index#query} method + * so the query object is pre-initialized with the right index fields. + * + * @constructor + * @property {lunr.Query~Clause[]} clauses - An array of query clauses. + * @property {string[]} allFields - An array of all available fields in a lunr.Index. + */ +lunr.Query = function (allFields) { + this.clauses = [] + this.allFields = allFields +} + +/** + * Constants for indicating what kind of automatic wildcard insertion will be used when constructing a query clause. + * + * This allows wildcards to be added to the beginning and end of a term without having to manually do any string + * concatenation. + * + * The wildcard constants can be bitwise combined to select both leading and trailing wildcards. 
+ * + * @constant + * @default + * @property {number} wildcard.NONE - The term will have no wildcards inserted, this is the default behaviour + * @property {number} wildcard.LEADING - Prepend the term with a wildcard, unless a leading wildcard already exists + * @property {number} wildcard.TRAILING - Append a wildcard to the term, unless a trailing wildcard already exists + * @see lunr.Query~Clause + * @see lunr.Query#clause + * @see lunr.Query#term + * @example query term with trailing wildcard + * query.term('foo', { wildcard: lunr.Query.wildcard.TRAILING }) + * @example query term with leading and trailing wildcard + * query.term('foo', { + * wildcard: lunr.Query.wildcard.LEADING | lunr.Query.wildcard.TRAILING + * }) + */ + +lunr.Query.wildcard = new String ("*") +lunr.Query.wildcard.NONE = 0 +lunr.Query.wildcard.LEADING = 1 +lunr.Query.wildcard.TRAILING = 2 + +/** + * Constants for indicating what kind of presence a term must have in matching documents. + * + * @constant + * @enum {number} + * @see lunr.Query~Clause + * @see lunr.Query#clause + * @see lunr.Query#term + * @example query term with required presence + * query.term('foo', { presence: lunr.Query.presence.REQUIRED }) + */ +lunr.Query.presence = { + /** + * Term's presence in a document is optional, this is the default value. + */ + OPTIONAL: 1, + + /** + * Term's presence in a document is required, documents that do not contain + * this term will not be returned. + */ + REQUIRED: 2, + + /** + * Term's presence in a document is prohibited, documents that do contain + * this term will not be returned. + */ + PROHIBITED: 3 +} + +/** + * A single clause in a {@link lunr.Query} contains a term and details on how to + * match that term against a {@link lunr.Index}. + * + * @typedef {Object} lunr.Query~Clause + * @property {string[]} fields - The fields in an index this clause should be matched against. + * @property {number} [boost=1] - Any boost that should be applied when matching this clause. 
+ * @property {number} [editDistance] - Whether the term should have fuzzy matching applied, and how fuzzy the match should be. + * @property {boolean} [usePipeline] - Whether the term should be passed through the search pipeline. + * @property {number} [wildcard=lunr.Query.wildcard.NONE] - Whether the term should have wildcards appended or prepended. + * @property {number} [presence=lunr.Query.presence.OPTIONAL] - The terms presence in any matching documents. + */ + +/** + * Adds a {@link lunr.Query~Clause} to this query. + * + * Unless the clause contains the fields to be matched all fields will be matched. In addition + * a default boost of 1 is applied to the clause. + * + * @param {lunr.Query~Clause} clause - The clause to add to this query. + * @see lunr.Query~Clause + * @returns {lunr.Query} + */ +lunr.Query.prototype.clause = function (clause) { + if (!('fields' in clause)) { + clause.fields = this.allFields + } + + if (!('boost' in clause)) { + clause.boost = 1 + } + + if (!('usePipeline' in clause)) { + clause.usePipeline = true + } + + if (!('wildcard' in clause)) { + clause.wildcard = lunr.Query.wildcard.NONE + } + + if ((clause.wildcard & lunr.Query.wildcard.LEADING) && (clause.term.charAt(0) != lunr.Query.wildcard)) { + clause.term = "*" + clause.term + } + + if ((clause.wildcard & lunr.Query.wildcard.TRAILING) && (clause.term.slice(-1) != lunr.Query.wildcard)) { + clause.term = "" + clause.term + "*" + } + + if (!('presence' in clause)) { + clause.presence = lunr.Query.presence.OPTIONAL + } + + this.clauses.push(clause) + + return this +} + +/** + * A negated query is one in which every clause has a presence of + * prohibited. These queries require some special processing to return + * the expected results. 
+ * + * @returns boolean + */ +lunr.Query.prototype.isNegated = function () { + for (var i = 0; i < this.clauses.length; i++) { + if (this.clauses[i].presence != lunr.Query.presence.PROHIBITED) { + return false + } + } + + return true +} + +/** + * Adds a term to the current query, under the covers this will create a {@link lunr.Query~Clause} + * to the list of clauses that make up this query. + * + * The term is used as is, i.e. no tokenization will be performed by this method. Instead conversion + * to a token or token-like string should be done before calling this method. + * + * The term will be converted to a string by calling `toString`. Multiple terms can be passed as an + * array, each term in the array will share the same options. + * + * @param {object|object[]} term - The term(s) to add to the query. + * @param {object} [options] - Any additional properties to add to the query clause. + * @returns {lunr.Query} + * @see lunr.Query#clause + * @see lunr.Query~Clause + * @example adding a single term to a query + * query.term("foo") + * @example adding a single term to a query and specifying search fields, term boost and automatic trailing wildcard + * query.term("foo", { + * fields: ["title"], + * boost: 10, + * wildcard: lunr.Query.wildcard.TRAILING + * }) + * @example using lunr.tokenizer to convert a string to tokens before using them as terms + * query.term(lunr.tokenizer("foo bar")) + */ +lunr.Query.prototype.term = function (term, options) { + if (Array.isArray(term)) { + term.forEach(function (t) { this.term(t, lunr.utils.clone(options)) }, this) + return this + } + + var clause = options || {} + clause.term = term.toString() + + this.clause(clause) + + return this +} +lunr.QueryParseError = function (message, start, end) { + this.name = "QueryParseError" + this.message = message + this.start = start + this.end = end +} + +lunr.QueryParseError.prototype = new Error +lunr.QueryLexer = function (str) { + this.lexemes = [] + this.str = str + this.length 
= str.length + this.pos = 0 + this.start = 0 + this.escapeCharPositions = [] +} + +lunr.QueryLexer.prototype.run = function () { + var state = lunr.QueryLexer.lexText + + while (state) { + state = state(this) + } +} + +lunr.QueryLexer.prototype.sliceString = function () { + var subSlices = [], + sliceStart = this.start, + sliceEnd = this.pos + + for (var i = 0; i < this.escapeCharPositions.length; i++) { + sliceEnd = this.escapeCharPositions[i] + subSlices.push(this.str.slice(sliceStart, sliceEnd)) + sliceStart = sliceEnd + 1 + } + + subSlices.push(this.str.slice(sliceStart, this.pos)) + this.escapeCharPositions.length = 0 + + return subSlices.join('') +} + +lunr.QueryLexer.prototype.emit = function (type) { + this.lexemes.push({ + type: type, + str: this.sliceString(), + start: this.start, + end: this.pos + }) + + this.start = this.pos +} + +lunr.QueryLexer.prototype.escapeCharacter = function () { + this.escapeCharPositions.push(this.pos - 1) + this.pos += 1 +} + +lunr.QueryLexer.prototype.next = function () { + if (this.pos >= this.length) { + return lunr.QueryLexer.EOS + } + + var char = this.str.charAt(this.pos) + this.pos += 1 + return char +} + +lunr.QueryLexer.prototype.width = function () { + return this.pos - this.start +} + +lunr.QueryLexer.prototype.ignore = function () { + if (this.start == this.pos) { + this.pos += 1 + } + + this.start = this.pos +} + +lunr.QueryLexer.prototype.backup = function () { + this.pos -= 1 +} + +lunr.QueryLexer.prototype.acceptDigitRun = function () { + var char, charCode + + do { + char = this.next() + charCode = char.charCodeAt(0) + } while (charCode > 47 && charCode < 58) + + if (char != lunr.QueryLexer.EOS) { + this.backup() + } +} + +lunr.QueryLexer.prototype.more = function () { + return this.pos < this.length +} + +lunr.QueryLexer.EOS = 'EOS' +lunr.QueryLexer.FIELD = 'FIELD' +lunr.QueryLexer.TERM = 'TERM' +lunr.QueryLexer.EDIT_DISTANCE = 'EDIT_DISTANCE' +lunr.QueryLexer.BOOST = 'BOOST' +lunr.QueryLexer.PRESENCE = 
'PRESENCE'

// Lexer state: a ":" was just consumed, so everything accumulated so far
// names a field; emit it and return to lexing plain text.
lunr.QueryLexer.lexField = function (lexer) {
  lexer.backup()
  lexer.emit(lunr.QueryLexer.FIELD)
  lexer.ignore()
  return lunr.QueryLexer.lexText
}

// Lexer state: a separator was hit; emit any pending term (width > 1 because
// the separator itself was consumed), then continue lexing if input remains.
lunr.QueryLexer.lexTerm = function (lexer) {
  if (lexer.width() > 1) {
    lexer.backup()
    lexer.emit(lunr.QueryLexer.TERM)
  }

  lexer.ignore()

  if (lexer.more()) {
    return lunr.QueryLexer.lexText
  }
}

// Lexer state: consume the digit run following "~" as an edit distance.
lunr.QueryLexer.lexEditDistance = function (lexer) {
  lexer.ignore()
  lexer.acceptDigitRun()
  lexer.emit(lunr.QueryLexer.EDIT_DISTANCE)
  return lunr.QueryLexer.lexText
}

// Lexer state: consume the digit run following "^" as a boost value.
lunr.QueryLexer.lexBoost = function (lexer) {
  lexer.ignore()
  lexer.acceptDigitRun()
  lexer.emit(lunr.QueryLexer.BOOST)
  return lunr.QueryLexer.lexText
}

// Lexer state: end of input reached - flush any term still in the buffer.
// Returns undefined, which terminates the state-machine loop in run().
lunr.QueryLexer.lexEOS = function (lexer) {
  if (lexer.width() > 0) {
    lexer.emit(lunr.QueryLexer.TERM)
  }
}

// This matches the separator used when tokenising fields
// within a document. These should match otherwise it is
// not possible to search for some tokens within a document.
//
// It is possible for the user to change the separator on the
// tokenizer so it _might_ clash with any other of the special
// characters already used within the search string, e.g. :.
//
// This means that it is possible to change the separator in
// such a way that makes some words unsearchable using a search
// string.
+lunr.QueryLexer.termSeparator = lunr.tokenizer.separator + +lunr.QueryLexer.lexText = function (lexer) { + while (true) { + var char = lexer.next() + + if (char == lunr.QueryLexer.EOS) { + return lunr.QueryLexer.lexEOS + } + + // Escape character is '\' + if (char.charCodeAt(0) == 92) { + lexer.escapeCharacter() + continue + } + + if (char == ":") { + return lunr.QueryLexer.lexField + } + + if (char == "~") { + lexer.backup() + if (lexer.width() > 0) { + lexer.emit(lunr.QueryLexer.TERM) + } + return lunr.QueryLexer.lexEditDistance + } + + if (char == "^") { + lexer.backup() + if (lexer.width() > 0) { + lexer.emit(lunr.QueryLexer.TERM) + } + return lunr.QueryLexer.lexBoost + } + + // "+" indicates term presence is required + // checking for length to ensure that only + // leading "+" are considered + if (char == "+" && lexer.width() === 1) { + lexer.emit(lunr.QueryLexer.PRESENCE) + return lunr.QueryLexer.lexText + } + + // "-" indicates term presence is prohibited + // checking for length to ensure that only + // leading "-" are considered + if (char == "-" && lexer.width() === 1) { + lexer.emit(lunr.QueryLexer.PRESENCE) + return lunr.QueryLexer.lexText + } + + if (char.match(lunr.QueryLexer.termSeparator)) { + return lunr.QueryLexer.lexTerm + } + } +} + +lunr.QueryParser = function (str, query) { + this.lexer = new lunr.QueryLexer (str) + this.query = query + this.currentClause = {} + this.lexemeIdx = 0 +} + +lunr.QueryParser.prototype.parse = function () { + this.lexer.run() + this.lexemes = this.lexer.lexemes + + var state = lunr.QueryParser.parseClause + + while (state) { + state = state(this) + } + + return this.query +} + +lunr.QueryParser.prototype.peekLexeme = function () { + return this.lexemes[this.lexemeIdx] +} + +lunr.QueryParser.prototype.consumeLexeme = function () { + var lexeme = this.peekLexeme() + this.lexemeIdx += 1 + return lexeme +} + +lunr.QueryParser.prototype.nextClause = function () { + var completedClause = this.currentClause + 
this.query.clause(completedClause) + this.currentClause = {} +} + +lunr.QueryParser.parseClause = function (parser) { + var lexeme = parser.peekLexeme() + + if (lexeme == undefined) { + return + } + + switch (lexeme.type) { + case lunr.QueryLexer.PRESENCE: + return lunr.QueryParser.parsePresence + case lunr.QueryLexer.FIELD: + return lunr.QueryParser.parseField + case lunr.QueryLexer.TERM: + return lunr.QueryParser.parseTerm + default: + var errorMessage = "expected either a field or a term, found " + lexeme.type + + if (lexeme.str.length >= 1) { + errorMessage += " with value '" + lexeme.str + "'" + } + + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } +} + +lunr.QueryParser.parsePresence = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + switch (lexeme.str) { + case "-": + parser.currentClause.presence = lunr.Query.presence.PROHIBITED + break + case "+": + parser.currentClause.presence = lunr.Query.presence.REQUIRED + break + default: + var errorMessage = "unrecognised presence operator'" + lexeme.str + "'" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + var nextLexeme = parser.peekLexeme() + + if (nextLexeme == undefined) { + var errorMessage = "expecting term or field, found nothing" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.FIELD: + return lunr.QueryParser.parseField + case lunr.QueryLexer.TERM: + return lunr.QueryParser.parseTerm + default: + var errorMessage = "expecting term or field, found '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end) + } +} + +lunr.QueryParser.parseField = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + if (parser.query.allFields.indexOf(lexeme.str) == -1) { + var possibleFields = 
parser.query.allFields.map(function (f) { return "'" + f + "'" }).join(', '), + errorMessage = "unrecognised field '" + lexeme.str + "', possible fields: " + possibleFields + + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + parser.currentClause.fields = [lexeme.str] + + var nextLexeme = parser.peekLexeme() + + if (nextLexeme == undefined) { + var errorMessage = "expecting term, found nothing" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.TERM: + return lunr.QueryParser.parseTerm + default: + var errorMessage = "expecting term, found '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end) + } +} + +lunr.QueryParser.parseTerm = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + parser.currentClause.term = lexeme.str.toLowerCase() + + if (lexeme.str.indexOf("*") != -1) { + parser.currentClause.usePipeline = false + } + + var nextLexeme = parser.peekLexeme() + + if (nextLexeme == undefined) { + parser.nextClause() + return + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.TERM: + parser.nextClause() + return lunr.QueryParser.parseTerm + case lunr.QueryLexer.FIELD: + parser.nextClause() + return lunr.QueryParser.parseField + case lunr.QueryLexer.EDIT_DISTANCE: + return lunr.QueryParser.parseEditDistance + case lunr.QueryLexer.BOOST: + return lunr.QueryParser.parseBoost + case lunr.QueryLexer.PRESENCE: + parser.nextClause() + return lunr.QueryParser.parsePresence + default: + var errorMessage = "Unexpected lexeme type '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end) + } +} + +lunr.QueryParser.parseEditDistance = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + var editDistance = parseInt(lexeme.str, 10) + + if 
(isNaN(editDistance)) { + var errorMessage = "edit distance must be numeric" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + parser.currentClause.editDistance = editDistance + + var nextLexeme = parser.peekLexeme() + + if (nextLexeme == undefined) { + parser.nextClause() + return + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.TERM: + parser.nextClause() + return lunr.QueryParser.parseTerm + case lunr.QueryLexer.FIELD: + parser.nextClause() + return lunr.QueryParser.parseField + case lunr.QueryLexer.EDIT_DISTANCE: + return lunr.QueryParser.parseEditDistance + case lunr.QueryLexer.BOOST: + return lunr.QueryParser.parseBoost + case lunr.QueryLexer.PRESENCE: + parser.nextClause() + return lunr.QueryParser.parsePresence + default: + var errorMessage = "Unexpected lexeme type '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, nextLexeme.start, nextLexeme.end) + } +} + +lunr.QueryParser.parseBoost = function (parser) { + var lexeme = parser.consumeLexeme() + + if (lexeme == undefined) { + return + } + + var boost = parseInt(lexeme.str, 10) + + if (isNaN(boost)) { + var errorMessage = "boost must be numeric" + throw new lunr.QueryParseError (errorMessage, lexeme.start, lexeme.end) + } + + parser.currentClause.boost = boost + + var nextLexeme = parser.peekLexeme() + + if (nextLexeme == undefined) { + parser.nextClause() + return + } + + switch (nextLexeme.type) { + case lunr.QueryLexer.TERM: + parser.nextClause() + return lunr.QueryParser.parseTerm + case lunr.QueryLexer.FIELD: + parser.nextClause() + return lunr.QueryParser.parseField + case lunr.QueryLexer.EDIT_DISTANCE: + return lunr.QueryParser.parseEditDistance + case lunr.QueryLexer.BOOST: + return lunr.QueryParser.parseBoost + case lunr.QueryLexer.PRESENCE: + parser.nextClause() + return lunr.QueryParser.parsePresence + default: + var errorMessage = "Unexpected lexeme type '" + nextLexeme.type + "'" + throw new lunr.QueryParseError (errorMessage, 
nextLexeme.start, nextLexeme.end) + } +} + + /** + * export the module via AMD, CommonJS or as a browser global + * Export code from https://github.com/umdjs/umd/blob/master/returnExports.js + */ + ;(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define(factory) + } else if (typeof exports === 'object') { + /** + * Node. Does not work with strict CommonJS, but + * only CommonJS-like enviroments that support module.exports, + * like Node. + */ + module.exports = factory() + } else { + // Browser globals (root is window) + root.lunr = factory() + } + }(this, function () { + /** + * Just return a value to define the module export. + * This example returns an object, but the module + * can return a function as the exported value. + */ + return lunr + })) +})(); diff --git a/node_modules/lunr/lunr.min.js b/node_modules/lunr/lunr.min.js new file mode 100644 index 0000000..cdc94cd --- /dev/null +++ b/node_modules/lunr/lunr.min.js @@ -0,0 +1,6 @@ +/** + * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9 + * Copyright (C) 2020 Oliver Nightingale + * @license MIT + */ +!function(){var e=function(t){var r=new e.Builder;return r.pipeline.add(e.trimmer,e.stopWordFilter,e.stemmer),r.searchPipeline.add(e.stemmer),t.call(r,r),r.build()};e.version="2.3.9",e.utils={},e.utils.warn=function(e){return function(t){e.console&&console.warn&&console.warn(t)}}(this),e.utils.asString=function(e){return void 0===e||null===e?"":e.toString()},e.utils.clone=function(e){if(null===e||void 0===e)return e;for(var t=Object.create(null),r=Object.keys(e),i=0;i0){var c=e.utils.clone(r)||{};c.position=[a,l],c.index=s.length,s.push(new e.Token(i.slice(a,o),c))}a=o+1}}return s},e.tokenizer.separator=/[\s\-]+/,e.Pipeline=function(){this._stack=[]},e.Pipeline.registeredFunctions=Object.create(null),e.Pipeline.registerFunction=function(t,r){r in this.registeredFunctions&&e.utils.warn("Overwriting 
existing registered function: "+r),t.label=r,e.Pipeline.registeredFunctions[t.label]=t},e.Pipeline.warnIfFunctionNotRegistered=function(t){var r=t.label&&t.label in this.registeredFunctions;r||e.utils.warn("Function is not registered with pipeline. This may cause problems when serialising the index.\n",t)},e.Pipeline.load=function(t){var r=new e.Pipeline;return t.forEach(function(t){var i=e.Pipeline.registeredFunctions[t];if(!i)throw new Error("Cannot load unregistered function: "+t);r.add(i)}),r},e.Pipeline.prototype.add=function(){var t=Array.prototype.slice.call(arguments);t.forEach(function(t){e.Pipeline.warnIfFunctionNotRegistered(t),this._stack.push(t)},this)},e.Pipeline.prototype.after=function(t,r){e.Pipeline.warnIfFunctionNotRegistered(r);var i=this._stack.indexOf(t);if(i==-1)throw new Error("Cannot find existingFn");i+=1,this._stack.splice(i,0,r)},e.Pipeline.prototype.before=function(t,r){e.Pipeline.warnIfFunctionNotRegistered(r);var i=this._stack.indexOf(t);if(i==-1)throw new Error("Cannot find existingFn");this._stack.splice(i,0,r)},e.Pipeline.prototype.remove=function(e){var t=this._stack.indexOf(e);t!=-1&&this._stack.splice(t,1)},e.Pipeline.prototype.run=function(e){for(var t=this._stack.length,r=0;r1&&(se&&(r=n),s!=e);)i=r-t,n=t+Math.floor(i/2),s=this.elements[2*n];return s==e?2*n:s>e?2*n:sa?l+=2:o==a&&(t+=r[u+1]*i[l+1],u+=2,l+=2);return t},e.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},e.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),t=1,r=0;t0){var o,a=s.str.charAt(0);a in s.node.edges?o=s.node.edges[a]:(o=new e.TokenSet,s.node.edges[a]=o),1==s.str.length&&(o["final"]=!0),n.push({node:o,editsRemaining:s.editsRemaining,str:s.str.slice(1)})}if(0!=s.editsRemaining){if("*"in s.node.edges)var u=s.node.edges["*"];else{var u=new 
e.TokenSet;s.node.edges["*"]=u}if(0==s.str.length&&(u["final"]=!0),n.push({node:u,editsRemaining:s.editsRemaining-1,str:s.str}),s.str.length>1&&n.push({node:s.node,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)}),1==s.str.length&&(s.node["final"]=!0),s.str.length>=1){if("*"in s.node.edges)var l=s.node.edges["*"];else{var l=new e.TokenSet;s.node.edges["*"]=l}1==s.str.length&&(l["final"]=!0),n.push({node:l,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)})}if(s.str.length>1){var c,h=s.str.charAt(0),d=s.str.charAt(1);d in s.node.edges?c=s.node.edges[d]:(c=new e.TokenSet,s.node.edges[d]=c),1==s.str.length&&(c["final"]=!0),n.push({node:c,editsRemaining:s.editsRemaining-1,str:h+s.str.slice(2)})}}}return i},e.TokenSet.fromString=function(t){for(var r=new e.TokenSet,i=r,n=0,s=t.length;n=e;t--){var r=this.uncheckedNodes[t],i=r.child.toString();i in this.minimizedNodes?r.parent.edges[r["char"]]=this.minimizedNodes[i]:(r.child._str=i,this.minimizedNodes[i]=r.child),this.uncheckedNodes.pop()}},e.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},e.Index.prototype.search=function(t){return this.query(function(r){var i=new e.QueryParser(t,r);i.parse()})},e.Index.prototype.query=function(t){for(var r=new e.Query(this.fields),i=Object.create(null),n=Object.create(null),s=Object.create(null),o=Object.create(null),a=Object.create(null),u=0;u1?this._b=1:this._b=e},e.Builder.prototype.k1=function(e){this._k1=e},e.Builder.prototype.add=function(t,r){var i=t[this._ref],n=Object.keys(this._fields);this._documents[i]=r||{},this.documentCount+=1;for(var s=0;s=this.length)return e.QueryLexer.EOS;var t=this.str.charAt(this.pos);return this.pos+=1,t},e.QueryLexer.prototype.width=function(){return 
this.pos-this.start},e.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},e.QueryLexer.prototype.backup=function(){this.pos-=1},e.QueryLexer.prototype.acceptDigitRun=function(){var t,r;do t=this.next(),r=t.charCodeAt(0);while(r>47&&r<58);t!=e.QueryLexer.EOS&&this.backup()},e.QueryLexer.prototype.more=function(){return this.pos1&&(t.backup(),t.emit(e.QueryLexer.TERM)),t.ignore(),t.more())return e.QueryLexer.lexText},e.QueryLexer.lexEditDistance=function(t){return t.ignore(),t.acceptDigitRun(),t.emit(e.QueryLexer.EDIT_DISTANCE),e.QueryLexer.lexText},e.QueryLexer.lexBoost=function(t){return t.ignore(),t.acceptDigitRun(),t.emit(e.QueryLexer.BOOST),e.QueryLexer.lexText},e.QueryLexer.lexEOS=function(t){t.width()>0&&t.emit(e.QueryLexer.TERM)},e.QueryLexer.termSeparator=e.tokenizer.separator,e.QueryLexer.lexText=function(t){for(;;){var r=t.next();if(r==e.QueryLexer.EOS)return e.QueryLexer.lexEOS;if(92!=r.charCodeAt(0)){if(":"==r)return e.QueryLexer.lexField;if("~"==r)return t.backup(),t.width()>0&&t.emit(e.QueryLexer.TERM),e.QueryLexer.lexEditDistance;if("^"==r)return t.backup(),t.width()>0&&t.emit(e.QueryLexer.TERM),e.QueryLexer.lexBoost;if("+"==r&&1===t.width())return t.emit(e.QueryLexer.PRESENCE),e.QueryLexer.lexText;if("-"==r&&1===t.width())return t.emit(e.QueryLexer.PRESENCE),e.QueryLexer.lexText;if(r.match(e.QueryLexer.termSeparator))return e.QueryLexer.lexTerm}else t.escapeCharacter()}},e.QueryParser=function(t,r){this.lexer=new e.QueryLexer(t),this.query=r,this.currentClause={},this.lexemeIdx=0},e.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var t=e.QueryParser.parseClause;t;)t=t(this);return this.query},e.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},e.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},e.QueryParser.prototype.nextClause=function(){var 
e=this.currentClause;this.query.clause(e),this.currentClause={}},e.QueryParser.parseClause=function(t){var r=t.peekLexeme();if(void 0!=r)switch(r.type){case e.QueryLexer.PRESENCE:return e.QueryParser.parsePresence;case e.QueryLexer.FIELD:return e.QueryParser.parseField;case e.QueryLexer.TERM:return e.QueryParser.parseTerm;default:var i="expected either a field or a term, found "+r.type;throw r.str.length>=1&&(i+=" with value '"+r.str+"'"),new e.QueryParseError(i,r.start,r.end)}},e.QueryParser.parsePresence=function(t){var r=t.consumeLexeme();if(void 0!=r){switch(r.str){case"-":t.currentClause.presence=e.Query.presence.PROHIBITED;break;case"+":t.currentClause.presence=e.Query.presence.REQUIRED;break;default:var i="unrecognised presence operator'"+r.str+"'";throw new e.QueryParseError(i,r.start,r.end)}var n=t.peekLexeme();if(void 0==n){var i="expecting term or field, found nothing";throw new e.QueryParseError(i,r.start,r.end)}switch(n.type){case e.QueryLexer.FIELD:return e.QueryParser.parseField;case e.QueryLexer.TERM:return e.QueryParser.parseTerm;default:var i="expecting term or field, found '"+n.type+"'";throw new e.QueryParseError(i,n.start,n.end)}}},e.QueryParser.parseField=function(t){var r=t.consumeLexeme();if(void 0!=r){if(t.query.allFields.indexOf(r.str)==-1){var i=t.query.allFields.map(function(e){return"'"+e+"'"}).join(", "),n="unrecognised field '"+r.str+"', possible fields: "+i;throw new e.QueryParseError(n,r.start,r.end)}t.currentClause.fields=[r.str];var s=t.peekLexeme();if(void 0==s){var n="expecting term, found nothing";throw new e.QueryParseError(n,r.start,r.end)}switch(s.type){case e.QueryLexer.TERM:return e.QueryParser.parseTerm;default:var n="expecting term, found '"+s.type+"'";throw new e.QueryParseError(n,s.start,s.end)}}},e.QueryParser.parseTerm=function(t){var r=t.consumeLexeme();if(void 0!=r){t.currentClause.term=r.str.toLowerCase(),r.str.indexOf("*")!=-1&&(t.currentClause.usePipeline=!1);var i=t.peekLexeme();if(void 0==i)return void 
t.nextClause();switch(i.type){case e.QueryLexer.TERM:return t.nextClause(),e.QueryParser.parseTerm;case e.QueryLexer.FIELD:return t.nextClause(),e.QueryParser.parseField;case e.QueryLexer.EDIT_DISTANCE:return e.QueryParser.parseEditDistance;case e.QueryLexer.BOOST:return e.QueryParser.parseBoost;case e.QueryLexer.PRESENCE:return t.nextClause(),e.QueryParser.parsePresence;default:var n="Unexpected lexeme type '"+i.type+"'";throw new e.QueryParseError(n,i.start,i.end)}}},e.QueryParser.parseEditDistance=function(t){var r=t.consumeLexeme();if(void 0!=r){var i=parseInt(r.str,10);if(isNaN(i)){var n="edit distance must be numeric";throw new e.QueryParseError(n,r.start,r.end)}t.currentClause.editDistance=i;var s=t.peekLexeme();if(void 0==s)return void t.nextClause();switch(s.type){case e.QueryLexer.TERM:return t.nextClause(),e.QueryParser.parseTerm;case e.QueryLexer.FIELD:return t.nextClause(),e.QueryParser.parseField;case e.QueryLexer.EDIT_DISTANCE:return e.QueryParser.parseEditDistance;case e.QueryLexer.BOOST:return e.QueryParser.parseBoost;case e.QueryLexer.PRESENCE:return t.nextClause(),e.QueryParser.parsePresence;default:var n="Unexpected lexeme type '"+s.type+"'";throw new e.QueryParseError(n,s.start,s.end)}}},e.QueryParser.parseBoost=function(t){var r=t.consumeLexeme();if(void 0!=r){var i=parseInt(r.str,10);if(isNaN(i)){var n="boost must be numeric";throw new e.QueryParseError(n,r.start,r.end)}t.currentClause.boost=i;var s=t.peekLexeme();if(void 0==s)return void t.nextClause();switch(s.type){case e.QueryLexer.TERM:return t.nextClause(),e.QueryParser.parseTerm;case e.QueryLexer.FIELD:return t.nextClause(),e.QueryParser.parseField;case e.QueryLexer.EDIT_DISTANCE:return e.QueryParser.parseEditDistance;case e.QueryLexer.BOOST:return e.QueryParser.parseBoost;case e.QueryLexer.PRESENCE:return t.nextClause(),e.QueryParser.parsePresence;default:var n="Unexpected lexeme type '"+s.type+"'";throw new e.QueryParseError(n,s.start,s.end)}}},function(e,t){"function"==typeof 
define&&define.amd?define(t):"object"==typeof exports?module.exports=t():e.lunr=t()}(this,function(){return e})}(); diff --git a/node_modules/lunr/notes b/node_modules/lunr/notes new file mode 100644 index 0000000..4407f79 --- /dev/null +++ b/node_modules/lunr/notes @@ -0,0 +1,47 @@ +1 - "Mr. Green killed Colonel Mustard in the study with the candlestick. Mr. Green is not a very nice fellow." +2 - "Professor Plumb has a green plant in his study." +3 - "Miss Scarlett watered Professor Plumb's green plant while he was away from his office last week." + +l1 = 19 +l2 = 9 +l3 = 16 + +q1 - "green" +q1 = [0.0, 0.71] + +1 = [0.0, 0.0747] +2 = [0.0, 0.1555] +3 = [0.0, 0.0875] + +green : total count = 4, idf = 0.71 +mr : total count = 2, idf = 1.40 +the : total count = 2, idf = 1.40 +plant : total count = 2, idf = 1.40 + +q2 = "Mr. Green" +q2 = [1.4, 0.71] + +1 = [0.147, 0.0747] +2 = [0, 0.1555] +3 = [0, 0.0875] + +q3 = "the green plant" +q3 = [0.5, 0.25, 0.5] + +1 = [1, 0.5, 0] +2 = [0, 0.25, 0.5] +3 = [0, 0.25, 0.5] + +Inverse Index as a trie +values are {docId: score} where score is the sum of tf across fields, with multipliers applied +when querying calculate the idf and multiply it by the tf + +for a multi term query generate a vector using the idf +find all the documents that match both queries, and generate a tf*idf + +word: { + totalCount: 123, + docs: +} + + diff --git a/node_modules/lunr/package.json b/node_modules/lunr/package.json new file mode 100644 index 0000000..1fe2b80 --- /dev/null +++ b/node_modules/lunr/package.json @@ -0,0 +1,30 @@ +{ + "name": "lunr", + "description": "Simple full-text search in your browser.", + "version": "2.3.9", + "author": "Oliver Nightingale", + "keywords": ["search"], + "homepage": "https://lunrjs.com", + "bugs": "https://github.com/olivernn/lunr.js/issues", + "main": "lunr.js", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/olivernn/lunr.js.git" + }, + "devDependencies": { + "benchmark": 
"2.1.x", + "chai": "3.5.x", + "eslint-plugin-spellcheck": "0.0.8", + "eslint": "3.4.x", + "jsdoc": "3.5.x", + "mocha": "3.3.x", + "mustache": "2.2.x", + "node-static": "0.7.x", + "uglify-js": "2.6.x", + "word-list": "1.0.x" + }, + "scripts": { + "test": "make test" + } +} diff --git a/node_modules/lunr/perf/builder_perf.js b/node_modules/lunr/perf/builder_perf.js new file mode 100644 index 0000000..a848e35 --- /dev/null +++ b/node_modules/lunr/perf/builder_perf.js @@ -0,0 +1,30 @@ +suite('lunr.Builder', function () { + var documents = [{ + id: 'a', + title: 'Mr. Green kills Colonel Mustard', + body: 'Mr. Green killed Colonel Mustard in the study with the candlestick. Mr. Green is not a very nice fellow.', + wordCount: 19 + },{ + id: 'b', + title: 'Plumb waters plant', + body: 'Professor Plumb has a green plant in his study', + wordCount: 9 + },{ + id: 'c', + title: 'Scarlett helps Professor', + body: 'Miss Scarlett watered Professor Plumbs green plant while he was away from his office last week.', + wordCount: 16 + }] + + this.add('build', function () { + lunr(function () { + this.ref('id') + this.field('title') + this.field('body') + + documents.forEach(function (doc) { + this.add(doc) + }, this) + }) + }) +}) diff --git a/node_modules/lunr/perf/perf_helper.js b/node_modules/lunr/perf/perf_helper.js new file mode 100644 index 0000000..4285249 --- /dev/null +++ b/node_modules/lunr/perf/perf_helper.js @@ -0,0 +1,26 @@ +var lunr = require('../lunr.js'), + Benchmark = require('benchmark'), + wordList = require('word-list'), + fs = require('fs') + +var suite = function (name, fn) { + var s = new Benchmark.Suite(name, { + onStart: function (e) { console.log(e.currentTarget.name) }, + onCycle: function (e) { console.log(" " + String(e.target)) }, + onError: function (e) { console.error(e.target.error) } + }) + + fn.call(s, s) + + s.run() +} + +var words = fs.readFileSync(wordList, 'utf-8') + .split('\n') + .slice(0, 1000) + .sort() + +global.lunr = lunr +global.Benchmark 
= Benchmark +global.suite = suite +global.words = words diff --git a/node_modules/lunr/perf/pipeline_perf.js b/node_modules/lunr/perf/pipeline_perf.js new file mode 100644 index 0000000..b3e95fa --- /dev/null +++ b/node_modules/lunr/perf/pipeline_perf.js @@ -0,0 +1,43 @@ +suite('lunr.Pipeline', function () { + var tokenToToken = function(token) { + return token + } + + var tokenToTokenArray = function(token) { + return [token, token] + } + + var buildTokens = function(count) { + return words.slice(0, count).map(function(word) { + return new lunr.Token(word) + }) + } + + lunr.Pipeline.registerFunction(tokenToToken, 'tokenToToken') + lunr.Pipeline.registerFunction(tokenToTokenArray, 'tokenToTokenArray') + + var fewTokens = buildTokens(50); + var manyTokens = buildTokens(1000) + + var tokenToTokenPipeline = new lunr.Pipeline + tokenToTokenPipeline.add(tokenToToken) + + var tokenToTokenArrayPipeline = new lunr.Pipeline + tokenToTokenArrayPipeline.add(tokenToTokenArray) + + this.add('few tokens, token -> token', function () { + tokenToTokenPipeline.run(fewTokens) + }) + + this.add('many tokens, token -> token', function () { + tokenToTokenPipeline.run(manyTokens) + }) + + this.add('few tokens, token -> token array', function () { + tokenToTokenArrayPipeline.run(fewTokens) + }) + + this.add('many tokens, token -> token array', function () { + tokenToTokenArrayPipeline.run(manyTokens) + }) +}) diff --git a/node_modules/lunr/perf/query_parser_perf.js b/node_modules/lunr/perf/query_parser_perf.js new file mode 100644 index 0000000..3b75a86 --- /dev/null +++ b/node_modules/lunr/perf/query_parser_perf.js @@ -0,0 +1,24 @@ +suite('lunr.QueryParser', function () { + var parse = function (q) { + var query = new lunr.Query (['title', 'body']), + parser = new lunr.QueryParser(q, query) + + parser.parse() + } + + this.add('simple', function () { + parse('foo bar') + }) + + this.add('field', function () { + parse('title:foo bar') + }) + + this.add('modifier', function () { + 
parse('foo~2 bar') + }) + + this.add('complex', function () { + parse('title:foo~2^6 bar') + }) +}) diff --git a/node_modules/lunr/perf/search_perf.js b/node_modules/lunr/perf/search_perf.js new file mode 100644 index 0000000..b320a04 --- /dev/null +++ b/node_modules/lunr/perf/search_perf.js @@ -0,0 +1,72 @@ +suite('search', function () { + var documents = [{ + id: 'a', + title: 'Mr. Green kills Colonel Mustard', + body: 'Mr. Green killed Colonel Mustard in the study with the candlestick. Mr. Green is not a very nice fellow.', + wordCount: 19 + },{ + id: 'b', + title: 'Plumb waters plant', + body: 'Professor Plumb has a green plant in his study', + wordCount: 9 + },{ + id: 'c', + title: 'Scarlett helps Professor', + body: 'Miss Scarlett watered Professor Plumbs green plant while he was away from his office last week.', + wordCount: 16 + }] + + var idx = lunr(function () { + this.ref('id') + this.field('title') + this.field('body') + + documents.forEach(function (doc) { + this.add(doc) + }, this) + }) + + this.add('single term', function () { + idx.search('green') + }) + + this.add('multi term', function () { + idx.search('green plant') + }) + + this.add('trailing wildcard', function () { + idx.search('pl*') + }) + + this.add('leading wildcard', function () { + idx.search('*ant') + }) + + this.add('contained wildcard', function () { + idx.search('p*t') + }) + + this.add('with field', function () { + idx.search('title:plant') + }) + + this.add('edit distance', function () { + idx.search('plint~2') + }) + + this.add('typeahead', function () { + idx.query(function (q) { + q.term("pl", { boost: 100, usePipeline: true }) + q.term("pl", { boost: 10, usePipeline: false, wildcard: lunr.Query.wildcard.TRAILING }) + q.term("pl", { boost: 1, editDistance: 1 }) + }) + }) + + this.add('negated query', function () { + idx.search('-plant') + }) + + this.add('required term', function () { + idx.search('green +plant') + }) +}) diff --git a/node_modules/lunr/perf/stemmer_perf.js 
b/node_modules/lunr/perf/stemmer_perf.js new file mode 100644 index 0000000..3490543 --- /dev/null +++ b/node_modules/lunr/perf/stemmer_perf.js @@ -0,0 +1,7 @@ +suite('lunr.stemmer', function () { + this.add('#call', function () { + for (var i = 0; i < words.length; i++) { + lunr.stemmer(new lunr.Token (words[i])) + } + }) +}) diff --git a/node_modules/lunr/perf/token_set_perf.js b/node_modules/lunr/perf/token_set_perf.js new file mode 100644 index 0000000..ef0f60a --- /dev/null +++ b/node_modules/lunr/perf/token_set_perf.js @@ -0,0 +1,42 @@ +suite('lunr.TokenSet', function () { + var tokenSet = lunr.TokenSet.fromArray([ + 'january', 'february', 'march', 'april', + 'may', 'june', 'july', 'august', + 'september', 'october', 'november', 'december' + ].sort()) + + var noWildcard = lunr.TokenSet.fromString('september') + var withWildcard = lunr.TokenSet.fromString('*ber') + + this.add('.fromArray', function () { + lunr.TokenSet.fromArray(words) + }) + + this.add('.fromString (no wildcard)', function () { + lunr.TokenSet.fromString('javascript') + }) + + this.add('.fromString (with wildcard)', function () { + lunr.TokenSet.fromString('java*cript') + }) + + this.add('.fromFuzzyString', function () { + lunr.TokenSet.fromFuzzyString('javascript', 2) + }) + + this.add('#toArray', function () { + tokenSet.toArray() + }) + + this.add('#toString', function () { + tokenSet.toString() + }) + + this.add('#intersect (no wildcard)', function () { + tokenSet.intersect(noWildcard) + }) + + this.add('#intersect (with wildcard)', function () { + tokenSet.intersect(withWildcard) + }) +}) diff --git a/node_modules/lunr/perf/tokenizer_perf.js b/node_modules/lunr/perf/tokenizer_perf.js new file mode 100644 index 0000000..3890d0d --- /dev/null +++ b/node_modules/lunr/perf/tokenizer_perf.js @@ -0,0 +1,7 @@ +suite('lunr.tokenizer', function () { + var lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum" + + this.add('#call', function () { + lunr.tokenizer(lorem) + }) +}) diff --git a/node_modules/lunr/perf/vector_perf.js b/node_modules/lunr/perf/vector_perf.js new file mode 100644 index 0000000..181cdba --- /dev/null +++ b/node_modules/lunr/perf/vector_perf.js @@ -0,0 +1,31 @@ +suite('lunr.Vector', function () { + var index, val + + var v1 = new lunr.Vector, + v2 = new lunr.Vector + + for (var i = 0; i < 1000; i++) { + index = Math.floor(i + Math.random() * 100) + val = Math.random() * 100 + v1.insert(i, val) + } + + for (var i = 0; i < 1000; i++) { + index = Math.floor(i + Math.random() * 100) + val = Math.random() * 100 + v2.insert(i, val) + } + + this.add('magnitude', function () { + v1.magnitude() + }) + + this.add('dot', function () { + v1.dot(v2) + }) + + this.add('similarity', function () { + v1.similarity(v2) + }) + +}) diff --git a/node_modules/lunr/styles.css b/node_modules/lunr/styles.css new file mode 100644 index 0000000..758bc29 --- /dev/null +++ b/node_modules/lunr/styles.css @@ -0,0 +1,133 @@ +body { + background-color: #081f28; + color: #708284; + font-family: 'Helvetica Neue', Helvetica, sans-serif; + font-size: 18px; +} + +h1 { + margin: 0; + padding: 0; + font-size: 4em; +} + +a { + color: #2076c7; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + +h2 { + font-weight: normal; + font-size: 1.2em; +} + +h3 { + font-size: 2.6em; + margin: 0; +} + +h1, h2 { + margin: 0; +} + +header span { + font-size: 0.5em; + font-weight: normal; +} + +nav ul { + margin: 0; + padding: 6px 0 0 3px; +} + +nav li { + list-style: none; + display: inline; + padding-right: 12px; + font-weight: bold; + font-size: 0.9em; +} + 
+footer ul { + list-style: none; + padding: 12px 0; +} + +footer ul li { + float: left; + margin-right: 40px; + font-size: 0.8em; + font-weight: bold; +} + +.wrap { + width: 960px; + margin: 20px auto 0 auto; +} + +header { + border-top: 4px solid #708284; + padding: 4px 0 12px 0; +} + +article header { + border-top: 2px solid #708284; +} + +pre { + border-radius: 2px; + background-color: #03262f; + padding: 6px 0; + color: #819090; +} + +p { + line-height: 1.4em; +} + +section { + margin-top: 40px; +} + +section.columns { + -webkit-column-count: 2; + -webkit-column-gap: 50px; + -webkit-hyphens: auto; + min-height: 720px; + font-size: 18px; +} + +.download ul { + list-style: none; + padding: 0; +} + +.download li { + font-size: 1.2em; +} + +pre .keyword, pre .special { + font-weight: bold; + color: #2076c7 +} + +pre .string, pre .regexp { + color: #728a06 +} + +pre .class { + color: #A57707; +} + +pre .number { + color: #D11c24 +} + +pre .comment { + color: grey; + font-style: italic; +} diff --git a/node_modules/lunr/test/builder_test.js b/node_modules/lunr/test/builder_test.js new file mode 100644 index 0000000..15afda0 --- /dev/null +++ b/node_modules/lunr/test/builder_test.js @@ -0,0 +1,225 @@ +suite('lunr.Builder', function () { + suite('#add', function () { + setup(function () { + this.builder = new lunr.Builder + }) + + test('field contains terms that clash with object prototype', function () { + this.builder.field('title') + this.builder.add({ id: 'id', title: 'constructor'}) + + assert.deepProperty(this.builder.invertedIndex, 'constructor.title.id') + assert.deepEqual(this.builder.invertedIndex.constructor.title.id, {}) + + assert.equal(this.builder.fieldTermFrequencies['title/id'].constructor, 1) + }) + + test('field name clashes with object prototype', function () { + this.builder.field('constructor') + this.builder.add({ id: 'id', constructor: 'constructor'}) + + assert.deepProperty(this.builder.invertedIndex, 'constructor.constructor.id') + 
assert.deepEqual(this.builder.invertedIndex.constructor.constructor.id, {}) + }) + + test('document ref clashes with object prototype', function () { + this.builder.field('title') + this.builder.add({ id: 'constructor', title: 'word'}) + + assert.deepProperty(this.builder.invertedIndex, 'word.title.constructor') + assert.deepEqual(this.builder.invertedIndex.word.title.constructor, {}) + }) + + test('token metadata clashes with object prototype', function () { + var pipelineFunction = function (t) { + t.metadata['constructor'] = 'foo' + return t + } + + lunr.Pipeline.registerFunction(pipelineFunction, 'test') + this.builder.pipeline.add(pipelineFunction) + + // the registeredFunctions object is global, this is to prevent + // polluting any other tests. + delete lunr.Pipeline.registeredFunctions.test + + this.builder.metadataWhitelist.push('constructor') + + this.builder.field('title') + this.builder.add({ id: 'id', title: 'word'}) + assert.deepProperty(this.builder.invertedIndex, 'word.title.id.constructor') + assert.deepEqual(this.builder.invertedIndex.word.title.id.constructor, ['foo']) + }) + + test('extracting nested properties from a document', function () { + var extractor = function (d) { return d.person.name } + + this.builder.field('name', { + extractor: extractor + }) + + this.builder.add({ + id: 'id', + person: { + name: 'bob' + } + }) + + assert.deepProperty(this.builder.invertedIndex, 'bob.name.id') + }) + }) + + suite('#field', function () { + test('defining fields to index', function () { + var builder = new lunr.Builder + builder.field('foo') + assert.property(builder._fields, 'foo') + }) + + test('field with illegal characters', function () { + var builder = new lunr.Builder + assert.throws(function () { + builder.field('foo/bar') + }) + }) + }) + + suite('#ref', function () { + test('default reference', function () { + var builder = new lunr.Builder + assert.equal('id', builder._ref) + }) + + test('defining a reference field', function () { + var 
builder = new lunr.Builder + builder.ref('foo') + assert.equal('foo', builder._ref) + }) + }) + + suite('#b', function () { + test('default value', function () { + var builder = new lunr.Builder + assert.equal(0.75, builder._b) + }) + + test('values less than zero', function () { + var builder = new lunr.Builder + builder.b(-1) + assert.equal(0, builder._b) + }) + + test('values higher than one', function () { + var builder = new lunr.Builder + builder.b(1.5) + assert.equal(1, builder._b) + }) + + test('value within range', function () { + var builder = new lunr.Builder + builder.b(0.5) + assert.equal(0.5, builder._b) + }) + }) + + suite('#k1', function () { + test('default value', function () { + var builder = new lunr.Builder + assert.equal(1.2, builder._k1) + }) + + test('values less than zero', function () { + var builder = new lunr.Builder + builder.k1(1.6) + assert.equal(1.6, builder._k1) + }) + }) + + suite('#use', function () { + setup(function () { + this.builder = new lunr.Builder + }) + + test('calls plugin function', function () { + var wasCalled = false, + plugin = function () { wasCalled = true } + + this.builder.use(plugin) + assert.isTrue(wasCalled) + }) + + test('sets context to the builder instance', function () { + var context = null, + plugin = function () { context = this } + + this.builder.use(plugin) + assert.equal(context, this.builder) + }) + + test('passes builder as first argument', function () { + var arg = null, + plugin = function (a) { arg = a } + + this.builder.use(plugin) + assert.equal(arg, this.builder) + }) + + test('forwards arguments to the plugin', function () { + var args = null, + plugin = function () { args = [].slice.call(arguments) } + + this.builder.use(plugin, 1, 2, 3) + assert.deepEqual(args, [this.builder, 1, 2, 3]) + }) + }) + + suite('#build', function () { + setup(function () { + var builder = new lunr.Builder, + doc = { id: 'id', title: 'test', body: 'missing' } + + builder.ref('id') + builder.field('title') + 
builder.add(doc) + builder.build() + + this.builder = builder + }) + + test('adds tokens to invertedIndex', function () { + assert.deepProperty(this.builder.invertedIndex, 'test.title.id') + }) + + test('builds a vector space of the document fields', function () { + assert.property(this.builder.fieldVectors, 'title/id') + assert.instanceOf(this.builder.fieldVectors['title/id'], lunr.Vector) + }) + + test('skips fields not defined for indexing', function () { + assert.notProperty(this.builder.invertedIndex, 'missing') + }) + + test('builds a token set for the corpus', function () { + var needle = lunr.TokenSet.fromString('test') + assert.include(this.builder.tokenSet.intersect(needle).toArray(), 'test') + }) + + test('calculates document count', function () { + assert.equal(1, this.builder.documentCount) + }) + + test('calculates average field length', function () { + assert.equal(1, this.builder.averageFieldLength['title']) + }) + + test('index returned', function () { + var builder = new lunr.Builder, + doc = { id: 'id', title: 'test', body: 'missing' } + + builder.ref('id') + builder.field('title') + builder.add(doc) + assert.instanceOf(builder.build(), lunr.Index) + }) + }) +}) diff --git a/node_modules/lunr/test/field_ref_test.js b/node_modules/lunr/test/field_ref_test.js new file mode 100644 index 0000000..ef44e06 --- /dev/null +++ b/node_modules/lunr/test/field_ref_test.js @@ -0,0 +1,35 @@ +suite('lunr.FieldRef', function () { + suite('#toString', function () { + test('combines document ref and field name', function () { + var fieldName = "title", + documentRef = "123", + fieldRef = new lunr.FieldRef (documentRef, fieldName) + + assert.equal(fieldRef.toString(), "title/123") + }) + }) + + suite('.fromString', function () { + test('splits string into parts', function () { + var fieldRef = lunr.FieldRef.fromString("title/123") + + assert.equal(fieldRef.fieldName, "title") + assert.equal(fieldRef.docRef, "123") + }) + + test('docRef contains join character', 
function () { + var fieldRef = lunr.FieldRef.fromString("title/http://example.com/123") + + assert.equal(fieldRef.fieldName, "title") + assert.equal(fieldRef.docRef, "http://example.com/123") + }) + + test('string does not contain join character', function () { + var s = "docRefOnly" + + assert.throws(function () { + lunr.FieldRef.fromString(s) + }) + }) + }) +}) diff --git a/node_modules/lunr/test/fixtures/stemming_vocab.json b/node_modules/lunr/test/fixtures/stemming_vocab.json new file mode 100644 index 0000000..2b7bdf5 --- /dev/null +++ b/node_modules/lunr/test/fixtures/stemming_vocab.json @@ -0,0 +1 @@ +{"consign":"consign","consigned":"consign","consigning":"consign","consignment":"consign","consist":"consist","consisted":"consist","consistency":"consist","consistent":"consist","consistently":"consist","consisting":"consist","consists":"consist","consolation":"consol","consolations":"consol","consolatory":"consolatori","console":"consol","consoled":"consol","consoles":"consol","consolidate":"consolid","consolidated":"consolid","consolidating":"consolid","consoling":"consol","consols":"consol","consonant":"conson","consort":"consort","consorted":"consort","consorting":"consort","conspicuous":"conspicu","conspicuously":"conspicu","conspiracy":"conspiraci","conspirator":"conspir","conspirators":"conspir","conspire":"conspir","conspired":"conspir","conspiring":"conspir","constable":"constabl","constables":"constabl","constance":"constanc","constancy":"constanc","constant":"constant","knack":"knack","knackeries":"knackeri","knacks":"knack","knag":"knag","knave":"knave","knaves":"knave","knavish":"knavish","kneaded":"knead","kneading":"knead","knee":"knee","kneel":"kneel","kneeled":"kneel","kneeling":"kneel","kneels":"kneel","knees":"knee","knell":"knell","knelt":"knelt","knew":"knew","knick":"knick","knif":"knif","knife":"knife","knight":"knight","knights":"knight","knit":"knit","knits":"knit","knitted":"knit","knitting":"knit","knives":"knive","knob":"knob","knobs
":"knob","knock":"knock","knocked":"knock","knocker":"knocker","knockers":"knocker","knocking":"knock","knocks":"knock","knopp":"knopp","knot":"knot","knots":"knot","lay":"lay","try":"tri"} diff --git a/node_modules/lunr/test/index.html b/node_modules/lunr/test/index.html new file mode 100644 index 0000000..cbba1a1 --- /dev/null +++ b/node_modules/lunr/test/index.html @@ -0,0 +1,58 @@ + + + + Mocha Tests + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/node_modules/lunr/test/match_data_test.js b/node_modules/lunr/test/match_data_test.js new file mode 100644 index 0000000..d4c0c04 --- /dev/null +++ b/node_modules/lunr/test/match_data_test.js @@ -0,0 +1,41 @@ +suite('lunr.MatchData', function () { + suite('#combine', function () { + setup(function () { + this.match = new lunr.MatchData('foo', 'title', { + position: [1] + }) + + this.match.combine(new lunr.MatchData('bar', 'title', { + position: [2] + })) + + this.match.combine(new lunr.MatchData('baz', 'body', { + position: [3] + })) + + this.match.combine(new lunr.MatchData('baz', 'body', { + position: [4] + })) + }) + + test('#terms', function () { + assert.sameMembers(['foo', 'bar', 'baz'], Object.keys(this.match.metadata)) + }) + + test('#metadata', function () { + assert.deepEqual(this.match.metadata.foo.title.position, [1]) + assert.deepEqual(this.match.metadata.bar.title.position, [2]) + assert.deepEqual(this.match.metadata.baz.body.position, [3, 4]) + }) + + test('does not mutate source data', function () { + var metadata = { foo: [1] }, + matchData1 = new lunr.MatchData('foo', 'title', metadata), + matchData2 = new lunr.MatchData('foo', 'title', metadata) + + matchData1.combine(matchData2) + + assert.deepEqual(metadata.foo, [1]) + }) + }) +}) diff --git a/node_modules/lunr/test/pipeline_test.js b/node_modules/lunr/test/pipeline_test.js new file mode 100644 index 0000000..f080bf9 --- /dev/null +++ b/node_modules/lunr/test/pipeline_test.js @@ -0,0 +1,280 @@ +suite('lunr.Pipeline', function () { + var noop = function () {} + + setup(function () { + this.existingRegisteredFunctions = lunr.Pipeline.registeredFunctions + this.existingWarnIfFunctionNotRegistered = lunr.Pipeline.warnIfFunctionNotRegistered + + lunr.Pipeline.registeredFunctions = {} + lunr.Pipeline.warnIfFunctionNotRegistered = noop + + this.pipeline = new lunr.Pipeline + }) + + teardown(function () { + 
lunr.Pipeline.registeredFunctions = this.existingRegisteredFunctions + lunr.Pipeline.warnIfFunctionNotRegistered = this.existingWarnIfFunctionNotRegistered + }) + + suite('#add', function () { + test('add function to pipeline', function () { + this.pipeline.add(noop) + assert.equal(1, this.pipeline._stack.length) + }) + + test('add multiple functions to the pipeline', function () { + this.pipeline.add(noop, noop) + assert.equal(2, this.pipeline._stack.length) + }) + }) + + suite('#remove', function () { + test('function exists in pipeline', function () { + this.pipeline.add(noop) + assert.equal(1, this.pipeline._stack.length) + this.pipeline.remove(noop) + assert.equal(0, this.pipeline._stack.length) + }) + + test('function does not exist in pipeline', function () { + var fn = function () {} + this.pipeline.add(noop) + assert.equal(1, this.pipeline._stack.length) + this.pipeline.remove(fn) + assert.equal(1, this.pipeline._stack.length) + }) + }) + + suite('#before', function () { + var fn = function () {} + + test('other function exists', function () { + this.pipeline.add(noop) + this.pipeline.before(noop, fn) + + assert.deepEqual([fn, noop], this.pipeline._stack) + }) + + test('other function does not exist', function () { + var action = function () { + this.pipeline.before(noop, fn) + } + + assert.throws(action.bind(this)) + assert.equal(0, this.pipeline._stack.length) + }) + }) + + suite('#after', function () { + var fn = function () {} + + test('other function exists', function () { + this.pipeline.add(noop) + this.pipeline.after(noop, fn) + + assert.deepEqual([noop, fn], this.pipeline._stack) + }) + + test('other function does not exist', function () { + var action = function () { + this.pipeline.after(noop, fn) + } + + assert.throws(action.bind(this)) + assert.equal(0, this.pipeline._stack.length) + }) + }) + + suite('#run', function () { + test('calling each function for each token', function () { + var count1 = 0, count2 = 0, + fn1 = function (t) { 
count1++; return t }, + fn2 = function (t) { count2++; return t } + + this.pipeline.add(fn1, fn2) + this.pipeline.run([1,2,3]) + + assert.equal(3, count1) + assert.equal(3, count2) + }) + + test('passes token to pipeline function', function () { + this.pipeline.add(function (token) { + assert.equal('foo', token) + }) + + this.pipeline.run(['foo']) + }) + + test('passes index to pipeline function', function () { + this.pipeline.add(function (_, index) { + assert.equal(0, index) + }) + + this.pipeline.run(['foo']) + }) + + test('passes entire token array to pipeline function', function () { + this.pipeline.add(function (_, _, tokens) { + assert.deepEqual(['foo'], tokens) + }) + + this.pipeline.run(['foo']) + }) + + test('passes output of one function as input to the next', function () { + this.pipeline.add(function (t) { + return t.toUpperCase() + }) + + this.pipeline.add(function (t) { + assert.equal('FOO', t) + }) + + this.pipeline.run(['foo']) + }) + + test('returns the results of the last function', function () { + this.pipeline.add(function (t) { + return t.toUpperCase() + }) + + assert.deepEqual(['FOO'], this.pipeline.run(['foo'])) + }) + + test('filters out null, undefined and empty string values', function () { + var tokens = [], + output + + // only pass on tokens for even token indexes + // return null for 'foo' + // return undefined for 'bar' + // return '' for 'baz' + this.pipeline.add(function (t, i) { + if (i == 4) { + return null + } else if (i == 5) { + return '' + } if (i % 2) { + return t + } else { + return undefined + } + }) + + this.pipeline.add(function (t) { + tokens.push(t) + return t + }) + + output = this.pipeline.run(['a', 'b', 'c', 'd', 'foo', 'bar', 'baz']) + + assert.sameMembers(['b', 'd'], tokens) + assert.sameMembers(['b', 'd'], output) + }) + + suite('expanding tokens', function () { + test('passed to output', function () { + this.pipeline.add(function (t) { + return [t, t.toUpperCase()] + }) + + assert.sameMembers(["foo", "FOO"], 
this.pipeline.run(['foo'])) + }) + + test('not passed to same function', function () { + var received = [] + + this.pipeline.add(function (t) { + received.push(t) + return [t, t.toUpperCase()] + }) + + this.pipeline.run(['foo']) + + assert.sameMembers(['foo'], received) + }) + + test('passed to the next pipeline function', function () { + var received = [] + + this.pipeline.add(function (t) { + return [t, t.toUpperCase()] + }) + + this.pipeline.add(function (t) { + received.push(t) + }) + + this.pipeline.run(['foo']) + + assert.sameMembers(['foo', 'FOO'], received) + }) + }) + }) + + suite('#toJSON', function () { + test('returns an array of registered function labels', function () { + var fn = function () {} + + lunr.Pipeline.registerFunction(fn, 'fn') + + this.pipeline.add(fn) + + assert.sameMembers(['fn'], this.pipeline.toJSON()) + }) + }) + + suite('.registerFunction', function () { + setup(function () { + this.fn = function () {} + }) + + test('adds a label property to the function', function () { + lunr.Pipeline.registerFunction(this.fn, 'fn') + + assert.equal('fn', this.fn.label) + }) + + test('adds function to the list of registered functions', function () { + lunr.Pipeline.registerFunction(this.fn, 'fn') + + assert.equal(this.fn, lunr.Pipeline.registeredFunctions['fn']) + }) + }) + + suite('.load', function () { + test('with registered functions', function () { + var fn = function () {}, + serializedPipeline = ['fn'], + pipeline + + lunr.Pipeline.registerFunction(fn, 'fn') + + pipeline = lunr.Pipeline.load(serializedPipeline) + + assert.equal(1, pipeline._stack.length) + assert.equal(fn, pipeline._stack[0]) + }) + + test('with unregisterd functions', function () { + var serializedPipeline = ['fn'] + + assert.throws(function () { + lunr.Pipeline.load(serializedPipeline) + }) + }) + }) + + suite('#reset', function () { + test('empties the stack', function () { + this.pipeline.add(function () {}) + + assert.equal(1, this.pipeline._stack.length) + + 
this.pipeline.reset() + + assert.equal(0, this.pipeline._stack.length) + }) + }) +}) diff --git a/node_modules/lunr/test/query_lexer_test.js b/node_modules/lunr/test/query_lexer_test.js new file mode 100644 index 0000000..d12cff6 --- /dev/null +++ b/node_modules/lunr/test/query_lexer_test.js @@ -0,0 +1,573 @@ +suite('lunr.QueryLexer', function () { + suite('#run', function () { + + var lex = function (str) { + var lexer = new lunr.QueryLexer(str) + lexer.run() + return lexer + } + + suite('single term', function () { + setup(function () { + this.lexer = lex('foo') + }) + + test('produces 1 lexeme', function () { + assert.lengthOf(this.lexer.lexemes, 1) + }) + + suite('lexeme', function () { + setup(function () { + this.lexeme = this.lexer.lexemes[0] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.TERM, this.lexeme.type) + }) + + test('#str', function () { + assert.equal('foo', this.lexeme.str) + }) + + test('#start', function () { + assert.equal(0, this.lexeme.start) + }) + + test('#end', function () { + assert.equal(3, this.lexeme.end) + }) + }) + }) + + // embedded hyphens should not be confused with + // presence operators + suite('single term with hyphen', function () { + setup(function () { + this.lexer = lex('foo-bar') + }) + + test('produces 2 lexeme', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + + suite('lexeme', function () { + setup(function () { + this.fooLexeme = this.lexer.lexemes[0] + this.barLexeme = this.lexer.lexemes[1] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.TERM, this.fooLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.barLexeme.type) + }) + + test('#str', function () { + assert.equal('foo', this.fooLexeme.str) + assert.equal('bar', this.barLexeme.str) + }) + + test('#start', function () { + assert.equal(0, this.fooLexeme.start) + assert.equal(4, this.barLexeme.start) + }) + + test('#end', function () { + assert.equal(3, this.fooLexeme.end) + assert.equal(7, 
this.barLexeme.end) + }) + }) + }) + + suite('term escape char', function () { + setup(function () { + this.lexer = lex("foo\\:bar") + }) + + test('produces 1 lexeme', function () { + assert.lengthOf(this.lexer.lexemes, 1) + }) + + suite('lexeme', function () { + setup(function () { + this.lexeme = this.lexer.lexemes[0] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.TERM, this.lexeme.type) + }) + + test('#str', function () { + assert.equal('foo:bar', this.lexeme.str) + }) + + test('#start', function () { + assert.equal(0, this.lexeme.start) + }) + + test('#end', function () { + assert.equal(8, this.lexeme.end) + }) + }) + }) + + suite('multiple terms', function () { + setup(function () { + this.lexer = lex('foo bar') + }) + + test('produces 2 lexems', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + + suite('lexemes', function () { + setup(function () { + this.fooLexeme = this.lexer.lexemes[0] + this.barLexeme = this.lexer.lexemes[1] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.TERM, this.fooLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.barLexeme.type) + }) + + test('#str', function () { + assert.equal('foo', this.fooLexeme.str) + assert.equal('bar', this.barLexeme.str) + }) + + test('#start', function () { + assert.equal(0, this.fooLexeme.start) + assert.equal(4, this.barLexeme.start) + }) + + test('#end', function () { + assert.equal(3, this.fooLexeme.end) + assert.equal(7, this.barLexeme.end) + }) + }) + }) + + suite('multiple terms with presence', function () { + setup(function () { + this.lexer = lex('+foo +bar') + }) + + test('produces 2 lexems', function () { + assert.lengthOf(this.lexer.lexemes, 4) + }) + + suite('lexemes', function () { + setup(function () { + this.fooPresenceLexeme = this.lexer.lexemes[0] + this.fooTermLexeme = this.lexer.lexemes[1] + + this.barPresenceLexeme = this.lexer.lexemes[2] + this.barTermLexeme = this.lexer.lexemes[3] + }) + + test('#type', function () { + 
assert.equal(lunr.QueryLexer.TERM, this.fooTermLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.barTermLexeme.type) + + assert.equal(lunr.QueryLexer.PRESENCE, this.fooPresenceLexeme.type) + assert.equal(lunr.QueryLexer.PRESENCE, this.barPresenceLexeme.type) + }) + + test('#str', function () { + assert.equal('foo', this.fooTermLexeme.str) + assert.equal('bar', this.barTermLexeme.str) + + assert.equal('+', this.fooPresenceLexeme.str) + assert.equal('+', this.barPresenceLexeme.str) + }) + }) + }) + + suite('multiple terms with presence and fuzz', function () { + setup(function () { + this.lexer = lex('+foo~1 +bar') + }) + + test('produces n lexemes', function () { + assert.lengthOf(this.lexer.lexemes, 5) + }) + + suite('lexemes', function () { + setup(function () { + this.fooPresenceLexeme = this.lexer.lexemes[0] + this.fooTermLexeme = this.lexer.lexemes[1] + this.fooFuzzLexeme = this.lexer.lexemes[2] + this.barPresenceLexeme = this.lexer.lexemes[3] + this.barTermLexeme = this.lexer.lexemes[4] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.PRESENCE, this.fooPresenceLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.fooTermLexeme.type) + assert.equal(lunr.QueryLexer.EDIT_DISTANCE, this.fooFuzzLexeme.type) + assert.equal(lunr.QueryLexer.PRESENCE, this.barPresenceLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.barTermLexeme.type) + }) + }) + }) + + suite('separator length > 1', function () { + setup(function () { + this.lexer = lex('foo bar') + }) + + test('produces 2 lexems', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + + suite('lexemes', function () { + setup(function () { + this.fooLexeme = this.lexer.lexemes[0] + this.barLexeme = this.lexer.lexemes[1] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.TERM, this.fooLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.barLexeme.type) + }) + + test('#str', function () { + assert.equal('foo', this.fooLexeme.str) + assert.equal('bar', 
this.barLexeme.str) + }) + + test('#start', function () { + assert.equal(0, this.fooLexeme.start) + assert.equal(7, this.barLexeme.start) + }) + + test('#end', function () { + assert.equal(3, this.fooLexeme.end) + assert.equal(10, this.barLexeme.end) + }) + }) + }) + + suite('hyphen (-) considered a seperator', function () { + setup(function () { + this.lexer = lex('foo-bar') + }) + + test('produces 1 lexeme', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + }) + + suite('term with field', function () { + setup(function () { + this.lexer = lex('title:foo') + }) + + test('produces 2 lexems', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + + suite('lexemes', function () { + setup(function () { + this.fieldLexeme = this.lexer.lexemes[0] + this.termLexeme = this.lexer.lexemes[1] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.FIELD, this.fieldLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.termLexeme.type) + }) + + test('#str', function () { + assert.equal('title', this.fieldLexeme.str) + assert.equal('foo', this.termLexeme.str) + }) + + test('#start', function () { + assert.equal(0, this.fieldLexeme.start) + assert.equal(6, this.termLexeme.start) + }) + + test('#end', function () { + assert.equal(5, this.fieldLexeme.end) + assert.equal(9, this.termLexeme.end) + }) + }) + }) + + suite('term with field with escape char', function () { + setup(function () { + this.lexer = lex("ti\\:tle:foo") + }) + + test('produces 1 lexeme', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + + suite('lexeme', function () { + setup(function () { + this.fieldLexeme = this.lexer.lexemes[0] + this.termLexeme = this.lexer.lexemes[1] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.FIELD, this.fieldLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.termLexeme.type) + }) + + test('#str', function () { + assert.equal('ti:tle', this.fieldLexeme.str) + assert.equal('foo', this.termLexeme.str) + }) + + 
test('#start', function () { + assert.equal(0, this.fieldLexeme.start) + assert.equal(8, this.termLexeme.start) + }) + + test('#end', function () { + assert.equal(7, this.fieldLexeme.end) + assert.equal(11, this.termLexeme.end) + }) + }) + }) + + suite('term with presence required', function () { + setup(function () { + this.lexer = lex('+foo') + }) + + test('produces 2 lexemes', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + + suite('lexemes', function () { + setup(function () { + this.presenceLexeme = this.lexer.lexemes[0] + this.termLexeme = this.lexer.lexemes[1] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.PRESENCE, this.presenceLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.termLexeme.type) + }) + + test('#str', function () { + assert.equal('+', this.presenceLexeme.str) + assert.equal('foo', this.termLexeme.str) + }) + + test('#start', function () { + assert.equal(1, this.termLexeme.start) + assert.equal(0, this.presenceLexeme.start) + }) + + test('#end', function () { + assert.equal(4, this.termLexeme.end) + assert.equal(1, this.presenceLexeme.end) + }) + }) + }) + + suite('term with field with presence required', function () { + setup(function () { + this.lexer = lex('+title:foo') + }) + + test('produces 3 lexemes', function () { + assert.lengthOf(this.lexer.lexemes, 3) + }) + + suite('lexemes', function () { + setup(function () { + this.presenceLexeme = this.lexer.lexemes[0] + this.fieldLexeme = this.lexer.lexemes[1] + this.termLexeme = this.lexer.lexemes[2] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.PRESENCE, this.presenceLexeme.type) + assert.equal(lunr.QueryLexer.FIELD, this.fieldLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.termLexeme.type) + }) + + test('#str', function () { + assert.equal('+', this.presenceLexeme.str) + assert.equal('title', this.fieldLexeme.str) + assert.equal('foo', this.termLexeme.str) + }) + + test('#start', function () { + assert.equal(0, 
this.presenceLexeme.start) + assert.equal(1, this.fieldLexeme.start) + assert.equal(7, this.termLexeme.start) + }) + + test('#end', function () { + assert.equal(1, this.presenceLexeme.end) + assert.equal(6, this.fieldLexeme.end) + assert.equal(10, this.termLexeme.end) + }) + }) + }) + + suite('term with presence prohibited', function () { + setup(function () { + this.lexer = lex('-foo') + }) + + test('produces 2 lexemes', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + + suite('lexemes', function () { + setup(function () { + this.presenceLexeme = this.lexer.lexemes[0] + this.termLexeme = this.lexer.lexemes[1] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.PRESENCE, this.presenceLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.termLexeme.type) + }) + + test('#str', function () { + assert.equal('-', this.presenceLexeme.str) + assert.equal('foo', this.termLexeme.str) + }) + + test('#start', function () { + assert.equal(1, this.termLexeme.start) + assert.equal(0, this.presenceLexeme.start) + }) + + test('#end', function () { + assert.equal(4, this.termLexeme.end) + assert.equal(1, this.presenceLexeme.end) + }) + }) + }) + + suite('term with edit distance', function () { + setup(function () { + this.lexer = lex('foo~2') + }) + + test('produces 2 lexems', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + + suite('lexemes', function () { + setup(function () { + this.termLexeme = this.lexer.lexemes[0] + this.editDistanceLexeme = this.lexer.lexemes[1] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.TERM, this.termLexeme.type) + assert.equal(lunr.QueryLexer.EDIT_DISTANCE, this.editDistanceLexeme.type) + }) + + test('#str', function () { + assert.equal('foo', this.termLexeme.str) + assert.equal('2', this.editDistanceLexeme.str) + }) + + test('#start', function () { + assert.equal(0, this.termLexeme.start) + assert.equal(4, this.editDistanceLexeme.start) + }) + + test('#end', function () { + 
assert.equal(3, this.termLexeme.end) + assert.equal(5, this.editDistanceLexeme.end) + }) + }) + }) + + suite('term with boost', function () { + setup(function () { + this.lexer = lex('foo^10') + }) + + test('produces 2 lexems', function () { + assert.lengthOf(this.lexer.lexemes, 2) + }) + + suite('lexemes', function () { + setup(function () { + this.termLexeme = this.lexer.lexemes[0] + this.boostLexeme = this.lexer.lexemes[1] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.TERM, this.termLexeme.type) + assert.equal(lunr.QueryLexer.BOOST, this.boostLexeme.type) + }) + + test('#str', function () { + assert.equal('foo', this.termLexeme.str) + assert.equal('10', this.boostLexeme.str) + }) + + test('#start', function () { + assert.equal(0, this.termLexeme.start) + assert.equal(4, this.boostLexeme.start) + }) + + test('#end', function () { + assert.equal(3, this.termLexeme.end) + assert.equal(6, this.boostLexeme.end) + }) + }) + }) + + suite('term with field, boost and edit distance', function () { + setup(function () { + this.lexer = lex('title:foo^10~5') + }) + + test('produces 4 lexems', function () { + assert.lengthOf(this.lexer.lexemes, 4) + }) + + suite('lexemes', function () { + setup(function () { + this.fieldLexeme = this.lexer.lexemes[0] + this.termLexeme = this.lexer.lexemes[1] + this.boostLexeme = this.lexer.lexemes[2] + this.editDistanceLexeme = this.lexer.lexemes[3] + }) + + test('#type', function () { + assert.equal(lunr.QueryLexer.FIELD, this.fieldLexeme.type) + assert.equal(lunr.QueryLexer.TERM, this.termLexeme.type) + assert.equal(lunr.QueryLexer.BOOST, this.boostLexeme.type) + assert.equal(lunr.QueryLexer.EDIT_DISTANCE, this.editDistanceLexeme.type) + }) + + test('#str', function () { + assert.equal('title', this.fieldLexeme.str) + assert.equal('foo', this.termLexeme.str) + assert.equal('10', this.boostLexeme.str) + assert.equal('5', this.editDistanceLexeme.str) + }) + + test('#start', function () { + assert.equal(0, 
this.fieldLexeme.start) + assert.equal(6, this.termLexeme.start) + assert.equal(10, this.boostLexeme.start) + assert.equal(13, this.editDistanceLexeme.start) + }) + + test('#end', function () { + assert.equal(5, this.fieldLexeme.end) + assert.equal(9, this.termLexeme.end) + assert.equal(12, this.boostLexeme.end) + assert.equal(14, this.editDistanceLexeme.end) + }) + }) + }) + + }) +}) diff --git a/node_modules/lunr/test/query_parser_test.js b/node_modules/lunr/test/query_parser_test.js new file mode 100644 index 0000000..a17ca1a --- /dev/null +++ b/node_modules/lunr/test/query_parser_test.js @@ -0,0 +1,552 @@ +suite('lunr.QueryParser', function () { + var parse = function (q) { + var query = new lunr.Query (['title', 'body']), + parser = new lunr.QueryParser(q, query) + + parser.parse() + + return query.clauses + } + + suite('#parse', function () { + suite('single term', function () { + setup(function () { + this.clauses = parse('foo') + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + suite('clauses', function () { + setup(function () { + this.clause = this.clauses[0] + }) + + test('term', function () { + assert.equal('foo', this.clause.term) + }) + + test('fields', function () { + assert.sameMembers(['title', 'body'], this.clause.fields) + }) + + test('presence', function () { + assert.equal(lunr.Query.presence.OPTIONAL, this.clause.presence) + }) + + test('usePipeline', function () { + assert.ok(this.clause.usePipeline) + }) + }) + }) + + suite('single term, uppercase', function () { + setup(function () { + this.clauses = parse('FOO') + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + suite('clauses', function () { + setup(function () { + this.clause = this.clauses[0] + }) + + test('term', function () { + assert.equal('foo', this.clause.term) + }) + + test('fields', function () { + assert.sameMembers(['title', 'body'], this.clause.fields) + }) + + test('usePipeline', function () { + 
assert.ok(this.clause.usePipeline) + }) + }) + }) + + suite('single term with wildcard', function () { + setup(function () { + this.clauses = parse('fo*') + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + suite('clauses', function () { + setup(function () { + this.clause = this.clauses[0] + }) + + test('#term', function () { + assert.equal('fo*', this.clause.term) + }) + + test('#usePipeline', function () { + assert.ok(!this.clause.usePipeline) + }) + }) + }) + + suite('multiple terms', function () { + setup(function () { + this.clauses = parse('foo bar') + }) + + test('has 2 clause', function () { + assert.lengthOf(this.clauses, 2) + }) + + suite('clauses', function () { + test('#term', function () { + assert.equal('foo', this.clauses[0].term) + assert.equal('bar', this.clauses[1].term) + }) + }) + }) + + suite('multiple terms with presence', function () { + setup(function () { + this.clauses = parse('+foo +bar') + }) + + test('has 2 clause', function () { + assert.lengthOf(this.clauses, 2) + }) + }) + + suite('edit distance followed by presence', function () { + setup(function () { + this.clauses = parse('foo~10 +bar') + }) + + test('has 2 clause', function () { + assert.lengthOf(this.clauses, 2) + }) + + suite('clauses', function () { + setup(function () { + this.fooClause = this.clauses[0] + this.barClause = this.clauses[1] + }) + + test('#term', function () { + assert.equal('foo', this.fooClause.term) + assert.equal('bar', this.barClause.term) + }) + + test('#presence', function () { + assert.equal(lunr.Query.presence.OPTIONAL, this.fooClause.presence) + assert.equal(lunr.Query.presence.REQUIRED, this.barClause.presence) + }) + + test('#editDistance', function () { + assert.equal(10, this.fooClause.editDistance) + // It feels dirty asserting that something is undefined + // but there is no Optional so this is what we are reduced to + assert.isUndefined(this.barClause.editDistance) + }) + }) + }) + + suite('boost followed 
by presence', function () { + setup(function () { + this.clauses = parse('foo^10 +bar') + }) + + test('has 2 clause', function () { + assert.lengthOf(this.clauses, 2) + }) + + suite('clauses', function () { + setup(function () { + this.fooClause = this.clauses[0] + this.barClause = this.clauses[1] + }) + + test('#term', function () { + assert.equal('foo', this.fooClause.term) + assert.equal('bar', this.barClause.term) + }) + + test('#presence', function () { + assert.equal(lunr.Query.presence.OPTIONAL, this.fooClause.presence) + assert.equal(lunr.Query.presence.REQUIRED, this.barClause.presence) + }) + + test('#boost', function () { + assert.equal(10, this.fooClause.boost) + assert.equal(1, this.barClause.boost) + }) + }) + }) + + suite('field without a term', function () { + test('fails with lunr.QueryParseError', function () { + assert.throws(function () { parse('title:') }, lunr.QueryParseError) + }) + }) + + suite('unknown field', function () { + test('fails with lunr.QueryParseError', function () { + assert.throws(function () { parse('unknown:foo') }, lunr.QueryParseError) + }) + }) + + suite('term with field', function () { + setup(function () { + this.clauses = parse('title:foo') + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('clause contains only scoped field', function () { + assert.sameMembers(this.clauses[0].fields, ['title']) + }) + }) + + suite('uppercase field with uppercase term', function () { + setup(function () { + // Using a different query to the rest of the tests + // so that only this test has to worry about an upcase field name + var query = new lunr.Query (['TITLE']), + parser = new lunr.QueryParser("TITLE:FOO", query) + + parser.parse() + + this.clauses = query.clauses + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('clause contains downcased term', function () { + assert.equal(this.clauses[0].term, 'foo') + }) + + test('clause contains only scoped 
field', function () { + assert.sameMembers(this.clauses[0].fields, ['TITLE']) + }) + }) + + suite('multiple terms scoped to different fields', function () { + setup(function () { + this.clauses = parse('title:foo body:bar') + }) + + test('has 2 clauses', function () { + assert.lengthOf(this.clauses, 2) + }) + + test('fields', function () { + assert.sameMembers(['title'], this.clauses[0].fields) + assert.sameMembers(['body'], this.clauses[1].fields) + }) + + test('terms', function () { + assert.equal('foo', this.clauses[0].term) + assert.equal('bar', this.clauses[1].term) + }) + }) + + suite('single term with edit distance', function () { + setup(function () { + this.clauses = parse('foo~2') + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + }) + + test('editDistance', function () { + assert.equal(2, this.clauses[0].editDistance) + }) + + test('fields', function () { + assert.sameMembers(['title', 'body'], this.clauses[0].fields) + }) + }) + + suite('multiple terms with edit distance', function () { + setup(function () { + this.clauses = parse('foo~2 bar~3') + }) + + test('has 2 clauses', function () { + assert.lengthOf(this.clauses, 2) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + assert.equal('bar', this.clauses[1].term) + }) + + test('editDistance', function () { + assert.equal(2, this.clauses[0].editDistance) + assert.equal(3, this.clauses[1].editDistance) + }) + + test('fields', function () { + assert.sameMembers(['title', 'body'], this.clauses[0].fields) + assert.sameMembers(['title', 'body'], this.clauses[1].fields) + }) + }) + + suite('single term scoped to field with edit distance', function () { + setup(function () { + this.clauses = parse('title:foo~2') + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + 
}) + + test('editDistance', function () { + assert.equal(2, this.clauses[0].editDistance) + }) + + test('fields', function () { + assert.sameMembers(['title'], this.clauses[0].fields) + }) + }) + + suite('non-numeric edit distance', function () { + test('throws lunr.QueryParseError', function () { + assert.throws(function () { parse('foo~a') }, lunr.QueryParseError) + }) + }) + + suite('edit distance without a term', function () { + test('throws lunr.QueryParseError', function () { + assert.throws(function () { parse('~2') }, lunr.QueryParseError) + }) + }) + + suite('single term with boost', function () { + setup(function () { + this.clauses = parse('foo^2') + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + }) + + test('boost', function () { + assert.equal(2, this.clauses[0].boost) + }) + + test('fields', function () { + assert.sameMembers(['title', 'body'], this.clauses[0].fields) + }) + }) + + suite('non-numeric boost', function () { + test('throws lunr.QueryParseError', function () { + assert.throws(function () { parse('foo^a') }, lunr.QueryParseError) + }) + }) + + suite('boost without a term', function () { + test('throws lunr.QueryParseError', function () { + assert.throws(function () { parse('^2') }, lunr.QueryParseError) + }) + }) + + suite('multiple terms with boost', function () { + setup(function () { + this.clauses = parse('foo^2 bar^3') + }) + + test('has 2 clauses', function () { + assert.lengthOf(this.clauses, 2) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + assert.equal('bar', this.clauses[1].term) + }) + + test('boost', function () { + assert.equal(2, this.clauses[0].boost) + assert.equal(3, this.clauses[1].boost) + }) + + test('fields', function () { + assert.sameMembers(['title', 'body'], this.clauses[0].fields) + assert.sameMembers(['title', 'body'], this.clauses[1].fields) + }) + }) + + suite('term 
scoped by field with boost', function () { + setup(function () { + this.clauses = parse('title:foo^2') + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + }) + + test('boost', function () { + assert.equal(2, this.clauses[0].boost) + }) + + test('fields', function () { + assert.sameMembers(['title'], this.clauses[0].fields) + }) + }) + + suite('term with presence required', function () { + setup(function () { + this.clauses = parse('+foo') + }) + + test('has 1 clauses', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + }) + + test('boost', function () { + assert.equal(1, this.clauses[0].boost) + }) + + test('fields', function () { + assert.sameMembers(['title', 'body'], this.clauses[0].fields) + }) + + test('presence', function () { + assert.equal(lunr.Query.presence.REQUIRED, this.clauses[0].presence) + }) + }) + + suite('term with presence prohibited', function () { + setup(function () { + this.clauses = parse('-foo') + }) + + test('has 1 clauses', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + }) + + test('boost', function () { + assert.equal(1, this.clauses[0].boost) + }) + + test('fields', function () { + assert.sameMembers(['title', 'body'], this.clauses[0].fields) + }) + + test('presence', function () { + assert.equal(lunr.Query.presence.PROHIBITED, this.clauses[0].presence) + }) + }) + + suite('term scoped by field with presence required', function () { + setup(function () { + this.clauses = parse('+title:foo') + }) + + test('has 1 clauses', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + }) + + test('boost', function () { + assert.equal(1, this.clauses[0].boost) + }) + + test('fields', function () { + 
assert.sameMembers(['title'], this.clauses[0].fields) + }) + + test('presence', function () { + assert.equal(lunr.Query.presence.REQUIRED, this.clauses[0].presence) + }) + }) + + suite('term scoped by field with presence prohibited', function () { + setup(function () { + this.clauses = parse('-title:foo') + }) + + test('has 1 clauses', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + }) + + test('boost', function () { + assert.equal(1, this.clauses[0].boost) + }) + + test('fields', function () { + assert.sameMembers(['title'], this.clauses[0].fields) + }) + + test('presence', function () { + assert.equal(lunr.Query.presence.PROHIBITED, this.clauses[0].presence) + }) + }) + }) + + suite('term with boost and edit distance', function () { + setup(function () { + this.clauses = parse('foo^2~3') + }) + + test('has 1 clause', function () { + assert.lengthOf(this.clauses, 1) + }) + + test('term', function () { + assert.equal('foo', this.clauses[0].term) + }) + + test('editDistance', function () { + assert.equal(3, this.clauses[0].editDistance) + }) + + test('boost', function () { + assert.equal(2, this.clauses[0].boost) + }) + + test('fields', function () { + assert.sameMembers(['title', 'body'], this.clauses[0].fields) + }) + }) +}) diff --git a/node_modules/lunr/test/query_test.js b/node_modules/lunr/test/query_test.js new file mode 100644 index 0000000..a47b110 --- /dev/null +++ b/node_modules/lunr/test/query_test.js @@ -0,0 +1,244 @@ +suite('lunr.Query', function () { + var allFields = ['title', 'body'] + + suite('#term', function () { + setup(function () { + this.query = new lunr.Query (allFields) + }) + + suite('single string term', function () { + setup(function () { + this.query.term('foo') + }) + + test('adds a single clause', function () { + assert.equal(this.query.clauses.length, 1) + }) + + test('clause has the correct term', function () { + 
assert.equal(this.query.clauses[0].term, 'foo') + }) + }) + + suite('single token term', function () { + setup(function () { + this.query.term(new lunr.Token('foo')) + }) + + test('adds a single clause', function () { + assert.equal(this.query.clauses.length, 1) + }) + + test('clause has the correct term', function () { + assert.equal(this.query.clauses[0].term, 'foo') + }) + }) + + suite('multiple string terms', function () { + setup(function () { + this.query.term(['foo', 'bar']) + }) + + test('adds a single clause', function () { + assert.equal(this.query.clauses.length, 2) + }) + + test('clause has the correct term', function () { + var terms = this.query.clauses.map(function (c) { return c.term }) + assert.sameMembers(terms, ['foo', 'bar']) + }) + }) + + suite('multiple string terms with options', function () { + setup(function () { + this.query.term(['foo', 'bar'], { usePipeline: false }) + }) + + test('clause has the correct term', function () { + var terms = this.query.clauses.map(function (c) { return c.term }) + assert.sameMembers(terms, ['foo', 'bar']) + }) + }) + + suite('multiple token terms', function () { + setup(function () { + this.query.term(lunr.tokenizer('foo bar')) + }) + + test('adds a single clause', function () { + assert.equal(this.query.clauses.length, 2) + }) + + test('clause has the correct term', function () { + var terms = this.query.clauses.map(function (c) { return c.term }) + assert.sameMembers(terms, ['foo', 'bar']) + }) + }) + }) + + suite('#clause', function () { + setup(function () { + this.query = new lunr.Query (allFields) + }) + + suite('defaults', function () { + setup(function () { + this.query.clause({term: 'foo'}) + this.clause = this.query.clauses[0] + }) + + test('fields', function () { + assert.sameMembers(this.clause.fields, allFields) + }) + + test('boost', function () { + assert.equal(this.clause.boost, 1) + }) + + test('usePipeline', function () { + assert.isTrue(this.clause.usePipeline) + }) + }) + + 
suite('specified', function () { + setup(function () { + this.query.clause({ + term: 'foo', + boost: 10, + fields: ['title'], + usePipeline: false + }) + + this.clause = this.query.clauses[0] + }) + + test('fields', function () { + assert.sameMembers(this.clause.fields, ['title']) + }) + + test('boost', function () { + assert.equal(this.clause.boost, 10) + }) + + test('usePipeline', function () { + assert.isFalse(this.clause.usePipeline) + }) + }) + + suite('wildcards', function () { + suite('none', function () { + setup(function () { + this.query.clause({ + term: 'foo', + wildcard: lunr.Query.wildcard.NONE + }) + + this.clause = this.query.clauses[0] + }) + + test('no wildcard', function () { + assert.equal(this.clause.term, 'foo') + }) + }) + + suite('leading', function () { + setup(function () { + this.query.clause({ + term: 'foo', + wildcard: lunr.Query.wildcard.LEADING + }) + + this.clause = this.query.clauses[0] + }) + + test('adds wildcard', function () { + assert.equal(this.clause.term, '*foo') + }) + }) + + suite('trailing', function () { + setup(function () { + this.query.clause({ + term: 'foo', + wildcard: lunr.Query.wildcard.TRAILING + }) + + this.clause = this.query.clauses[0] + }) + + test('adds wildcard', function () { + assert.equal(this.clause.term, 'foo*') + }) + }) + + suite('leading and trailing', function () { + setup(function () { + this.query.clause({ + term: 'foo', + wildcard: lunr.Query.wildcard.TRAILING | lunr.Query.wildcard.LEADING + }) + + this.clause = this.query.clauses[0] + }) + + test('adds wildcards', function () { + assert.equal(this.clause.term, '*foo*') + }) + }) + + suite('existing', function () { + setup(function () { + this.query.clause({ + term: '*foo*', + wildcard: lunr.Query.wildcard.TRAILING | lunr.Query.wildcard.LEADING + }) + + this.clause = this.query.clauses[0] + }) + + test('no additional wildcards', function () { + assert.equal(this.clause.term, '*foo*') + }) + }) + }) + }) + + suite('#isNegated', function () { + 
setup(function () { + this.query = new lunr.Query (allFields) + }) + + suite('all prohibited', function () { + setup(function () { + this.query.term('foo', { presence: lunr.Query.presence.PROHIBITED }) + this.query.term('bar', { presence: lunr.Query.presence.PROHIBITED }) + }) + + test('is negated', function () { + assert.isTrue(this.query.isNegated()) + }) + }) + + suite('some prohibited', function () { + setup(function () { + this.query.term('foo', { presence: lunr.Query.presence.PROHIBITED }) + this.query.term('bar', { presence: lunr.Query.presence.REQUIRED }) + }) + + test('is negated', function () { + assert.isFalse(this.query.isNegated()) + }) + }) + + suite('none prohibited', function () { + setup(function () { + this.query.term('foo', { presence: lunr.Query.presence.OPTIONAL }) + this.query.term('bar', { presence: lunr.Query.presence.REQUIRED }) + }) + + test('is negated', function () { + assert.isFalse(this.query.isNegated()) + }) + }) + }) +}) diff --git a/node_modules/lunr/test/search_test.js b/node_modules/lunr/test/search_test.js new file mode 100644 index 0000000..a652fe9 --- /dev/null +++ b/node_modules/lunr/test/search_test.js @@ -0,0 +1,1100 @@ +suite('search', function () { + setup(function () { + this.documents = [{ + id: 'a', + title: 'Mr. Green kills Colonel Mustard', + body: 'Mr. Green killed Colonel Mustard in the study with the candlestick. Mr. 
Green is not a very nice fellow.', + wordCount: 19 + },{ + id: 'b', + title: 'Plumb waters plant', + body: 'Professor Plumb has a green plant in his study', + wordCount: 9 + },{ + id: 'c', + title: 'Scarlett helps Professor', + body: 'Miss Scarlett watered Professor Plumbs green plant while he was away from his office last week.', + wordCount: 16 + }] + }) + + suite('with build-time field boosts', function () { + setup(function () { + var self = this + + this.idx = lunr(function () { + this.ref('id') + this.field('title') + this.field('body', { boost: 10 }) + + self.documents.forEach(function (document) { + this.add(document) + }, this) + }) + }) + + suite('no query boosts', function () { + var assertions = function () { + test('document b ranks highest', function () { + assert.equal('b', this.results[0].ref) + }) + } + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('professor') + }) + + assertions() + }) + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('professor') + }) + }) + + assertions() + }) + }) + }) + + suite('with build-time document boost', function () { + setup(function () { + var self = this + + this.idx = lunr(function () { + this.ref('id') + this.field('title') + this.field('body') + + self.documents.forEach(function (document) { + var boost = document.id == 'c' ? 
10 : 1 + this.add(document, { boost: boost }) + }, this) + }) + }) + + suite('no query boost', function () { + var assertions = function () { + test('document c ranks highest', function () { + assert.equal('c', this.results[0].ref) + }) + } + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('plumb') + }) + + assertions() + }) + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('plumb') + }) + }) + + assertions() + }) + }) + + suite('with query boost', function () { + var assertions = function () { + test('document b ranks highest', function () { + assert.equal('b', this.results[0].ref) + }) + } + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('green study^10') + }) + + assertions() + }) + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('green') + q.term('study', { boost: 10 }) + }) + }) + + assertions() + }) + }) + }) + + suite('without build-time boosts', function () { + setup(function () { + var self = this + + this.idx = lunr(function () { + this.ref('id') + this.field('title') + this.field('body') + + self.documents.forEach(function (document) { + this.add(document) + }, this) + }) + }) + + suite('single term search', function () { + suite('one match', function () { + var assertions = function () { + test('one result returned', function () { + assert.lengthOf(this.results, 1) + }) + + test('document c matches', function () { + assert.equal('c', this.results[0].ref) + }) + + test('matching term', function () { + assert.sameMembers(['scarlett'], Object.keys(this.results[0].matchData.metadata)) + }) + } + + suite('#seach', function () { + setup(function () { + this.results = this.idx.search('scarlett') + }) + + assertions() + }) + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('scarlett') + }) + }) + + 
assertions() + }) + }) + + suite('no match', function () { + setup(function () { + this.results = this.idx.search('foo') + }) + + test('no matches', function () { + assert.lengthOf(this.results, 0) + }) + }) + + suite('multiple matches', function () { + setup(function () { + this.results = this.idx.search('plant') + }) + + test('has two matches', function () { + assert.lengthOf(this.results, 2) + }) + + test('sorted by relevance', function () { + assert.equal('b', this.results[0].ref) + assert.equal('c', this.results[1].ref) + }) + }) + + suite('pipeline processing', function () { + // study would be stemmed to studi, tokens + // are stemmed by default on index and must + // also be stemmed on search to match + suite('enabled (default)', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.clause({term: 'study', usePipeline: true}) + }) + }) + + test('has two matches', function () { + assert.lengthOf(this.results, 2) + }) + + test('sorted by relevance', function () { + assert.equal('b', this.results[0].ref) + assert.equal('a', this.results[1].ref) + }) + }) + + suite('disabled', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.clause({term: 'study', usePipeline: false}) + }) + }) + + test('no matches', function () { + assert.lengthOf(this.results, 0) + }) + }) + }) + }) + + suite('multiple terms', function () { + suite('all terms match', function () { + setup(function () { + this.results = this.idx.search('fellow candlestick') + }) + + test('has one match', function () { + assert.lengthOf(this.results, 1) + }) + + test('correct document returned', function () { + assert.equal('a', this.results[0].ref) + }) + + test('matched terms returned', function () { + assert.sameMembers(['fellow', 'candlestick'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['body'], Object.keys(this.results[0].matchData.metadata['fellow'])); + assert.sameMembers(['body'], 
Object.keys(this.results[0].matchData.metadata['candlestick'])); + }) + }) + + suite('one term matches', function () { + setup(function () { + this.results = this.idx.search('week foo') + }) + + test('has one match', function () { + assert.lengthOf(this.results, 1) + }) + + test('correct document returned', function () { + assert.equal('c', this.results[0].ref) + }) + + test('only matching terms returned', function () { + assert.sameMembers(['week'], Object.keys(this.results[0].matchData.metadata)) + }) + }) + + suite('duplicate query terms', function () { + // https://github.com/olivernn/lunr.js/issues/256 + // previously this would throw a duplicate index error + // because the query vector already contained an entry + // for the term 'fellow' + test('no errors', function () { + var idx = this.idx + assert.doesNotThrow(function () { + idx.search('fellow candlestick foo bar green plant fellow') + }) + }) + }) + + suite('documents with all terms score higher', function () { + setup(function () { + this.results = this.idx.search('candlestick green') + }) + + test('has three matches', function () { + assert.lengthOf(this.results, 3) + }) + + test('correct documents returned', function () { + var matchingDocuments = this.results.map(function (r) { + return r.ref + }) + assert.sameMembers(['a', 'b', 'c'], matchingDocuments) + }) + + test('documents with all terms score highest', function () { + assert.equal('a', this.results[0].ref) + }) + + test('matching terms are returned', function () { + assert.sameMembers(['candlestick', 'green'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['green'], Object.keys(this.results[1].matchData.metadata)) + assert.sameMembers(['green'], Object.keys(this.results[2].matchData.metadata)) + }) + }) + + suite('no terms match', function () { + setup(function () { + this.results = this.idx.search('foo bar') + }) + + test('no matches', function () { + assert.lengthOf(this.results, 0) + }) + }) + + suite('corpus terms 
are stemmed', function () { + setup(function () { + this.results = this.idx.search('water') + }) + + test('matches two documents', function () { + assert.lengthOf(this.results, 2) + }) + + test('matches correct documents', function () { + var matchingDocuments = this.results.map(function (r) { + return r.ref + }) + assert.sameMembers(['b', 'c'], matchingDocuments) + }) + }) + + suite('field scoped terms', function () { + suite('only matches on scoped field', function () { + setup(function () { + this.results = this.idx.search('title:plant') + }) + + test('one result returned', function () { + assert.lengthOf(this.results, 1) + }) + + test('returns the correct document', function () { + assert.equal('b', this.results[0].ref) + }) + + test('match data', function () { + assert.sameMembers(['plant'], Object.keys(this.results[0].matchData.metadata)) + }) + }) + + suite('no matching terms', function () { + setup(function () { + this.results = this.idx.search('title:candlestick') + }) + + test('no results returned', function () { + assert.lengthOf(this.results, 0) + }) + }) + }) + + suite('wildcard matching', function () { + suite('trailing wildcard', function () { + suite('no matches', function () { + setup(function () { + this.results = this.idx.search('fo*') + }) + + test('no results returned', function () { + assert.lengthOf(this.results, 0) + }) + }) + + suite('one match', function () { + setup(function () { + this.results = this.idx.search('candle*') + }) + + test('one result returned', function () { + assert.lengthOf(this.results, 1) + }) + + test('correct document matched', function () { + assert.equal('a', this.results[0].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['candlestick'], Object.keys(this.results[0].matchData.metadata)) + }) + }) + + suite('multiple terms match', function () { + setup(function () { + this.results = this.idx.search('pl*') + }) + + test('two results returned', function () { + 
assert.lengthOf(this.results, 2) + }) + + test('correct documents matched', function () { + var matchingDocuments = this.results.map(function (r) { + return r.ref + }) + assert.sameMembers(['b', 'c'], matchingDocuments) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plumb', 'plant'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['plumb', 'plant'], Object.keys(this.results[1].matchData.metadata)) + }) + }) + }) + }) + }) + + suite('wildcard matching', function () { + suite('trailing wildcard', function () { + suite('no matches found', function () { + setup(function () { + this.results = this.idx.search('fo*') + }) + + test('no results returned', function () { + assert.lengthOf(this.results, 0) + }) + }) + + suite('results found', function () { + setup(function () { + this.results = this.idx.search('pl*') + }) + + test('two results returned', function () { + assert.lengthOf(this.results, 2) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + assert.equal('c', this.results[1].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plant', 'plumb'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['plant', 'plumb'], Object.keys(this.results[1].matchData.metadata)) + }) + }) + }) + + suite('leading wildcard', function () { + suite('no results found', function () { + setup(function () { + this.results = this.idx.search('*oo') + }) + + test('no results found', function () { + assert.lengthOf(this.results, 0) + }) + }) + + suite('results found', function () { + setup(function () { + this.results = this.idx.search('*ant') + }) + + test('two results found', function () { + assert.lengthOf(this.results, 2) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + assert.equal('c', this.results[1].ref) + }) + + test('matching terms returned', function () { + 
assert.sameMembers(['plant'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['plant'], Object.keys(this.results[1].matchData.metadata)) + }) + }) + }) + + suite('contained wildcard', function () { + suite('no results found', function () { + setup(function () { + this.results = this.idx.search('f*o') + }) + + test('no results found', function () { + assert.lengthOf(this.results, 0) + }) + }) + + suite('results found', function () { + setup(function () { + this.results = this.idx.search('pl*nt') + }) + + test('two results found', function () { + assert.lengthOf(this.results, 2) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + assert.equal('c', this.results[1].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plant'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['plant'], Object.keys(this.results[1].matchData.metadata)) + }) + }) + }) + }) + + suite('edit distance', function () { + suite('no results found', function () { + setup(function () { + this.results = this.idx.search('foo~1') + }) + + test('no results returned', function () { + assert.lengthOf(this.results, 0) + }) + }) + + suite('results found', function () { + setup(function () { + this.results = this.idx.search('plont~1') + }) + + test('two results found', function () { + assert.lengthOf(this.results, 2) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + assert.equal('c', this.results[1].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plant'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['plant'], Object.keys(this.results[1].matchData.metadata)) + }) + }) + }) + + suite('searching by field', function () { + suite('unknown field', function () { + test('throws lunr.QueryParseError', function () { + assert.throws(function () { + this.idx.search('unknown-field:plant') 
+ }.bind(this), lunr.QueryParseError) + }) + }) + + suite('no results found', function () { + setup(function () { + this.results = this.idx.search('title:candlestick') + }) + + test('no results found', function () { + assert.lengthOf(this.results, 0) + }) + }) + + suite('results found', function () { + setup(function () { + this.results = this.idx.search('title:plant') + }) + + test('one results found', function () { + assert.lengthOf(this.results, 1) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plant'], Object.keys(this.results[0].matchData.metadata)) + }) + }) + }) + + suite('term boosts', function () { + suite('no results found', function () { + setup(function () { + this.results = this.idx.search('foo^10') + }) + + test('no results found', function () { + assert.lengthOf(this.results, 0) + }) + }) + + suite('results found', function () { + setup(function () { + this.results = this.idx.search('scarlett candlestick^5') + }) + + test('two results found', function () { + assert.lengthOf(this.results, 2) + }) + + test('matching documents returned', function () { + assert.equal('a', this.results[0].ref) + assert.equal('c', this.results[1].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['candlestick'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['scarlett'], Object.keys(this.results[1].matchData.metadata)) + }) + }) + }) + + suite('typeahead style search', function () { + suite('no results found', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term("xyz", { boost: 100, usePipeline: true }) + q.term("xyz", { boost: 10, usePipeline: false, wildcard: lunr.Query.wildcard.TRAILING }) + q.term("xyz", { boost: 1, editDistance: 1 }) + }) + }) + + test('no results found', function () { + assert.lengthOf(this.results, 0) + }) + }) + + 
suite('results found', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term("pl", { boost: 100, usePipeline: true }) + q.term("pl", { boost: 10, usePipeline: false, wildcard: lunr.Query.wildcard.TRAILING }) + q.term("pl", { boost: 1, editDistance: 1 }) + }) + }) + + test('two results found', function () { + assert.lengthOf(this.results, 2) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + assert.equal('c', this.results[1].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plumb', 'plant'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['plumb', 'plant'], Object.keys(this.results[1].matchData.metadata)) + }) + }) + }) + + suite('term presence', function () { + suite('prohibited', function () { + suite('match', function () { + var assertions = function () { + test('two results found', function () { + assert.lengthOf(this.results, 2) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + assert.equal('c', this.results[1].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['green'], Object.keys(this.results[0].matchData.metadata)) + assert.sameMembers(['green'], Object.keys(this.results[1].matchData.metadata)) + }) + } + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('candlestick', { presence: lunr.Query.presence.PROHIBITED }) + q.term('green', { presence: lunr.Query.presence.OPTIONAL }) + }) + }) + + assertions() + }) + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('-candlestick green') + }) + + assertions() + }) + }) + + suite('no match', function () { + var assertions = function () { + test('no matches', function () { + assert.lengthOf(this.results, 0) + }) + } + + suite('#query', function () { + setup(function () { + this.results = 
this.idx.query(function (q) { + q.term('green', { presence: lunr.Query.presence.PROHIBITED }) + }) + }) + + assertions() + }) + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('-green') + }) + + assertions() + }) + }) + + suite('negated query no match', function () { + var assertions = function () { + test('all documents returned', function () { + assert.lengthOf(this.results, 3) + }) + + test('all results have same score', function () { + assert.isTrue(this.results.every(function (r) { return r.score === 0 })) + }) + } + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('qwertyuiop', { presence: lunr.Query.presence.PROHIBITED }) + }) + }) + + assertions() + }) + + suite('#search', function () { + setup(function () { + this.results = this.idx.search("-qwertyuiop") + }) + + assertions() + }) + }) + + suite('negated query some match', function () { + var assertions = function () { + test('all documents returned', function () { + assert.lengthOf(this.results, 1) + }) + + test('all results have same score', function () { + assert.isTrue(this.results.every(function (r) { return r.score === 0 })) + }) + + test('matching documents returned', function () { + assert.equal('a', this.results[0].ref) + }) + } + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('plant', { presence: lunr.Query.presence.PROHIBITED }) + }) + }) + + assertions() + }) + + suite('#search', function () { + setup(function () { + this.results = this.idx.search("-plant") + }) + + assertions() + }) + }) + + suite('field match', function () { + var assertions = function () { + test('one result found', function () { + assert.lengthOf(this.results, 1) + }) + + test('matching documents returned', function () { + assert.equal('c', this.results[0].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plumb'], 
Object.keys(this.results[0].matchData.metadata)) + }) + } + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('plant', { presence: lunr.Query.presence.PROHIBITED, fields: ['title'] }) + q.term('plumb', { presence: lunr.Query.presence.OPTIONAL }) + }) + }) + + assertions() + }) + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('-title:plant plumb') + }) + + assertions() + }) + }) + }) + + suite('required', function () { + suite('match', function () { + var assertions = function () { + test('one result found', function () { + assert.lengthOf(this.results, 1) + }) + + test('matching documents returned', function () { + assert.equal('a', this.results[0].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['candlestick', 'green'], Object.keys(this.results[0].matchData.metadata)) + }) + } + + suite('#search', function () { + setup(function () { + this.results = this.idx.search("+candlestick green") + }) + + assertions() + }) + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('candlestick', { presence: lunr.Query.presence.REQUIRED }) + q.term('green', { presence: lunr.Query.presence.OPTIONAL }) + }) + }) + + assertions() + }) + }) + + suite('no match', function () { + var assertions = function () { + test('no matches', function () { + assert.lengthOf(this.results, 0) + }) + } + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('mustard', { presence: lunr.Query.presence.REQUIRED }) + q.term('plant', { presence: lunr.Query.presence.REQUIRED }) + }) + }) + + assertions() + }) + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('+mustard +plant') + }) + + assertions() + }) + }) + + suite('no matching term', function () { + var assertions = function () { + test('no matches', function () { + 
assert.lengthOf(this.results, 0) + }) + } + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('qwertyuiop', { presence: lunr.Query.presence.REQUIRED }) + q.term('green', { presence: lunr.Query.presence.OPTIONAL }) + }) + }) + + assertions() + }) + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('+qwertyuiop green') + }) + + assertions() + }) + }) + + suite('field match', function () { + var assertions = function () { + test('one result found', function () { + assert.lengthOf(this.results, 1) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plant', 'green'], Object.keys(this.results[0].matchData.metadata)) + }) + } + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('plant', { presence: lunr.Query.presence.REQUIRED, fields: ['title'] }) + q.term('green', { presence: lunr.Query.presence.OPTIONAL }) + }) + }) + + assertions() + }) + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('+title:plant green') + }) + + assertions() + }) + }) + + suite('field and non field match', function () { + var assertions = function () { + test('one result found', function () { + assert.lengthOf(this.results, 1) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plant', 'green'], Object.keys(this.results[0].matchData.metadata)) + }) + } + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('+title:plant +green') + }) + + assertions() + }) + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('plant', { fields: ['title'], presence: 
lunr.Query.presence.REQUIRED }) + q.term('green', { presence: lunr.Query.presence.REQUIRED }) + }) + }) + + assertions() + }) + }) + + suite('different fields', function () { + var assertions = function () { + test('one result found', function () { + assert.lengthOf(this.results, 1) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['studi', 'plant'], Object.keys(this.results[0].matchData.metadata)) + }) + } + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('+title:plant +body:study') + }) + + assertions() + }) + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('plant', { fields: ['title'], presence: lunr.Query.presence.REQUIRED }) + q.term('study', { fields: ['body'], presence: lunr.Query.presence.REQUIRED }) + }) + }) + + assertions() + }) + }) + + suite('different fields one without match', function () { + var assertions = function () { + test('no matches', function () { + assert.lengthOf(this.results, 0) + }) + } + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('+title:plant +body:qwertyuiop') + }) + + assertions() + }) + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('plant', { fields: ['title'], presence: lunr.Query.presence.REQUIRED }) + q.term('qwertyuiop', { fields: ['body'], presence: lunr.Query.presence.REQUIRED }) + }) + }) + + assertions() + }) + }) + }) + + suite('combined', function () { + var assertions = function () { + test('one result found', function () { + assert.lengthOf(this.results, 1) + }) + + test('matching documents returned', function () { + assert.equal('b', this.results[0].ref) + }) + + test('matching terms returned', function () { + assert.sameMembers(['plant', 'green'], 
Object.keys(this.results[0].matchData.metadata)) + }) + } + + suite('#query', function () { + setup(function () { + this.results = this.idx.query(function (q) { + q.term('plant', { presence: lunr.Query.presence.REQUIRED }) + q.term('green', { presence: lunr.Query.presence.OPTIONAL }) + q.term('office', { presence: lunr.Query.presence.PROHIBITED }) + }) + }) + + assertions() + }) + + suite('#search', function () { + setup(function () { + this.results = this.idx.search('+plant green -office') + }) + + assertions() + }) + + }) + }) + }) +}) diff --git a/node_modules/lunr/test/serialization_test.js b/node_modules/lunr/test/serialization_test.js new file mode 100644 index 0000000..2e31295 --- /dev/null +++ b/node_modules/lunr/test/serialization_test.js @@ -0,0 +1,53 @@ +suite('serialization', function () { + setup(function () { + var documents = [{ + id: 'a', + title: 'Mr. Green kills Colonel Mustard', + body: 'Mr. Green killed Colonel Mustard in the study with the candlestick. Mr. Green is not a very nice fellow.', + wordCount: 19 + },{ + id: 'b', + title: 'Plumb waters plant', + body: 'Professor Plumb has a green plant in his study', + wordCount: 9 + },{ + id: 'c', + title: 'Scarlett helps Professor', + body: 'Miss Scarlett watered Professor Plumbs green plant while he was away from his office last week.', + wordCount: 16 + },{ + id: 'd', + title: 'All about JavaScript', + body: 'JavaScript objects have a special __proto__ property', + wordCount: 7 + }] + + this.idx = lunr(function () { + this.ref('id') + this.field('title') + this.field('body') + + documents.forEach(function (document) { + this.add(document) + }, this) + }) + + this.serializedIdx = JSON.stringify(this.idx) + this.loadedIdx = lunr.Index.load(JSON.parse(this.serializedIdx)) + }) + + test('search', function () { + var idxResults = this.idx.search('green'), + serializedResults = this.loadedIdx.search('green') + + assert.deepEqual(idxResults, serializedResults) + }) + + test('__proto__ double 
serialization', function () { + var doubleLoadedIdx = lunr.Index.load(JSON.parse(JSON.stringify(this.loadedIdx))), + idxResults = this.idx.search('__proto__'), + doubleSerializedResults = doubleLoadedIdx.search('__proto__') + + assert.deepEqual(idxResults, doubleSerializedResults) + }) +}) diff --git a/node_modules/lunr/test/set_test.js b/node_modules/lunr/test/set_test.js new file mode 100644 index 0000000..a65657a --- /dev/null +++ b/node_modules/lunr/test/set_test.js @@ -0,0 +1,147 @@ +suite('lunr.Set', function () { + suite('#contains', function () { + suite('complete set', function () { + test('returns true', function () { + assert.isOk(lunr.Set.complete.contains('foo')) + }) + }) + + suite('empty set', function () { + test('returns false', function () { + assert.isNotOk(lunr.Set.empty.contains('foo')) + }) + }) + + suite('populated set', function () { + setup(function () { + this.set = new lunr.Set (['foo']) + }) + + test('element contained in set', function () { + assert.isOk(this.set.contains('foo')) + }) + + test('element not contained in set', function () { + assert.isNotOk(this.set.contains('bar')) + }) + }) + }) + + suite('#union', function () { + setup(function () { + this.set = new lunr.Set(['foo']) + }) + + suite('complete set', function () { + test('union is complete', function () { + var result = lunr.Set.complete.union(this.set) + assert.isOk(result.contains('foo')) + assert.isOk(result.contains('bar')) + }) + }) + + suite('empty set', function () { + test('contains element', function () { + var result = lunr.Set.empty.union(this.set) + assert.isOk(result.contains('foo')) + assert.isNotOk(result.contains('bar')) + }) + }) + + suite('populated set', function () { + suite('with other populated set', function () { + test('contains both elements', function () { + var target = new lunr.Set (['bar']) + var result = target.union(this.set) + + assert.isOk(result.contains('foo')) + assert.isOk(result.contains('bar')) + 
assert.isNotOk(result.contains('baz')) + }) + }) + + suite('with empty set', function () { + test('contains all elements', function () { + var target = new lunr.Set (['bar']) + var result = target.union(lunr.Set.empty) + + assert.isOk(result.contains('bar')) + assert.isNotOk(result.contains('baz')) + }) + }) + + suite('with complete set', function () { + test('contains all elements', function () { + var target = new lunr.Set (['bar']) + var result = target.union(lunr.Set.complete) + + assert.isOk(result.contains('foo')) + assert.isOk(result.contains('bar')) + assert.isOk(result.contains('baz')) + }) + }) + }) + }) + + suite('#intersect', function () { + setup(function () { + this.set = new lunr.Set(['foo']) + }) + + suite('complete set', function () { + test('contains element', function () { + var result = lunr.Set.complete.intersect(this.set) + assert.isOk(result.contains('foo')) + assert.isNotOk(result.contains('bar')) + }) + }) + + suite('empty set', function () { + test('does not contain element', function () { + var result = lunr.Set.empty.intersect(this.set) + assert.isNotOk(result.contains('foo')) + }) + }) + + suite('populated set', function () { + suite('no intersection', function () { + test('does not contain intersection elements', function () { + var target = new lunr.Set (['bar']) + var result = target.intersect(this.set) + + assert.isNotOk(result.contains('foo')) + assert.isNotOk(result.contains('bar')) + }) + }) + + suite('intersection', function () { + test('contains intersection elements', function () { + var target = new lunr.Set (['foo', 'bar']) + var result = target.intersect(this.set) + + assert.isOk(result.contains('foo')) + assert.isNotOk(result.contains('bar')) + }) + }) + + suite('with empty set', function () { + test('returns empty set', function () { + var target = new lunr.Set(['foo']), + result = target.intersect(lunr.Set.empty) + + assert.isNotOk(result.contains('foo')) + }) + }) + + suite('with complete set', function () { + 
test('returns populated set', function () { + var target = new lunr.Set(['foo']), + result = target.intersect(lunr.Set.complete) + + assert.isOk(result.contains('foo')) + assert.isNotOk(result.contains('bar')) + }) + }) + }) + }) +}) diff --git a/node_modules/lunr/test/stemmer_test.js b/node_modules/lunr/test/stemmer_test.js new file mode 100644 index 0000000..2c9723f --- /dev/null +++ b/node_modules/lunr/test/stemmer_test.js @@ -0,0 +1,26 @@ +suite('lunr.stemmer', function () { + test('reduces words to their stem', function (done) { + withFixture('stemming_vocab.json', function (err, fixture) { + if (err != null) { + throw err + } + + var testData = JSON.parse(fixture) + + Object.keys(testData).forEach(function (word) { + var expected = testData[word], + token = new lunr.Token(word), + result = lunr.stemmer(token).toString() + + assert.equal(expected, result) + }) + + done() + }) + }) + + test('is a registered pipeline function', function () { + assert.equal('stemmer', lunr.stemmer.label) + assert.equal(lunr.stemmer, lunr.Pipeline.registeredFunctions['stemmer']) + }) +}) diff --git a/node_modules/lunr/test/stop_word_filter_test.js b/node_modules/lunr/test/stop_word_filter_test.js new file mode 100644 index 0000000..d16e51b --- /dev/null +++ b/node_modules/lunr/test/stop_word_filter_test.js @@ -0,0 +1,30 @@ +suite('lunr.stopWordFilter', function () { + test('filters stop words', function () { + var stopWords = ['the', 'and', 'but', 'than', 'when'] + + stopWords.forEach(function (word) { + assert.isUndefined(lunr.stopWordFilter(word)) + }) + }) + + test('ignores non stop words', function () { + var nonStopWords = ['interesting', 'words', 'pass', 'through'] + + nonStopWords.forEach(function (word) { + assert.equal(word, lunr.stopWordFilter(word)) + }) + }) + + test('ignores properties of Object.prototype', function () { + var nonStopWords = ['constructor', 'hasOwnProperty', 'toString', 'valueOf'] + + nonStopWords.forEach(function (word) { + assert.equal(word, 
lunr.stopWordFilter(word)) + }) + }) + + test('is a registered pipeline function', function () { + assert.equal('stopWordFilter', lunr.stopWordFilter.label) + assert.equal(lunr.stopWordFilter, lunr.Pipeline.registeredFunctions['stopWordFilter']) + }) +}) diff --git a/node_modules/lunr/test/test_helper.js b/node_modules/lunr/test/test_helper.js new file mode 100644 index 0000000..5beb3e3 --- /dev/null +++ b/node_modules/lunr/test/test_helper.js @@ -0,0 +1,13 @@ +var lunr = require('../lunr.js'), + assert = require('chai').assert, + fs = require('fs'), + path = require('path') + +var withFixture = function (name, fn) { + var fixturePath = path.join('test', 'fixtures', name) + fs.readFile(fixturePath, fn) +} + +global.lunr = lunr +global.assert = assert +global.withFixture = withFixture diff --git a/node_modules/lunr/test/token_set_test.js b/node_modules/lunr/test/token_set_test.js new file mode 100644 index 0000000..c2f45b6 --- /dev/null +++ b/node_modules/lunr/test/token_set_test.js @@ -0,0 +1,327 @@ +suite('lunr.TokenSet', function () { + suite('#toString', function () { + test('includes node finality', function () { + var nonFinal = new lunr.TokenSet, + final = new lunr.TokenSet, + otherFinal = new lunr.TokenSet + + final.final = true + otherFinal.final = true + + assert.notEqual(nonFinal.toString(), final.toString()) + assert.equal(otherFinal.toString(), final.toString()) + }) + + test('includes all edges', function () { + var zeroEdges = new lunr.TokenSet, + oneEdge = new lunr.TokenSet, + twoEdges = new lunr.TokenSet + + oneEdge.edges['a'] = 1 + twoEdges.edges['a'] = 1 + twoEdges.edges['b'] = 1 + + assert.notEqual(zeroEdges.toString(), oneEdge.toString()) + assert.notEqual(twoEdges.toString(), oneEdge.toString()) + assert.notEqual(twoEdges.toString(), zeroEdges.toString()) + }) + + test('includes edge id', function () { + var childA = new lunr.TokenSet, + childB = new lunr.TokenSet, + parentA = new lunr.TokenSet, + parentB = new lunr.TokenSet, + parentC = new 
lunr.TokenSet + + parentA.edges['a'] = childA + parentB.edges['a'] = childB + parentC.edges['a'] = childB + + assert.equal(parentB.toString(), parentC.toString()) + assert.notEqual(parentA.toString(), parentC.toString()) + assert.notEqual(parentA.toString(), parentB.toString()) + }) + }) + + suite('.fromString', function () { + test('without wildcard', function () { + lunr.TokenSet._nextId = 1 + var x = lunr.TokenSet.fromString('a') + + assert.equal(x.toString(), '0a2') + assert.isOk(x.edges['a'].final) + }) + + test('with trailing wildcard', function () { + var x = lunr.TokenSet.fromString('a*'), + wild = x.edges['a'].edges['*'] + + // a state reached by a wildcard has + // an edge with a wildcard to itself. + // the resulting automota is + // non-determenistic + assert.equal(wild, wild.edges['*']) + assert.isOk(wild.final) + }) + }) + + suite('.fromArray', function () { + test('with unsorted array', function () { + assert.throws(function () { + lunr.TokenSet.fromArray(['z', 'a']) + }) + }) + + test('with sorted array', function () { + var tokenSet = lunr.TokenSet.fromArray(['a', 'z']) + + assert.deepEqual(['a', 'z'], tokenSet.toArray().sort()) + }) + + test('is minimal', function () { + var tokenSet = lunr.TokenSet.fromArray(['ac', 'dc']), + acNode = tokenSet.edges['a'].edges['c'], + dcNode = tokenSet.edges['d'].edges['c'] + + assert.deepEqual(acNode, dcNode) + }) + }) + + suite('#toArray', function () { + test('includes all words', function () { + var words = ['bat', 'cat'], + tokenSet = lunr.TokenSet.fromArray(words) + + assert.sameMembers(words, tokenSet.toArray()) + }) + + test('includes single words', function () { + var word = 'bat', + tokenSet = lunr.TokenSet.fromString(word) + + assert.sameMembers([word], tokenSet.toArray()) + }) + }) + + suite('#intersect', function () { + test('no intersection', function () { + var x = lunr.TokenSet.fromString('cat'), + y = lunr.TokenSet.fromString('bar'), + z = x.intersect(y) + + assert.equal(0, z.toArray().length) + 
}) + + test('simple intersection', function () { + var x = lunr.TokenSet.fromString('cat'), + y = lunr.TokenSet.fromString('cat'), + z = x.intersect(y) + + assert.sameMembers(['cat'], z.toArray()) + }) + + test('trailing wildcard intersection', function () { + var x = lunr.TokenSet.fromString('cat'), + y = lunr.TokenSet.fromString('c*'), + z = x.intersect(y) + + assert.sameMembers(['cat'], z.toArray()) + }) + + test('trailing wildcard no intersection', function () { + var x = lunr.TokenSet.fromString('cat'), + y = lunr.TokenSet.fromString('b*'), + z = x.intersect(y) + + assert.equal(0, z.toArray().length) + }) + + test('leading wildcard intersection', function () { + var x = lunr.TokenSet.fromString('cat'), + y = lunr.TokenSet.fromString('*t'), + z = x.intersect(y) + + assert.sameMembers(['cat'], z.toArray()) + }) + + test('leading wildcard backtracking intersection', function () { + var x = lunr.TokenSet.fromString('aaacbab'), + y = lunr.TokenSet.fromString('*ab'), + z = x.intersect(y) + + assert.sameMembers(['aaacbab'], z.toArray()) + }) + + test('leading wildcard no intersection', function () { + var x = lunr.TokenSet.fromString('cat'), + y = lunr.TokenSet.fromString('*r'), + z = x.intersect(y) + + assert.equal(0, z.toArray().length) + }) + + test('leading wildcard backtracking no intersection', function () { + var x = lunr.TokenSet.fromString('aaabdcbc'), + y = lunr.TokenSet.fromString('*abc'), + z = x.intersect(y) + + assert.equal(0, z.toArray().length) + }) + + test('contained wildcard intersection', function () { + var x = lunr.TokenSet.fromString('foo'), + y = lunr.TokenSet.fromString('f*o'), + z = x.intersect(y) + + assert.sameMembers(['foo'], z.toArray()) + }) + + test('contained wildcard backtracking intersection', function () { + var x = lunr.TokenSet.fromString('ababc'), + y = lunr.TokenSet.fromString('a*bc'), + z = x.intersect(y) + + assert.sameMembers(['ababc'], z.toArray()) + }) + + test('contained wildcard no intersection', function () { + var x = 
lunr.TokenSet.fromString('foo'), + y = lunr.TokenSet.fromString('b*r'), + z = x.intersect(y) + + assert.equal(0, z.toArray().length) + }) + + test('contained wildcard backtracking no intersection', function () { + var x = lunr.TokenSet.fromString('ababc'), + y = lunr.TokenSet.fromString('a*ac'), + z = x.intersect(y) + + assert.equal(0, z.toArray().length) + }) + + test('wildcard matches zero or more characters', function () { + var x = lunr.TokenSet.fromString('foo'), + y = lunr.TokenSet.fromString('foo*'), + z = x.intersect(y) + + assert.sameMembers(['foo'], z.toArray()) + }) + + // This test is intended to prevent 'bugs' that have lead to these + // kind of intersections taking a _very_ long time. The assertion + // is not of interest, just that the test does not timeout. + test('catastrophic backtracking with leading characters', function () { + var x = lunr.TokenSet.fromString('fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'), + y = lunr.TokenSet.fromString('*ff'), + z = x.intersect(y) + + assert.equal(1, z.toArray().length) + }) + + test('leading and trailing backtracking intersection', function () { + var x = lunr.TokenSet.fromString('acbaabab'), + y = lunr.TokenSet.fromString('*ab*'), + z = x.intersect(y) + + assert.sameMembers(['acbaabab'], z.toArray()) + }) + + test('multiple contained wildcard backtracking', function () { + var x = lunr.TokenSet.fromString('acbaabab'), + y = lunr.TokenSet.fromString('a*ba*b'), + z = x.intersect(y) + + assert.sameMembers(['acbaabab'], z.toArray()) + }) + + test('intersect with fuzzy string substitution', function () { + var x1 = lunr.TokenSet.fromString('bar'), + x2 = lunr.TokenSet.fromString('cur'), + x3 = lunr.TokenSet.fromString('cat'), + x4 = lunr.TokenSet.fromString('car'), + x5 = lunr.TokenSet.fromString('foo'), + y = lunr.TokenSet.fromFuzzyString('car', 1) + + assert.sameMembers(x1.intersect(y).toArray(), ["bar"]) + assert.sameMembers(x2.intersect(y).toArray(), ["cur"]) + 
assert.sameMembers(x3.intersect(y).toArray(), ["cat"]) + assert.sameMembers(x4.intersect(y).toArray(), ["car"]) + assert.equal(x5.intersect(y).toArray().length, 0) + }) + + test('intersect with fuzzy string deletion', function () { + var x1 = lunr.TokenSet.fromString('ar'), + x2 = lunr.TokenSet.fromString('br'), + x3 = lunr.TokenSet.fromString('ba'), + x4 = lunr.TokenSet.fromString('bar'), + x5 = lunr.TokenSet.fromString('foo'), + y = lunr.TokenSet.fromFuzzyString('bar', 1) + + assert.sameMembers(x1.intersect(y).toArray(), ["ar"]) + assert.sameMembers(x2.intersect(y).toArray(), ["br"]) + assert.sameMembers(x3.intersect(y).toArray(), ["ba"]) + assert.sameMembers(x4.intersect(y).toArray(), ["bar"]) + assert.equal(x5.intersect(y).toArray().length, 0) + }) + + test('intersect with fuzzy string insertion', function () { + var x1 = lunr.TokenSet.fromString('bbar'), + x2 = lunr.TokenSet.fromString('baar'), + x3 = lunr.TokenSet.fromString('barr'), + x4 = lunr.TokenSet.fromString('bar'), + x5 = lunr.TokenSet.fromString('ba'), + x6 = lunr.TokenSet.fromString('foo'), + x7 = lunr.TokenSet.fromString('bara'), + y = lunr.TokenSet.fromFuzzyString('bar', 1) + + assert.sameMembers(x1.intersect(y).toArray(), ["bbar"]) + assert.sameMembers(x2.intersect(y).toArray(), ["baar"]) + assert.sameMembers(x3.intersect(y).toArray(), ["barr"]) + assert.sameMembers(x4.intersect(y).toArray(), ["bar"]) + assert.sameMembers(x5.intersect(y).toArray(), ["ba"]) + assert.equal(x6.intersect(y).toArray().length, 0) + assert.sameMembers(x7.intersect(y).toArray(), ["bara"]) + }) + + test('intersect with fuzzy string transpose', function () { + var x1 = lunr.TokenSet.fromString('abr'), + x2 = lunr.TokenSet.fromString('bra'), + x3 = lunr.TokenSet.fromString('foo'), + y = lunr.TokenSet.fromFuzzyString('bar', 1) + + assert.sameMembers(x1.intersect(y).toArray(), ["abr"]) + assert.sameMembers(x2.intersect(y).toArray(), ["bra"]) + assert.equal(x3.intersect(y).toArray().length, 0) + }) + + test('fuzzy string 
insertion', function () { + var x = lunr.TokenSet.fromString('abcxx'), + y = lunr.TokenSet.fromFuzzyString('abc', 2) + + assert.sameMembers(x.intersect(y).toArray(), ['abcxx']) + }) + + test('fuzzy string substitution', function () { + var x = lunr.TokenSet.fromString('axx'), + y = lunr.TokenSet.fromFuzzyString('abc', 2) + + assert.sameMembers(x.intersect(y).toArray(), ['axx']) + }) + + test('fuzzy string deletion', function () { + var x = lunr.TokenSet.fromString('a'), + y = lunr.TokenSet.fromFuzzyString('abc', 2) + + assert.sameMembers(x.intersect(y).toArray(), ['a']) + }) + + test('fuzzy string transpose', function () { + var x = lunr.TokenSet.fromString('bca'), + y = lunr.TokenSet.fromFuzzyString('abc', 2) + + assert.sameMembers(x.intersect(y).toArray(), ['bca']) + }) + + }) +}) diff --git a/node_modules/lunr/test/token_test.js b/node_modules/lunr/test/token_test.js new file mode 100644 index 0000000..caf739b --- /dev/null +++ b/node_modules/lunr/test/token_test.js @@ -0,0 +1,60 @@ +suite('lunr.Token', function () { + suite('#toString', function () { + test('converts the token to a string', function () { + var token = new lunr.Token('foo') + assert.equal('foo', token.toString()) + }) + }) + + suite('#metadata', function () { + test('can attach arbitrary metadata', function () { + var token = new lunr.Token('foo', { length: 3 }) + assert.equal(3, token.metadata.length) + }) + }) + + suite('#update', function () { + test('can update the token value', function () { + var token = new lunr.Token('foo') + + token.update(function (s) { + return s.toUpperCase() + }) + + assert.equal('FOO', token.toString()) + }) + + test('metadata is yielded when updating', function () { + var metadata = { bar: true }, + token = new lunr.Token('foo', metadata), + yieldedMetadata + + token.update(function (_, md) { + yieldedMetadata = md + }) + + assert.equal(metadata, yieldedMetadata) + }) + }) + + suite('#clone', function () { + var token = new lunr.Token('foo', { bar: true }) + + 
test('clones value', function () { + assert.equal(token.toString(), token.clone().toString()) + }) + + test('clones metadata', function () { + assert.equal(token.metadata, token.clone().metadata) + }) + + test('clone and modify', function () { + var clone = token.clone(function (s) { + return s.toUpperCase() + }) + + assert.equal('FOO', clone.toString()) + assert.equal(token.metadata, clone.metadata) + }) + }) +}) diff --git a/node_modules/lunr/test/tokenizer_test.js b/node_modules/lunr/test/tokenizer_test.js new file mode 100644 index 0000000..acb619a --- /dev/null +++ b/node_modules/lunr/test/tokenizer_test.js @@ -0,0 +1,114 @@ +suite('lunr.tokenizer', function () { + var toString = function (o) { return o.toString() } + + test('splitting into tokens', function () { + var tokens = lunr.tokenizer('foo bar baz') + .map(toString) + + assert.sameMembers(['foo', 'bar', 'baz'], tokens) + }) + + test('downcases tokens', function () { + var tokens = lunr.tokenizer('Foo BAR BAZ') + .map(toString) + + assert.sameMembers(['foo', 'bar', 'baz'], tokens) + }) + + test('array of strings', function () { + var tokens = lunr.tokenizer(['foo', 'bar', 'baz']) + .map(toString) + + assert.sameMembers(['foo', 'bar', 'baz'], tokens) + }) + + test('undefined is converted to empty string', function () { + var tokens = lunr.tokenizer(['foo', undefined, 'baz']) + .map(toString) + + assert.sameMembers(['foo', '', 'baz'], tokens) + }) + + test('null is converted to empty string', function () { + var tokens = lunr.tokenizer(['foo', null, 'baz']) + .map(toString) + + assert.sameMembers(['foo', '', 'baz'], tokens) + }) + + test('multiple white space is stripped', function () { + var tokens = lunr.tokenizer(' foo bar baz ') + .map(toString) + + assert.sameMembers(['foo', 'bar', 'baz'], tokens) + }) + + test('handling null-like arguments', function () { + assert.lengthOf(lunr.tokenizer(), 0) + assert.lengthOf(lunr.tokenizer(undefined), 0) + assert.lengthOf(lunr.tokenizer(null), 0) + }) + + 
test('converting a date to tokens', function () { + var date = new Date(Date.UTC(2013, 0, 1, 12)) + + // NOTE: slicing here to prevent asserting on parts + // of the date that might be affected by the timezone + // the test is running in. + assert.sameMembers(['tue', 'jan', '01', '2013'], lunr.tokenizer(date).slice(0, 4).map(toString)) + }) + + test('converting a number to tokens', function () { + assert.equal('41', lunr.tokenizer(41).map(toString)) + }) + + test('converting a boolean to tokens', function () { + assert.equal('false', lunr.tokenizer(false).map(toString)) + }) + + test('converting an object to tokens', function () { + var obj = { + toString: function () { return 'custom object' } + } + + assert.sameMembers(lunr.tokenizer(obj).map(toString), ['custom', 'object']) + }) + + test('splits strings with hyphens', function () { + assert.sameMembers(lunr.tokenizer('foo-bar').map(toString), ['foo', 'bar']) + }) + + test('splits strings with hyphens and spaces', function () { + assert.sameMembers(lunr.tokenizer('foo - bar').map(toString), ['foo', 'bar']) + }) + + test('tracking the token index', function () { + var tokens = lunr.tokenizer('foo bar') + assert.equal(tokens[0].metadata.index, 0) + assert.equal(tokens[1].metadata.index, 1) + }) + + test('tracking the token position', function () { + var tokens = lunr.tokenizer('foo bar') + assert.deepEqual(tokens[0].metadata.position, [0, 3]) + assert.deepEqual(tokens[1].metadata.position, [4, 3]) + }) + + test('tracking the token position with additional left-hand whitespace', function () { + var tokens = lunr.tokenizer(' foo bar') + assert.deepEqual(tokens[0].metadata.position, [1, 3]) + assert.deepEqual(tokens[1].metadata.position, [5, 3]) + }) + + test('tracking the token position with additional right-hand whitespace', function () { + var tokens = lunr.tokenizer('foo bar ') + assert.deepEqual(tokens[0].metadata.position, [0, 3]) + assert.deepEqual(tokens[1].metadata.position, [4, 3]) + }) + + test('providing 
additional metadata', function () { + var tokens = lunr.tokenizer('foo bar', { 'hurp': 'durp' }) + assert.deepEqual(tokens[0].metadata.hurp, 'durp') + assert.deepEqual(tokens[1].metadata.hurp, 'durp') + }) +}) diff --git a/node_modules/lunr/test/trimmer_test.js b/node_modules/lunr/test/trimmer_test.js new file mode 100644 index 0000000..2f87d59 --- /dev/null +++ b/node_modules/lunr/test/trimmer_test.js @@ -0,0 +1,29 @@ +suite('lunr.trimmer', function () { + test('latin characters', function () { + var token = new lunr.Token ('hello') + assert.equal(lunr.trimmer(token).toString(), token.toString()) + }) + + suite('punctuation', function () { + var trimmerTest = function (description, str, expected) { + test(description, function () { + var token = new lunr.Token(str), + trimmed = lunr.trimmer(token).toString() + + assert.equal(expected, trimmed) + }) + } + + trimmerTest('full stop', 'hello.', 'hello') + trimmerTest('inner apostrophe', "it's", "it's") + trimmerTest('trailing apostrophe', "james'", 'james') + trimmerTest('exclamation mark', 'stop!', 'stop') + trimmerTest('comma', 'first,', 'first') + trimmerTest('brackets', '[tag]', 'tag') + }) + + test('is a registered pipeline function', function () { + assert.equal(lunr.trimmer.label, 'trimmer') + assert.equal(lunr.Pipeline.registeredFunctions['trimmer'], lunr.trimmer) + }) +}) diff --git a/node_modules/lunr/test/utils_test.js b/node_modules/lunr/test/utils_test.js new file mode 100644 index 0000000..f6bc7e5 --- /dev/null +++ b/node_modules/lunr/test/utils_test.js @@ -0,0 +1,77 @@ +suite('lunr.utils', function () { + suite('#clone', function () { + var subject = function (obj) { + setup(function () { + this.obj = obj + this.clone = lunr.utils.clone(obj) + }) + } + + suite('handles null', function () { + subject(null) + + test('returns null', function () { + assert.equal(null, this.clone) + assert.equal(this.obj, this.clone) + }) + }) + + suite('handles undefined', function () { + subject(undefined) + + 
test('returns null', function () { + assert.equal(undefined, this.clone) + assert.equal(this.obj, this.clone) + }) + }) + + suite('object with primatives', function () { + subject({ + number: 1, + string: 'foo', + bool: true + }) + + test('clones number correctly', function () { + assert.equal(this.obj.number, this.clone.number) + }) + + test('clones string correctly', function () { + assert.equal(this.obj.string, this.clone.string) + }) + + test('clones bool correctly', function () { + assert.equal(this.obj.bool, this.clone.bool) + }) + }) + + suite('object with array property', function () { + subject({ + array: [1, 2, 3] + }) + + test('clones array correctly', function () { + assert.deepEqual(this.obj.array, this.clone.array) + }) + + test('mutations on clone do not affect orginial', function () { + this.clone.array.push(4) + assert.notDeepEqual(this.obj.array, this.clone.array) + assert.equal(this.obj.array.length, 3) + assert.equal(this.clone.array.length, 4) + }) + }) + + suite('nested object', function () { + test('throws type error', function () { + assert.throws(function () { + lunr.utils.clone({ + 'foo': { + 'bar': 1 + } + }) + }, TypeError) + }) + }) + }) +}) diff --git a/node_modules/lunr/test/vector_test.js b/node_modules/lunr/test/vector_test.js new file mode 100644 index 0000000..9628ccf --- /dev/null +++ b/node_modules/lunr/test/vector_test.js @@ -0,0 +1,154 @@ +suite('lunr.Vector', function () { + var vectorFromArgs = function () { + var vector = new lunr.Vector + + Array.prototype.slice.call(arguments) + .forEach(function (el, i) { + vector.insert(i, el) + }) + + return vector + } + + suite('#magnitude', function () { + test('calculates magnitude of a vector', function () { + var vector = vectorFromArgs(4,5,6) + assert.equal(Math.sqrt(77), vector.magnitude()) + }) + }) + + suite('#dot', function () { + test('calculates dot product of two vectors', function () { + var v1 = vectorFromArgs(1, 3, -5), + v2 = vectorFromArgs(4, -2, -1) + + 
assert.equal(3, v1.dot(v2)) + }) + }) + + suite('#similarity', function () { + test('calculates the similarity between two vectors', function () { + var v1 = vectorFromArgs(1, 3, -5), + v2 = vectorFromArgs(4, -2, -1) + + assert.approximately(v1.similarity(v2), 0.5, 0.01) + }) + + test('empty vector', function () { + var vEmpty = new lunr.Vector, + v1 = vectorFromArgs(1) + + assert.equal(0, vEmpty.similarity(v1)) + assert.equal(0, v1.similarity(vEmpty)) + }) + + test('non-overlapping vector', function () { + var v1 = new lunr.Vector([1, 1]), + v2 = new lunr.Vector([2, 1]) + + assert.equal(0, v1.similarity(v2)) + assert.equal(0, v2.similarity(v1)) + }) + }) + + suite('#insert', function () { + test('invalidates magnitude cache', function () { + var vector = vectorFromArgs(4,5,6) + + assert.equal(Math.sqrt(77), vector.magnitude()) + + vector.insert(3, 7) + + assert.equal(Math.sqrt(126), vector.magnitude()) + }) + + test('keeps items in index specified order', function () { + var vector = new lunr.Vector + + vector.insert(2, 4) + vector.insert(1, 5) + vector.insert(0, 6) + + assert.deepEqual([6,5,4], vector.toArray()) + }) + + test('fails when duplicate entry', function () { + var vector = vectorFromArgs(4, 5, 6) + assert.throws(function () { vector.insert(0, 44) }) + }) + }) + + suite('#upsert', function () { + test('invalidates magnitude cache', function () { + var vector = vectorFromArgs(4,5,6) + + assert.equal(Math.sqrt(77), vector.magnitude()) + + vector.upsert(3, 7) + + assert.equal(Math.sqrt(126), vector.magnitude()) + }) + + test('keeps items in index specified order', function () { + var vector = new lunr.Vector + + vector.upsert(2, 4) + vector.upsert(1, 5) + vector.upsert(0, 6) + + assert.deepEqual([6,5,4], vector.toArray()) + }) + + test('calls fn for value on duplicate', function () { + var vector = vectorFromArgs(4, 5, 6) + vector.upsert(0, 4, function (current, passed) { return current + passed }) + assert.deepEqual([8, 5, 6], vector.toArray()) + }) + }) 
+ + suite('#positionForIndex', function () { + var vector = new lunr.Vector ([ + 1, 'a', + 2, 'b', + 4, 'c', + 7, 'd', + 11, 'e' + ]) + + test('at the beginning', function () { + assert.equal(0, vector.positionForIndex(0)) + }) + + test('at the end', function () { + assert.equal(10, vector.positionForIndex(20)) + }) + + test('consecutive', function () { + assert.equal(4, vector.positionForIndex(3)) + }) + + test('non-consecutive gap after', function () { + assert.equal(6, vector.positionForIndex(5)) + }) + + test('non-consecutive gap before', function () { + assert.equal(6, vector.positionForIndex(6)) + }) + + test('non-consecutive gave before and after', function () { + assert.equal(8, vector.positionForIndex(9)) + }) + + test('duplicate at the beginning', function () { + assert.equal(0, vector.positionForIndex(1)) + }) + + test('duplicate at the end', function () { + assert.equal(8, vector.positionForIndex(11)) + }) + + test('duplicate consecutive', function () { + assert.equal(4, vector.positionForIndex(4)) + }) + }) +}) diff --git a/node_modules/markdown-it/LICENSE b/node_modules/markdown-it/LICENSE new file mode 100644 index 0000000..7ffa058 --- /dev/null +++ b/node_modules/markdown-it/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2014 Vitaly Puzrin, Alex Kocharin. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/markdown-it/README.md b/node_modules/markdown-it/README.md new file mode 100644 index 0000000..6c79f3c --- /dev/null +++ b/node_modules/markdown-it/README.md @@ -0,0 +1,324 @@ +# markdown-it + +[![CI](https://github.com/markdown-it/markdown-it/actions/workflows/ci.yml/badge.svg)](https://github.com/markdown-it/markdown-it/actions/workflows/ci.yml) +[![NPM version](https://img.shields.io/npm/v/markdown-it.svg?style=flat)](https://www.npmjs.org/package/markdown-it) +[![Coverage Status](https://coveralls.io/repos/markdown-it/markdown-it/badge.svg?branch=master&service=github)](https://coveralls.io/github/markdown-it/markdown-it?branch=master) +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/markdown-it/markdown-it) + +> Markdown parser done right. Fast and easy to extend. + +__[Live demo](https://markdown-it.github.io)__ + +- Follows the __[CommonMark spec](http://spec.commonmark.org/)__ + adds syntax extensions & sugar (URL autolinking, typographer). +- Configurable syntax! You can add new rules and even replace existing ones. +- High speed. +- [Safe](https://github.com/markdown-it/markdown-it/tree/master/docs/security.md) by default. +- Community-written __[plugins](https://www.npmjs.org/browse/keyword/markdown-it-plugin)__ and [other packages](https://www.npmjs.org/browse/keyword/markdown-it) on npm. 
+ +__Table of content__ + +- [Install](#install) +- [Usage examples](#usage-examples) + - [Simple](#simple) + - [Init with presets and options](#init-with-presets-and-options) + - [Plugins load](#plugins-load) + - [Syntax highlighting](#syntax-highlighting) + - [Linkify](#linkify) +- [API](#api) +- [Syntax extensions](#syntax-extensions) + - [Manage rules](#manage-rules) +- [Benchmark](#benchmark) +- [markdown-it for enterprise](#markdown-it-for-enterprise) +- [Authors](#authors) +- [References / Thanks](#references--thanks) + +## Install + +**node.js**: + +```bash +npm install markdown-it +``` + +**browser (CDN):** + +- [jsDeliver CDN](http://www.jsdelivr.com/#!markdown-it "jsDelivr CDN") +- [cdnjs.com CDN](https://cdnjs.com/libraries/markdown-it "cdnjs.com") + + +## Usage examples + +See also: + +- __[API documentation](https://markdown-it.github.io/markdown-it/)__ - for more + info and examples. +- [Development info](https://github.com/markdown-it/markdown-it/tree/master/docs) - + for plugins writers. + + +### Simple + +```js +// node.js +// can use `require('markdown-it')` for CJS +import markdownit from 'markdown-it' +const md = markdownit() +const result = md.render('# markdown-it rulezz!'); + +// browser with UMD build, added to "window" on script load +// Note, there is no dash in "markdownit". +const md = window.markdownit(); +const result = md.render('# markdown-it rulezz!'); +``` + +Single line rendering, without paragraph wrap: + +```js +import markdownit from 'markdown-it' +const md = markdownit() +const result = md.renderInline('__markdown-it__ rulezz!'); +``` + + +### Init with presets and options + +(*) presets define combinations of active rules and options. Can be +`"commonmark"`, `"zero"` or `"default"` (if skipped). See +[API docs](https://markdown-it.github.io/markdown-it/#MarkdownIt.new) for more details. 
+ +```js +import markdownit from 'markdown-it' + +// commonmark mode +const md = markdownit('commonmark') + +// default mode +const md = markdownit() + +// enable everything +const md = markdownit({ + html: true, + linkify: true, + typographer: true +}) + +// full options list (defaults) +const md = markdownit({ + // Enable HTML tags in source + html: false, + + // Use '/' to close single tags (
). + // This is only for full CommonMark compatibility. + xhtmlOut: false, + + // Convert '\n' in paragraphs into
+ breaks: false, + + // CSS language prefix for fenced blocks. Can be + // useful for external highlighters. + langPrefix: 'language-', + + // Autoconvert URL-like text to links + linkify: false, + + // Enable some language-neutral replacement + quotes beautification + // For the full list of replacements, see https://github.com/markdown-it/markdown-it/blob/master/lib/rules_core/replacements.mjs + typographer: false, + + // Double + single quotes replacement pairs, when typographer enabled, + // and smartquotes on. Could be either a String or an Array. + // + // For example, you can use '«»„“' for Russian, '„“‚‘' for German, + // and ['«\xA0', '\xA0»', '‹\xA0', '\xA0›'] for French (including nbsp). + quotes: '“”‘’', + + // Highlighter function. Should return escaped HTML, + // or '' if the source string is not changed and should be escaped externally. + // If result starts with ` or ``): + +```js +import markdownit from 'markdown-it' +import hljs from 'highlight.js' // https://highlightjs.org + +// Actual default values +const md = markdownit({ + highlight: function (str, lang) { + if (lang && hljs.getLanguage(lang)) { + try { + return '
' +
+               hljs.highlight(str, { language: lang, ignoreIllegals: true }).value +
+               '
'; + } catch (__) {} + } + + return '
' + md.utils.escapeHtml(str) + '
'; + } +}); +``` + +### Linkify + +`linkify: true` uses [linkify-it](https://github.com/markdown-it/linkify-it). To +configure linkify-it, access the linkify instance through `md.linkify`: + +```js +md.linkify.set({ fuzzyEmail: false }); // disables converting email to link +``` + + +## API + +__[API documentation](https://markdown-it.github.io/markdown-it/)__ + +If you are going to write plugins, please take a look at +[Development info](https://github.com/markdown-it/markdown-it/tree/master/docs). + + +## Syntax extensions + +Embedded (enabled by default): + +- [Tables](https://help.github.com/articles/organizing-information-with-tables/) (GFM) +- [Strikethrough](https://help.github.com/articles/basic-writing-and-formatting-syntax/#styling-text) (GFM) + +Via plugins: + +- [subscript](https://github.com/markdown-it/markdown-it-sub) +- [superscript](https://github.com/markdown-it/markdown-it-sup) +- [footnote](https://github.com/markdown-it/markdown-it-footnote) +- [definition list](https://github.com/markdown-it/markdown-it-deflist) +- [abbreviation](https://github.com/markdown-it/markdown-it-abbr) +- [emoji](https://github.com/markdown-it/markdown-it-emoji) +- [custom container](https://github.com/markdown-it/markdown-it-container) +- [insert](https://github.com/markdown-it/markdown-it-ins) +- [mark](https://github.com/markdown-it/markdown-it-mark) +- ... and [others](https://www.npmjs.org/browse/keyword/markdown-it-plugin) + + +### Manage rules + +By default all rules are enabled, but can be restricted by options. On plugin +load all its rules are enabled automatically. 
+ +```js +import markdownit from 'markdown-it' + +// Activate/deactivate rules, with currying +const md = markdownit() + .disable(['link', 'image']) + .enable(['link']) + .enable('image'); + +// Enable everything +const md = markdownit({ + html: true, + linkify: true, + typographer: true, +}); +``` + +You can find all rules in sources: + +- [`parser_core.mjs`](lib/parser_core.mjs) +- [`parser_block.mjs`](lib/parser_block.mjs) +- [`parser_inline.mjs`](lib/parser_inline.mjs) + + +## Benchmark + +Here is the result of readme parse at MB Pro Retina 2013 (2.4 GHz): + +```bash +npm run benchmark-deps +benchmark/benchmark.mjs readme + +Selected samples: (1 of 28) + > README + +Sample: README.md (7774 bytes) + > commonmark-reference x 1,222 ops/sec ±0.96% (97 runs sampled) + > current x 743 ops/sec ±0.84% (97 runs sampled) + > current-commonmark x 1,568 ops/sec ±0.84% (98 runs sampled) + > marked x 1,587 ops/sec ±4.31% (93 runs sampled) +``` + +__Note.__ CommonMark version runs with [simplified link normalizers](https://github.com/markdown-it/markdown-it/blob/master/benchmark/implementations/current-commonmark/index.mjs) +for more "honest" compare. Difference is ≈1.5×. + +As you can see, `markdown-it` doesn't pay with speed for its flexibility. +Slowdown of "full" version caused by additional features not available in +other implementations. + + +## markdown-it for enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of `markdown-it` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. 
[Learn more.](https://tidelift.com/subscription/pkg/npm-markdown-it?utm_source=npm-markdown-it&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) + + +## Authors + +- Alex Kocharin [github/rlidwka](https://github.com/rlidwka) +- Vitaly Puzrin [github/puzrin](https://github.com/puzrin) + +_markdown-it_ is the result of the decision of the authors who contributed to +99% of the _Remarkable_ code to move to a project with the same authorship but +new leadership (Vitaly and Alex). It's not a fork. + +## References / Thanks + +Big thanks to [John MacFarlane](https://github.com/jgm) for his work on the +CommonMark spec and reference implementations. His work saved us a lot of time +during this project's development. + +**Related Links:** + +- https://github.com/jgm/CommonMark - reference CommonMark implementations in C & JS, + also contains latest spec & online demo. +- http://talk.commonmark.org - CommonMark forum, good place to collaborate + developers' efforts. + +**Ports** + +- [motion-markdown-it](https://github.com/digitalmoksha/motion-markdown-it) - Ruby/RubyMotion +- [markdown-it-py](https://github.com/ExecutableBookProject/markdown-it-py)- Python diff --git a/node_modules/markdown-it/bin/markdown-it.mjs b/node_modules/markdown-it/bin/markdown-it.mjs new file mode 100755 index 0000000..84626f1 --- /dev/null +++ b/node_modules/markdown-it/bin/markdown-it.mjs @@ -0,0 +1,107 @@ +#!/usr/bin/env node +/* eslint no-console:0 */ + +import fs from 'node:fs' +import argparse from 'argparse' +import markdownit from '../index.mjs' + +const cli = new argparse.ArgumentParser({ + prog: 'markdown-it', + add_help: true +}) + +cli.add_argument('-v', '--version', { + action: 'version', + version: JSON.parse(fs.readFileSync(new URL('../package.json', import.meta.url))).version +}) + +cli.add_argument('--no-html', { + help: 'Disable embedded HTML', + action: 'store_true' +}) + +cli.add_argument('-l', '--linkify', { + help: 'Autolink text', + action: 'store_true' +}) + 
+cli.add_argument('-t', '--typographer', { + help: 'Enable smartquotes and other typographic replacements', + action: 'store_true' +}) + +cli.add_argument('--trace', { + help: 'Show stack trace on error', + action: 'store_true' +}) + +cli.add_argument('file', { + help: 'File to read', + nargs: '?', + default: '-' +}) + +cli.add_argument('-o', '--output', { + help: 'File to write', + default: '-' +}) + +const options = cli.parse_args() + +function readFile (filename, encoding, callback) { + if (options.file === '-') { + // read from stdin + const chunks = [] + + process.stdin.on('data', function (chunk) { chunks.push(chunk) }) + + process.stdin.on('end', function () { + return callback(null, Buffer.concat(chunks).toString(encoding)) + }) + } else { + fs.readFile(filename, encoding, callback) + } +} + +readFile(options.file, 'utf8', function (err, input) { + let output + + if (err) { + if (err.code === 'ENOENT') { + console.error('File not found: ' + options.file) + process.exit(2) + } + + console.error( + (options.trace && err.stack) || + err.message || + String(err)) + + process.exit(1) + } + + const md = markdownit({ + html: !options.no_html, + xhtmlOut: false, + typographer: options.typographer, + linkify: options.linkify + }) + + try { + output = md.render(input) + } catch (e) { + console.error( + (options.trace && e.stack) || + e.message || + String(e)) + + process.exit(1) + } + + if (options.output === '-') { + // write to stdout + process.stdout.write(output) + } else { + fs.writeFileSync(options.output, output) + } +}) diff --git a/node_modules/markdown-it/index.mjs b/node_modules/markdown-it/index.mjs new file mode 100644 index 0000000..f7ba45f --- /dev/null +++ b/node_modules/markdown-it/index.mjs @@ -0,0 +1 @@ +export { default } from './lib/index.mjs' diff --git a/node_modules/markdown-it/package.json b/node_modules/markdown-it/package.json new file mode 100644 index 0000000..4d55a3e --- /dev/null +++ b/node_modules/markdown-it/package.json @@ -0,0 +1,92 
@@ +{ + "name": "markdown-it", + "version": "14.1.0", + "description": "Markdown-it - modern pluggable markdown parser.", + "keywords": [ + "markdown", + "parser", + "commonmark", + "markdown-it", + "markdown-it-plugin" + ], + "repository": "markdown-it/markdown-it", + "license": "MIT", + "main": "dist/index.cjs.js", + "module": "index.mjs", + "exports": { + ".": { + "import": "./index.mjs", + "require": "./dist/index.cjs.js" + }, + "./*": { + "require": "./*", + "import": "./*" + } + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + }, + "scripts": { + "lint": "eslint .", + "test": "npm run lint && CJS_ONLY=1 npm run build && c8 --exclude dist --exclude test -r text -r html -r lcov mocha && node support/specsplit.mjs", + "doc": "node support/build_doc.mjs", + "gh-doc": "npm run doc && gh-pages -d apidoc -f", + "demo": "npm run lint && node support/build_demo.mjs", + "gh-demo": "npm run demo && gh-pages -d demo -f -b master -r git@github.com:markdown-it/markdown-it.github.io.git", + "build": "rollup -c support/rollup.config.mjs", + "benchmark-deps": "npm install --prefix benchmark/extra/ -g marked@0.3.6 commonmark@0.26.0 markdown-it/markdown-it.git#2.2.1", + "specsplit": "support/specsplit.mjs good -o test/fixtures/commonmark/good.txt && support/specsplit.mjs bad -o test/fixtures/commonmark/bad.txt && support/specsplit.mjs", + "todo": "grep 'TODO' -n -r ./lib 2>/dev/null", + "prepublishOnly": "npm test && npm run build && npm run gh-demo && npm run gh-doc" + }, + "files": [ + "index.mjs", + "lib/", + "dist/" + ], + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "devDependencies": { + "@rollup/plugin-babel": "^6.0.4", + "@rollup/plugin-commonjs": "^25.0.7", + "@rollup/plugin-node-resolve": "^15.2.3", + "@rollup/plugin-terser": "^0.4.4", + "ansi": "^0.3.0", + "benchmark": "~2.1.0", + "c8": "^8.0.1", + "chai": "^4.2.0", + "eslint": "^8.4.1", 
+ "eslint-config-standard": "^17.1.0", + "express": "^4.14.0", + "gh-pages": "^6.1.0", + "highlight.js": "^11.9.0", + "jest-worker": "^29.7.0", + "markdown-it-abbr": "^2.0.0", + "markdown-it-container": "^4.0.0", + "markdown-it-deflist": "^3.0.0", + "markdown-it-emoji": "^3.0.0", + "markdown-it-footnote": "^4.0.0", + "markdown-it-for-inline": "^2.0.1", + "markdown-it-ins": "^4.0.0", + "markdown-it-mark": "^4.0.0", + "markdown-it-sub": "^2.0.0", + "markdown-it-sup": "^2.0.0", + "markdown-it-testgen": "^0.1.3", + "mocha": "^10.2.0", + "ndoc": "^6.0.0", + "needle": "^3.0.0", + "rollup": "^4.5.0", + "shelljs": "^0.8.4", + "supertest": "^6.0.1" + }, + "mocha": { + "inline-diffs": true, + "timeout": 60000 + } +} diff --git a/node_modules/mdurl/LICENSE b/node_modules/mdurl/LICENSE new file mode 100644 index 0000000..3b2c7bf --- /dev/null +++ b/node_modules/mdurl/LICENSE @@ -0,0 +1,45 @@ +Copyright (c) 2015 Vitaly Puzrin, Alex Kocharin. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
+ +-------------------------------------------------------------------------------- + +.parse() is based on Joyent's node.js `url` code: + +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/mdurl/README.md b/node_modules/mdurl/README.md new file mode 100644 index 0000000..c7f9e95 --- /dev/null +++ b/node_modules/mdurl/README.md @@ -0,0 +1,102 @@ +# mdurl + +[![CI](https://github.com/markdown-it/mdurl/actions/workflows/ci.yml/badge.svg)](https://github.com/markdown-it/mdurl/actions/workflows/ci.yml) +[![NPM version](https://img.shields.io/npm/v/mdurl.svg?style=flat)](https://www.npmjs.org/package/mdurl) + +> URL utilities for [markdown-it](https://github.com/markdown-it/markdown-it) parser. + + +## API + +### .encode(str [, exclude, keepEncoded]) -> String + +Percent-encode a string, avoiding double encoding. Don't touch `/a-zA-Z0-9/` + +excluded chars + `/%[a-fA-F0-9]{2}/` (if not disabled). 
Broken surrorates are +replaced with `U+FFFD`. + +Params: + +- __str__ - input string. +- __exclude__ - optional, `;/?:@&=+$,-_.!~*'()#`. Additional chars to keep intact + (except `/a-zA-Z0-9/`). +- __keepEncoded__ - optional, `true`. By default it skips already encoded sequences + (`/%[a-fA-F0-9]{2}/`). If set to `false`, `%` will be encoded. + + +### encode.defaultChars, encode.componentChars + +You can use these constants as second argument to `encode` function. + + - `encode.defaultChars` is the same exclude set as in the standard `encodeURI()` function + - `encode.componentChars` is the same exclude set as in the `encodeURIComponent()` function + +For example, `encode('something', encode.componentChars, true)` is roughly the equivalent of +the `encodeURIComponent()` function (except `encode()` doesn't throw). + + +### .decode(str [, exclude]) -> String + +Decode percent-encoded string. Invalid percent-encoded sequences (e.g. `%2G`) +are left as is. Invalid UTF-8 characters are replaced with `U+FFFD`. + + +Params: + +- __str__ - input string. +- __exclude__ - set of characters to leave encoded, optional, `;/?:@&=+$,#`. + + +### decode.defaultChars, decode.componentChars + +You can use these constants as second argument to `decode` function. + + - `decode.defaultChars` is the same exclude set as in the standard `decodeURI()` function + - `decode.componentChars` is the same exclude set as in the `decodeURIComponent()` function + +For example, `decode('something', decode.defaultChars)` has the same behavior as +`decodeURI('something')` on a correctly encoded input. + + +### .parse(url, slashesDenoteHost) -> urlObs + +Parse url string. Similar to node's [url.parse](http://nodejs.org/api/url.html#url_url_parse_urlstr_parsequerystring_slashesdenotehost), but without any +normalizations and query string parse. + + - __url__ - input url (string) + - __slashesDenoteHost__ - if url starts with `//`, expect a hostname after it. Optional, `false`. 
+ +Result (hash): + +- protocol +- slashes +- auth +- port +- hostname +- hash +- search +- pathname + +Difference with node's `url`: + +1. No leading slash in paths, e.g. in `url.parse('http://foo?bar')` pathname is + ``, not `/` +2. Backslashes are not replaced with slashes, so `http:\\example.org\` is + treated like a relative path +3. Trailing colon is treated like a part of the path, i.e. in + `http://example.org:foo` pathname is `:foo` +4. Nothing is URL-encoded in the resulting object, (in joyent/node some chars + in auth and paths are encoded) +5. `url.parse()` does not have `parseQueryString` argument +6. Removed extraneous result properties: `host`, `path`, `query`, etc., + which can be constructed using other parts of the url. + + +### .format(urlObject) + +Format an object previously obtained with `.parse()` function. Similar to node's +[url.format](http://nodejs.org/api/url.html#url_url_format_urlobj). + + +## License + +[MIT](https://github.com/markdown-it/mdurl/blob/master/LICENSE) diff --git a/node_modules/mdurl/index.mjs b/node_modules/mdurl/index.mjs new file mode 100644 index 0000000..fd78c37 --- /dev/null +++ b/node_modules/mdurl/index.mjs @@ -0,0 +1,11 @@ +import decode from './lib/decode.mjs' +import encode from './lib/encode.mjs' +import format from './lib/format.mjs' +import parse from './lib/parse.mjs' + +export { + decode, + encode, + format, + parse +} diff --git a/node_modules/mdurl/package.json b/node_modules/mdurl/package.json new file mode 100644 index 0000000..6e89beb --- /dev/null +++ b/node_modules/mdurl/package.json @@ -0,0 +1,37 @@ +{ + "name": "mdurl", + "version": "2.0.0", + "description": "URL utilities for markdown-it", + "repository": "markdown-it/mdurl", + "license": "MIT", + "main": "build/index.cjs.js", + "module": "index.mjs", + "exports": { + ".": { + "require": "./build/index.cjs.js", + "import": "./index.mjs" + }, + "./*": { + "require": "./*", + "import": "./*" + } + }, + "scripts": { + "lint": "eslint .", + "build": 
"rollup -c", + "test": "npm run lint && npm run build && c8 --exclude build --exclude test -r text -r html -r lcov mocha", + "prepublishOnly": "npm run lint && npm run build" + }, + "files": [ + "index.mjs", + "lib/", + "build/" + ], + "devDependencies": { + "c8": "^8.0.1", + "eslint": "^8.54.0", + "eslint-config-standard": "^17.1.0", + "mocha": "^10.2.0", + "rollup": "^4.6.1" + } +} diff --git a/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE new file mode 100644 index 0000000..1493534 --- /dev/null +++ b/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minimatch/README.md b/node_modules/minimatch/README.md new file mode 100644 index 0000000..3c97a02 --- /dev/null +++ b/node_modules/minimatch/README.md @@ -0,0 +1,454 @@ +# minimatch + +A minimal matching utility. + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```js +// hybrid module, load with require() or import +import { minimatch } from 'minimatch' +// or: +const { minimatch } = require('minimatch') + +minimatch('bar.foo', '*.foo') // true! +minimatch('bar.foo', '*.bar') // false! 
+minimatch('bar.foo', '*.+(bar|foo)', { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +- Brace Expansion +- Extended glob matching +- "Globstar" `**` matching +- [Posix character + classes](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html), + like `[[:alpha:]]`, supporting the full range of Unicode + characters. For example, `[[:alpha:]]` will match against + `'é'`, though `[a-zA-Z]` will not. Collating symbol and set + matching is not supported, so `[[=e=]]` will _not_ match `'é'` + and `[[.ch.]]` will not match `'ch'` in locales where `ch` is + considered a single character. + +See: + +- `man sh` +- `man bash` [Pattern + Matching](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) +- `man 3 fnmatch` +- `man 5 gitignore` + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes in patterns +will always be interpreted as escape characters, not path separators. + +Note that `\` or `/` _will_ be interpreted as path separators in paths on +Windows, and will match against `/` in glob expressions. + +So just always use `/` in patterns. + +### UNC Paths + +On Windows, UNC paths like `//?/c:/...` or +`//ComputerName/Share/...` are handled specially. + +- Patterns starting with a double-slash followed by some + non-slash characters will preserve their double-slash. As a + result, a pattern like `//*` will match `//x`, but not `/x`. +- Patterns staring with `//?/:` will _not_ treat + the `?` as a wildcard character. Instead, it will be treated + as a normal string. +- Patterns starting with `//?/:/...` will match + file paths starting with `:/...`, and vice versa, + as if the `//?/` was not present. 
This behavior only is + present when the drive letters are a case-insensitive match to + one another. The remaining portions of the path/pattern are + compared case sensitively, unless `nocase:true` is set. + +Note that specifying a UNC path using `\` characters as path +separators is always allowed in the file path argument, but only +allowed in the pattern argument when `windowsPathsNoEscape: true` +is set in the options. + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require('minimatch').Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +- `pattern` The original pattern the minimatch object represents. +- `options` The options supplied to the constructor. +- `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +- `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +- `negate` True if the pattern is negated. +- `comment` True if the pattern is a comment. +- `empty` True if the pattern is `""`. + +### Methods + +- `makeRe()` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +- `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +- `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. 
This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. +- `hasMagic()` Returns true if the parsed pattern contains any + magic characters. Returns false if all comparator parts are + string literals. If the `magicalBraces` option is set on the + constructor, then it will consider brace expansions which are + not otherwise magical to be magic. If not set, then a pattern + like `a{b,c}d` will return `false`, because neither `abd` nor + `acd` contain any special glob characters. + + This does **not** mean that the pattern string can be used as a + literal filename, as it may contain magic glob characters that + are escaped. For example, the pattern `\\*` or `[*]` would not + be considered to have magic, as the matching portion parses to + the literal string `'*'` and would match a path named `'*'`, + not `'\\*'` or `'[*]'`. The `minimatch.unescape()` method may + be used to remove escape characters. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, '*.js', { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter('*.js', { matchBase: true })) +``` + +### minimatch.escape(pattern, options = {}) + +Escape all magic characters in a glob pattern, so that it will +only ever match literal strings + +If the `windowsPathsNoEscape` option is used, then characters are +escaped by wrapping in `[]`, because a magic character wrapped in +a character class can only be satisfied by that exact character. + +Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot +be escaped or unescaped. 
+ +### minimatch.unescape(pattern, options = {}) + +Un-escape a glob string that may contain some escaped characters. + +If the `windowsPathsNoEscape` option is used, then square-brace +escapes are removed, but not backslash escapes. For example, it +will turn the string `'[*]'` into `*`, but it will not turn +`'\\*'` into `'*'`, because `\` is a path separator in +`windowsPathsNoEscape` mode. + +When `windowsPathsNoEscape` is not set, then both brace escapes +and backslash escapes are removed. + +Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot +be escaped or unescaped. + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, '*.js', { matchBase: true }) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nocaseMagicOnly + +When used with `{nocase: true}`, create regular expressions that +are case-insensitive, but leave string match portions untouched. +Has no effect when used without `{nocase: true}` + +Useful when some other form of case-insensitive matching is used, +or if the original string representation is useful in some other +way. 
+ +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### magicalBraces + +This only affects the results of the `Minimatch.hasMagic` method. + +If the pattern contains brace expansions, such as `a{b,c}d`, but +no other magic characters, then the `Minimatch.hasMagic()` method +will return `false` by default. When this option set, it will +return `true` for brace expansion as well as other magic glob +characters. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + +### partial + +Compare a partial path to a pattern. As long as the parts of the path that +are present are not contradicted by the pattern, it will be treated as a +match. This is useful in applications where you're walking through a +folder structure, and don't yet have the full path, but want to ensure that +you do not walk down paths that can never be a match. + +For example, + +```js +minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d +minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d +minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a +``` + +### windowsPathsNoEscape + +Use `\\` as a path separator _only_, and _never_ as an escape +character. If set, all `\\` characters are replaced with `/` in +the pattern. 
Note that this makes it **impossible** to match +against paths containing literal glob pattern characters, but +allows matching with patterns constructed using `path.join()` and +`path.resolve()` on Windows platforms, mimicking the (buggy!) +behavior of earlier versions on Windows. Please use with +caution, and be mindful of [the caveat about Windows +paths](#windows). + +For legacy reasons, this is also set if +`options.allowWindowsEscape` is set to the exact value `false`. + +### windowsNoMagicRoot + +When a pattern starts with a UNC path or drive letter, and in +`nocase:true` mode, do not convert the root portions of the +pattern into a case-insensitive regular expression, and instead +leave them as strings. + +This is the default when the platform is `win32` and +`nocase:true` is set. + +### preserveMultipleSlashes + +By default, multiple `/` characters (other than the leading `//` +in a UNC path, see "UNC Paths" above) are treated as a single +`/`. + +That is, a pattern like `a///b` will match the file path `a/b`. + +Set `preserveMultipleSlashes: true` to suppress this behavior. + +### optimizationLevel + +A number indicating the level of optimization that should be done +to the pattern prior to parsing and using it for matches. + +Globstar parts `**` are always converted to `*` when `noglobstar` +is set, and multiple adjacent `**` parts are converted into a +single `**` (ie, `a/**/**/b` will be treated as `a/**/b`, as this +is equivalent in all cases). + +- `0` - Make no further changes. In this mode, `.` and `..` are + maintained in the pattern, meaning that they must also appear + in the same position in the test path string. Eg, a pattern + like `a/*/../c` will match the string `a/b/../c` but not the + string `a/c`. +- `1` - (default) Remove cases where a double-dot `..` follows a + pattern portion that is not `**`, `.`, `..`, or empty `''`. 
For + example, the pattern `./a/b/../*` is converted to `./a/*`, and + so it will match the path string `./a/c`, but not the path + string `./a/b/../c`. Dots and empty path portions in the + pattern are preserved. +- `2` (or higher) - Much more aggressive optimizations, suitable + for use with file-walking cases: + + - Remove cases where a double-dot `..` follows a pattern + portion that is not `**`, `.`, or empty `''`. Remove empty + and `.` portions of the pattern, where safe to do so (ie, + anywhere other than the last position, the first position, or + the second position in a pattern starting with `/`, as this + may indicate a UNC path on Windows). + - Convert patterns containing `
/**/../

/` into the + equivalent `

/{..,**}/

/`, where `

` is a + a pattern portion other than `.`, `..`, `**`, or empty + `''`. + - Dedupe patterns where a `**` portion is present in one and + omitted in another, and it is not the final path portion, and + they are otherwise equivalent. So `{a/**/b,a/b}` becomes + `a/**/b`, because `**` matches against an empty path portion. + - Dedupe patterns where a `*` portion is present in one, and a + non-dot pattern other than `**`, `.`, `..`, or `''` is in the + same position in the other. So `a/{*,x}/b` becomes `a/*/b`, + because `*` can match against `x`. + + While these optimizations improve the performance of + file-walking use cases such as [glob](http://npm.im/glob) (ie, + the reason this module exists), there are cases where it will + fail to match a literal string that would have been matched in + optimization level 1 or 0. + + Specifically, while the `Minimatch.match()` method will + optimize the file path string in the same ways, resulting in + the same matches, it will fail when tested with the regular + expression provided by `Minimatch.makeRe()`, unless the path + string is first processed with + `minimatch.levelTwoFileOptimize()` or similar. + +### platform + +When set to `win32`, this will trigger all windows-specific +behaviors (special handling for UNC paths, and treating `\` as +separators in file paths for comparison.) + +Defaults to the value of `process.platform`. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a +worthwhile goal, some discrepancies exist between minimatch and +other implementations. Some are intentional, and some are +unavoidable. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. 
Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +Negated extglob patterns are handled as closely as possible to +Bash semantics, but there are some cases with negative extglobs +which are exceedingly difficult to express in a JavaScript +regular expression. In particular the negated pattern +`!(*|)*` will in bash match anything that does +not start with ``. However, +`!(*)*` _will_ match paths starting with +``, because the empty string can match against +the negated portion. In this library, `!(*|)*` +will _not_ match any pattern starting with ``, due to a +difference in precisely which patterns are considered "greedy" in +Regular Expressions vs bash path expansion. 
This may be fixable, +but not without incurring some complexity and performance costs, +and the trade-off seems to not be worth pursuing. + +Note that `fnmatch(3)` in libc is an extremely naive string comparison +matcher, which does not do anything special for slashes. This library is +designed to be used in glob searching and file walkers, and so it does do +special things with `/`. Thus, `foo*` will not match `foo/bar` in this +library, even though it would in `fnmatch(3)`. diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json new file mode 100644 index 0000000..01fc48e --- /dev/null +++ b/node_modules/minimatch/package.json @@ -0,0 +1,82 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "9.0.5", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . 
--loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.0", + "@types/node": "^18.15.11", + "@types/tap": "^15.0.8", + "eslint-config-prettier": "^8.6.0", + "mkdirp": "1", + "prettier": "^2.8.2", + "tap": "^18.7.2", + "ts-node": "^10.9.1", + "tshy": "^1.12.0", + "typedoc": "^0.23.21", + "typescript": "^4.9.3" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "license": "ISC", + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "type": "module" +} diff --git a/node_modules/punycode.js/LICENSE-MIT.txt b/node_modules/punycode.js/LICENSE-MIT.txt new file mode 100644 index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/punycode.js/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/punycode.js/README.md b/node_modules/punycode.js/README.md new file mode 100644 index 0000000..f611016 --- /dev/null +++ b/node_modules/punycode.js/README.md @@ -0,0 +1,148 @@ +# Punycode.js [![punycode on npm](https://img.shields.io/npm/v/punycode)](https://www.npmjs.com/package/punycode) [![](https://data.jsdelivr.com/v1/package/npm/punycode/badge)](https://www.jsdelivr.com/package/npm/punycode) + +Punycode.js is a robust Punycode converter that fully complies to [RFC 3492](https://tools.ietf.org/html/rfc3492) and [RFC 5891](https://tools.ietf.org/html/rfc5891). + +This JavaScript library is the result of comparing, optimizing and documenting different open-source implementations of the Punycode algorithm: + +* [The C example code from RFC 3492](https://tools.ietf.org/html/rfc3492#appendix-C) +* [`punycode.c` by _Markus W. 
Scherer_ (IBM)](http://opensource.apple.com/source/ICU/ICU-400.42/icuSources/common/punycode.c) +* [`punycode.c` by _Ben Noordhuis_](https://github.com/bnoordhuis/punycode/blob/master/punycode.c) +* [JavaScript implementation by _some_](http://stackoverflow.com/questions/183485/can-anyone-recommend-a-good-free-javascript-for-punycode-to-unicode-conversion/301287#301287) +* [`punycode.js` by _Ben Noordhuis_](https://github.com/joyent/node/blob/426298c8c1c0d5b5224ac3658c41e7c2a3fe9377/lib/punycode.js) (note: [not fully compliant](https://github.com/joyent/node/issues/2072)) + +This project was [bundled](https://github.com/joyent/node/blob/master/lib/punycode.js) with Node.js from [v0.6.2+](https://github.com/joyent/node/compare/975f1930b1...61e796decc) until [v7](https://github.com/nodejs/node/pull/7941) (soft-deprecated). + +This project provides a CommonJS module that uses ES2015+ features and JavaScript module, which work in modern Node.js versions and browsers. For the old Punycode.js version that offers the same functionality in a UMD build with support for older pre-ES2015 runtimes, including Rhino, Ringo, and Narwhal, see [v1.4.1](https://github.com/mathiasbynens/punycode.js/releases/tag/v1.4.1). + +## Installation + +Via [npm](https://www.npmjs.com/): + +```bash +npm install punycode --save +``` + +In [Node.js](https://nodejs.org/): + +> ⚠️ Note that userland modules don't hide core modules. +> For example, `require('punycode')` still imports the deprecated core module even if you executed `npm install punycode`. +> Use `require('punycode/')` to import userland modules rather than core modules. + +```js +const punycode = require('punycode/'); +``` + +## API + +### `punycode.decode(string)` + +Converts a Punycode string of ASCII symbols to a string of Unicode symbols. 
+ +```js +// decode domain name parts +punycode.decode('maana-pta'); // 'mañana' +punycode.decode('--dqo34k'); // '☃-⌘' +``` + +### `punycode.encode(string)` + +Converts a string of Unicode symbols to a Punycode string of ASCII symbols. + +```js +// encode domain name parts +punycode.encode('mañana'); // 'maana-pta' +punycode.encode('☃-⌘'); // '--dqo34k' +``` + +### `punycode.toUnicode(input)` + +Converts a Punycode string representing a domain name or an email address to Unicode. Only the Punycoded parts of the input will be converted, i.e. it doesn’t matter if you call it on a string that has already been converted to Unicode. + +```js +// decode domain names +punycode.toUnicode('xn--maana-pta.com'); +// → 'mañana.com' +punycode.toUnicode('xn----dqo34k.com'); +// → '☃-⌘.com' + +// decode email addresses +punycode.toUnicode('джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq'); +// → 'джумла@джpумлатест.bрфa' +``` + +### `punycode.toASCII(input)` + +Converts a lowercased Unicode string representing a domain name or an email address to Punycode. Only the non-ASCII parts of the input will be converted, i.e. it doesn’t matter if you call it with a domain that’s already in ASCII. + +```js +// encode domain names +punycode.toASCII('mañana.com'); +// → 'xn--maana-pta.com' +punycode.toASCII('☃-⌘.com'); +// → 'xn----dqo34k.com' + +// encode email addresses +punycode.toASCII('джумла@джpумлатест.bрфa'); +// → 'джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq' +``` + +### `punycode.ucs2` + +#### `punycode.ucs2.decode(string)` + +Creates an array containing the numeric code point values of each Unicode symbol in the string. While [JavaScript uses UCS-2 internally](https://mathiasbynens.be/notes/javascript-encoding), this function will convert a pair of surrogate halves (each of which UCS-2 exposes as separate characters) into a single code point, matching UTF-16. 
+ +```js +punycode.ucs2.decode('abc'); +// → [0x61, 0x62, 0x63] +// surrogate pair for U+1D306 TETRAGRAM FOR CENTRE: +punycode.ucs2.decode('\uD834\uDF06'); +// → [0x1D306] +``` + +#### `punycode.ucs2.encode(codePoints)` + +Creates a string based on an array of numeric code point values. + +```js +punycode.ucs2.encode([0x61, 0x62, 0x63]); +// → 'abc' +punycode.ucs2.encode([0x1D306]); +// → '\uD834\uDF06' +``` + +### `punycode.version` + +A string representing the current Punycode.js version number. + +## For maintainers + +### How to publish a new release + +1. On the `main` branch, bump the version number in `package.json`: + + ```sh + npm version patch -m 'Release v%s' + ``` + + Instead of `patch`, use `minor` or `major` [as needed](https://semver.org/). + + Note that this produces a Git commit + tag. + +1. Push the release commit and tag: + + ```sh + git push && git push --tags + ``` + + Our CI then automatically publishes the new release to npm, under both the [`punycode`](https://www.npmjs.com/package/punycode) and [`punycode.js`](https://www.npmjs.com/package/punycode.js) names. + +## Author + +| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](https://mathiasbynens.be/) | + +## License + +Punycode.js is available under the [MIT](https://mths.be/mit) license. 
diff --git a/node_modules/punycode.js/package.json b/node_modules/punycode.js/package.json new file mode 100644 index 0000000..7794798 --- /dev/null +++ b/node_modules/punycode.js/package.json @@ -0,0 +1,58 @@ +{ + "name": "punycode.js", + "version": "2.3.1", + "description": "A robust Punycode converter that fully complies to RFC 3492 and RFC 5891, and works on nearly all JavaScript platforms.", + "homepage": "https://mths.be/punycode", + "main": "punycode.js", + "jsnext:main": "punycode.es6.js", + "module": "punycode.es6.js", + "engines": { + "node": ">=6" + }, + "keywords": [ + "punycode", + "unicode", + "idn", + "idna", + "dns", + "url", + "domain" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "contributors": [ + { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + } + ], + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/punycode.js.git" + }, + "bugs": "https://github.com/mathiasbynens/punycode.js/issues", + "files": [ + "LICENSE-MIT.txt", + "punycode.js", + "punycode.es6.js" + ], + "scripts": { + "test": "mocha tests", + "build": "node scripts/prepublish.js" + }, + "devDependencies": { + "codecov": "^3.8.3", + "nyc": "^15.1.0", + "mocha": "^10.2.0" + }, + "jspm": { + "map": { + "./punycode.js": { + "node": "@node/punycode" + } + } + } +} diff --git a/node_modules/punycode.js/punycode.es6.js b/node_modules/punycode.js/punycode.es6.js new file mode 100644 index 0000000..dadece2 --- /dev/null +++ b/node_modules/punycode.js/punycode.es6.js @@ -0,0 +1,444 @@ +'use strict'; + +/** Highest positive signed 32-bit float value */ +const maxInt = 2147483647; // aka. 
0x7FFFFFFF or 2^31-1 + +/** Bootstring parameters */ +const base = 36; +const tMin = 1; +const tMax = 26; +const skew = 38; +const damp = 700; +const initialBias = 72; +const initialN = 128; // 0x80 +const delimiter = '-'; // '\x2D' + +/** Regular expressions */ +const regexPunycode = /^xn--/; +const regexNonASCII = /[^\0-\x7F]/; // Note: U+007F DEL is excluded too. +const regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; // RFC 3490 separators + +/** Error messages */ +const errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' +}; + +/** Convenience shortcuts */ +const baseMinusTMin = base - tMin; +const floor = Math.floor; +const stringFromCharCode = String.fromCharCode; + +/*--------------------------------------------------------------------------*/ + +/** + * A generic error utility function. + * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ +function error(type) { + throw new RangeError(errors[type]); +} + +/** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ +function map(array, callback) { + const result = []; + let length = array.length; + while (length--) { + result[length] = callback(array[length]); + } + return result; +} + +/** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {String} A new string of characters returned by the callback + * function. 
+ */ +function mapDomain(domain, callback) { + const parts = domain.split('@'); + let result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + domain = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + domain = domain.replace(regexSeparators, '\x2E'); + const labels = domain.split('.'); + const encoded = map(labels, callback).join('.'); + return result + encoded; +} + +/** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. + */ +function ucs2decode(string) { + const output = []; + let counter = 0; + const length = string.length; + while (counter < length) { + const value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // It's a high surrogate, and there is a next character. + const extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // Low surrogate. + output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // It's an unmatched surrogate; only append this code unit, in case the + // next code unit is the high surrogate of a surrogate pair. + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; +} + +/** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. 
+ * @returns {String} The new Unicode string (UCS-2). + */ +const ucs2encode = codePoints => String.fromCodePoint(...codePoints); + +/** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. + * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. + */ +const basicToDigit = function(codePoint) { + if (codePoint >= 0x30 && codePoint < 0x3A) { + return 26 + (codePoint - 0x30); + } + if (codePoint >= 0x41 && codePoint < 0x5B) { + return codePoint - 0x41; + } + if (codePoint >= 0x61 && codePoint < 0x7B) { + return codePoint - 0x61; + } + return base; +}; + +/** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ +const digitToBasic = function(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); +}; + +/** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ +const adapt = function(delta, numPoints, firstTime) { + let k = 0; + delta = firstTime ? 
floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); +}; + +/** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. + */ +const decode = function(input) { + // Don't use UCS-2. + const output = []; + const inputLength = input.length; + let i = 0; + let n = initialN; + let bias = initialBias; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + let basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (let j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (let index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. 
+ const oldi = i; + for (let w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + const digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base) { + error('invalid-input'); + } + if (digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + + if (digit < t) { + break; + } + + const baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + const out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output. + output.splice(i++, 0, n); + + } + + return String.fromCodePoint(...output); +}; + +/** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ +const encode = function(input) { + const output = []; + + // Convert the input in UCS-2 to an array of Unicode code points. + input = ucs2decode(input); + + // Cache the length. + const inputLength = input.length; + + // Initialize the state. + let n = initialN; + let delta = 0; + let bias = initialBias; + + // Handle the basic code points. + for (const currentValue of input) { + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + const basicLength = output.length; + let handledCPCount = basicLength; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. 
+ + // Finish the basic string with a delimiter unless it's empty. + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + let m = maxInt; + for (const currentValue of input) { + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow. + const handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (const currentValue of input) { + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + if (currentValue === n) { + // Represent delta as a generalized variable-length integer. + let q = delta; + for (let k = base; /* no condition */; k += base) { + const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + if (q < t) { + break; + } + const qMinusT = q - t; + const baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount === basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); +}; + +/** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. + * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. 
+ */ +const toUnicode = function(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); +}; + +/** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ +const toASCII = function(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); +}; + +/*--------------------------------------------------------------------------*/ + +/** Define the public API */ +const punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '2.3.1', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. + * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode +}; + +export { ucs2decode, ucs2encode, decode, encode, toASCII, toUnicode }; +export default punycode; diff --git a/node_modules/punycode.js/punycode.js b/node_modules/punycode.js/punycode.js new file mode 100644 index 0000000..a1ef251 --- /dev/null +++ b/node_modules/punycode.js/punycode.js @@ -0,0 +1,443 @@ +'use strict'; + +/** Highest positive signed 32-bit float value */ +const maxInt = 2147483647; // aka. 
0x7FFFFFFF or 2^31-1 + +/** Bootstring parameters */ +const base = 36; +const tMin = 1; +const tMax = 26; +const skew = 38; +const damp = 700; +const initialBias = 72; +const initialN = 128; // 0x80 +const delimiter = '-'; // '\x2D' + +/** Regular expressions */ +const regexPunycode = /^xn--/; +const regexNonASCII = /[^\0-\x7F]/; // Note: U+007F DEL is excluded too. +const regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; // RFC 3490 separators + +/** Error messages */ +const errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' +}; + +/** Convenience shortcuts */ +const baseMinusTMin = base - tMin; +const floor = Math.floor; +const stringFromCharCode = String.fromCharCode; + +/*--------------------------------------------------------------------------*/ + +/** + * A generic error utility function. + * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ +function error(type) { + throw new RangeError(errors[type]); +} + +/** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ +function map(array, callback) { + const result = []; + let length = array.length; + while (length--) { + result[length] = callback(array[length]); + } + return result; +} + +/** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {String} A new string of characters returned by the callback + * function. 
+ */ +function mapDomain(domain, callback) { + const parts = domain.split('@'); + let result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + domain = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + domain = domain.replace(regexSeparators, '\x2E'); + const labels = domain.split('.'); + const encoded = map(labels, callback).join('.'); + return result + encoded; +} + +/** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. + */ +function ucs2decode(string) { + const output = []; + let counter = 0; + const length = string.length; + while (counter < length) { + const value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // It's a high surrogate, and there is a next character. + const extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // Low surrogate. + output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // It's an unmatched surrogate; only append this code unit, in case the + // next code unit is the high surrogate of a surrogate pair. + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; +} + +/** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. 
+ * @returns {String} The new Unicode string (UCS-2). + */ +const ucs2encode = codePoints => String.fromCodePoint(...codePoints); + +/** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. + * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. + */ +const basicToDigit = function(codePoint) { + if (codePoint >= 0x30 && codePoint < 0x3A) { + return 26 + (codePoint - 0x30); + } + if (codePoint >= 0x41 && codePoint < 0x5B) { + return codePoint - 0x41; + } + if (codePoint >= 0x61 && codePoint < 0x7B) { + return codePoint - 0x61; + } + return base; +}; + +/** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ +const digitToBasic = function(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); +}; + +/** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ +const adapt = function(delta, numPoints, firstTime) { + let k = 0; + delta = firstTime ? 
floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); +}; + +/** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. + */ +const decode = function(input) { + // Don't use UCS-2. + const output = []; + const inputLength = input.length; + let i = 0; + let n = initialN; + let bias = initialBias; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + let basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (let j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (let index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. 
+ const oldi = i; + for (let w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + const digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base) { + error('invalid-input'); + } + if (digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + + if (digit < t) { + break; + } + + const baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + const out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output. + output.splice(i++, 0, n); + + } + + return String.fromCodePoint(...output); +}; + +/** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ +const encode = function(input) { + const output = []; + + // Convert the input in UCS-2 to an array of Unicode code points. + input = ucs2decode(input); + + // Cache the length. + const inputLength = input.length; + + // Initialize the state. + let n = initialN; + let delta = 0; + let bias = initialBias; + + // Handle the basic code points. + for (const currentValue of input) { + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + const basicLength = output.length; + let handledCPCount = basicLength; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. 
+ + // Finish the basic string with a delimiter unless it's empty. + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + let m = maxInt; + for (const currentValue of input) { + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow. + const handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (const currentValue of input) { + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + if (currentValue === n) { + // Represent delta as a generalized variable-length integer. + let q = delta; + for (let k = base; /* no condition */; k += base) { + const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + if (q < t) { + break; + } + const qMinusT = q - t; + const baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount === basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); +}; + +/** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. + * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. 
+ */ +const toUnicode = function(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); +}; + +/** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ +const toASCII = function(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); +}; + +/*--------------------------------------------------------------------------*/ + +/** Define the public API */ +const punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '2.3.1', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. 
+ * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode +}; + +module.exports = punycode; diff --git a/node_modules/toml/.jshintrc b/node_modules/toml/.jshintrc new file mode 100644 index 0000000..96747b1 --- /dev/null +++ b/node_modules/toml/.jshintrc @@ -0,0 +1,18 @@ +{ + "node": true, + "browser": true, + "browserify": true, + "curly": true, + "eqeqeq": true, + "eqnull": false, + "latedef": "nofunc", + "newcap": true, + "noarg": true, + "undef": true, + "strict": true, + "trailing": true, + "smarttabs": true, + "indent": 2, + "quotmark": true, + "laxbreak": true +} diff --git a/node_modules/toml/.travis.yml b/node_modules/toml/.travis.yml new file mode 100644 index 0000000..f46aeb8 --- /dev/null +++ b/node_modules/toml/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +sudo: false +node_js: + - "4.1" + - "4.0" + - "0.12" + - "0.10" diff --git a/node_modules/toml/CHANGELOG.md b/node_modules/toml/CHANGELOG.md new file mode 100644 index 0000000..65b4db6 --- /dev/null +++ b/node_modules/toml/CHANGELOG.md @@ -0,0 +1,116 @@ +2.3.0 - July 13 2015 +==================== + +* Correctly handle quoted keys ([#21](https://github.com/BinaryMuse/toml-node/issues/21)) + +2.2.3 - June 8 2015 +=================== + +* Support empty inline tables ([#24](https://github.com/BinaryMuse/toml-node/issues/24)) +* Do not allow implicit table definitions to replace value ([#23](https://github.com/BinaryMuse/toml-node/issues/23)) +* Don't allow tables to replace inline tables ([#25](https://github.com/BinaryMuse/toml-node/issues/25)) + +2.2.2 - April 3 2015 +==================== + +* Correctly handle newlines at beginning of string ([#22](https://github.com/BinaryMuse/toml-node/issues/22)) + +2.2.1 - March 17 2015 +===================== + +* Parse dates generated by Date#toISOString() ([#20](https://github.com/BinaryMuse/toml-node/issues/20)) + 
+2.2.0 - Feb 26 2015 +=================== + +* Support TOML spec v0.4.0 + +2.1.0 - Jan 7 2015 +================== + +* Support TOML spec v0.3.1 + +2.0.6 - May 23 2014 +=================== + +### Bug Fixes + +* Fix support for empty arrays with newlines ([#13](https://github.com/BinaryMuse/toml-node/issues/13)) + +2.0.5 - May 5 2014 +================== + +### Bug Fixes + +* Fix loop iteration leak, by [sebmck](https://github.com/sebmck) ([#12](https://github.com/BinaryMuse/toml-node/pull/12)) + +### Development + +* Tests now run JSHint on `lib/compiler.js` + +2.0.4 - Mar 9 2014 +================== + +### Bug Fixes + +* Fix failure on duplicate table name inside table array ([#11](https://github.com/BinaryMuse/toml-node/issues/11)) + +2.0.2 - Feb 23 2014 +=================== + +### Bug Fixes + +* Fix absence of errors when table path starts or ends with period + +2.0.1 - Feb 23 2014 +=================== + +### Bug Fixes + +* Fix incorrect messaging in array type errors +* Fix missing error when overwriting key with table array + +2.0.0 - Feb 23 2014 +=================== + +### Features + +* Add support for [version 0.2 of the TOML spec](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) ([#9](https://github.com/BinaryMuse/toml-node/issues/9)) + +### Internals + +* Upgrade to PEG.js v0.8 and rewrite compiler; parser is now considerably faster (from ~7000ms to ~1000ms to parse `example.toml` 1000 times on Node.js v0.10) + +1.0.4 - Aug 17 2013 +=================== + +### Bug Fixes + +* Fix support for empty arrays + +1.0.3 - Aug 17 2013 +=================== + +### Bug Fixes + +* Fix typo in array type error message +* Fix single-element arrays with no trailing commas + +1.0.2 - Aug 17 2013 +=================== + +### Bug Fixes + +* Fix errors on lines that contain only whitespace ([#7](https://github.com/BinaryMuse/toml-node/issues/7)) + +1.0.1 - Aug 17 2013 +=================== + +### Internals + +* Remove old code remaining from the remove streaming 
API + +1.0.0 - Aug 17 2013 +=================== + +Initial stable release diff --git a/node_modules/toml/LICENSE b/node_modules/toml/LICENSE new file mode 100644 index 0000000..44ae2bf --- /dev/null +++ b/node_modules/toml/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2012 Michelle Tilley + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/toml/README.md b/node_modules/toml/README.md new file mode 100644 index 0000000..ff4dc58 --- /dev/null +++ b/node_modules/toml/README.md @@ -0,0 +1,93 @@ +TOML Parser for Node.js +======================= + +[![Build Status](https://travis-ci.org/BinaryMuse/toml-node.png?branch=master)](https://travis-ci.org/BinaryMuse/toml-node) + +[![NPM](https://nodei.co/npm/toml.png?downloads=true)](https://nodei.co/npm/toml/) + +If you haven't heard of TOML, well you're just missing out. [Go check it out now.](https://github.com/mojombo/toml) Back? Good. 
+ +TOML Spec Support +----------------- + +toml-node supports version 0.4.0 the TOML spec as specified by [mojombo/toml@v0.4.0](https://github.com/mojombo/toml/blob/master/versions/en/toml-v0.4.0.md) + +Installation +------------ + +toml-node is available via npm. + + npm install toml + +toml-node also works with browser module bundlers like Browserify and webpack. + +Usage +----- + +### Standalone + +Say you have some awesome TOML in a variable called `someTomlString`. Maybe it came from the web; maybe it came from a file; wherever it came from, it came asynchronously! Let's turn that sucker into a JavaScript object. + +```javascript +var toml = require('toml'); +var data = toml.parse(someTomlString); +console.dir(data); +``` + +`toml.parse` throws an exception in the case of a parsing error; such exceptions have a `line` and `column` property on them to help identify the offending text. + +```javascript +try { + toml.parse(someCrazyKnuckleHeadedTrblToml); +} catch (e) { + console.error("Parsing error on line " + e.line + ", column " + e.column + + ": " + e.message); +} +``` + +### Streaming + +As of toml-node version 1.0, the streaming interface has been removed. Instead, use a module like [concat-stream](https://npmjs.org/package/concat-stream): + +```javascript +var toml = require('toml'); +var concat = require('concat-stream'); +var fs = require('fs'); + +fs.createReadStream('tomlFile.toml', 'utf8').pipe(concat(function(data) { + var parsed = toml.parse(data); +})); +``` + +Thanks [@ForbesLindesay](https://github.com/ForbesLindesay) for the suggestion. + +### Requiring with Node.js + +You can use the [toml-require package](https://github.com/BinaryMuse/toml-require) to `require()` your `.toml` files with Node.js + +Live Demo +--------- + +You can experiment with TOML online at http://binarymuse.github.io/toml-node/, which uses the latest version of this library. 
+ +Building & Testing +------------------ + +toml-node uses [the PEG.js parser generator](http://pegjs.majda.cz/). + + npm install + npm run build + npm test + +Any changes to `src/toml.peg` requires a regeneration of the parser with `npm run build`. + +toml-node is tested on Travis CI and is tested against: + + * Node 0.10 + * Node 0.12 + * Latest stable io.js + +License +------- + +toml-node is licensed under the MIT license agreement. See the LICENSE file for more information. diff --git a/node_modules/toml/benchmark.js b/node_modules/toml/benchmark.js new file mode 100644 index 0000000..99fba1d --- /dev/null +++ b/node_modules/toml/benchmark.js @@ -0,0 +1,12 @@ +var toml = require('./index'); +var fs = require('fs'); +var data = fs.readFileSync('./test/example.toml', 'utf8'); + +var iterations = 1000; + +var start = new Date(); +for(var i = 0; i < iterations; i++) { + toml.parse(data); +} +var end = new Date(); +console.log("%s iterations in %sms", iterations, end - start); diff --git a/node_modules/toml/index.d.ts b/node_modules/toml/index.d.ts new file mode 100644 index 0000000..7e9052b --- /dev/null +++ b/node_modules/toml/index.d.ts @@ -0,0 +1,3 @@ +declare module 'toml' { + export function parse(input: string): any; +} diff --git a/node_modules/toml/index.js b/node_modules/toml/index.js new file mode 100644 index 0000000..6caf44a --- /dev/null +++ b/node_modules/toml/index.js @@ -0,0 +1,9 @@ +var parser = require('./lib/parser'); +var compiler = require('./lib/compiler'); + +module.exports = { + parse: function(input) { + var nodes = parser.parse(input.toString()); + return compiler.compile(nodes); + } +}; diff --git a/node_modules/toml/package.json b/node_modules/toml/package.json new file mode 100644 index 0000000..186ad00 --- /dev/null +++ b/node_modules/toml/package.json @@ -0,0 +1,24 @@ +{ + "name": "toml", + "version": "3.0.0", + "description": "TOML parser for Node.js (parses TOML spec v0.4.0)", + "main": "index.js", + "types": "index.d.ts", + 
"scripts": { + "build": "pegjs --cache src/toml.pegjs lib/parser.js", + "test": "jshint lib/compiler.js && nodeunit test/test_*.js", + "prepublish": "npm run build" + }, + "repository": "git://github.com/BinaryMuse/toml-node.git", + "keywords": [ + "toml", + "parser" + ], + "author": "Michelle Tilley ", + "license": "MIT", + "devDependencies": { + "jshint": "*", + "nodeunit": "~0.9.0", + "pegjs": "~0.8.0" + } +} diff --git a/node_modules/toml/src/toml.pegjs b/node_modules/toml/src/toml.pegjs new file mode 100644 index 0000000..7051707 --- /dev/null +++ b/node_modules/toml/src/toml.pegjs @@ -0,0 +1,231 @@ +{ + var nodes = []; + + function genError(err, line, col) { + var ex = new Error(err); + ex.line = line; + ex.column = col; + throw ex; + } + + function addNode(node) { + nodes.push(node); + } + + function node(type, value, line, column, key) { + var obj = { type: type, value: value, line: line(), column: column() }; + if (key) obj.key = key; + return obj; + } + + function convertCodePoint(str, line, col) { + var num = parseInt("0x" + str); + + if ( + !isFinite(num) || + Math.floor(num) != num || + num < 0 || + num > 0x10FFFF || + (num > 0xD7FF && num < 0xE000) + ) { + genError("Invalid Unicode escape code: " + str, line, col); + } else { + return fromCodePoint(num); + } + } + + function fromCodePoint() { + var MAX_SIZE = 0x4000; + var codeUnits = []; + var highSurrogate; + var lowSurrogate; + var index = -1; + var length = arguments.length; + if (!length) { + return ''; + } + var result = ''; + while (++index < length) { + var codePoint = Number(arguments[index]); + if (codePoint <= 0xFFFF) { // BMP code point + codeUnits.push(codePoint); + } else { // Astral code point; split in surrogate halves + // http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + codePoint -= 0x10000; + highSurrogate = (codePoint >> 10) + 0xD800; + lowSurrogate = (codePoint % 0x400) + 0xDC00; + codeUnits.push(highSurrogate, lowSurrogate); + } + if (index + 1 == length || 
codeUnits.length > MAX_SIZE) { + result += String.fromCharCode.apply(null, codeUnits); + codeUnits.length = 0; + } + } + return result; + } +} + +start + = line* { return nodes } + +line + = S* expr:expression S* comment* (NL+ / EOF) + / S+ (NL+ / EOF) + / NL + +expression + = comment / path / tablearray / assignment + +comment + = '#' (!(NL / EOF) .)* + +path + = '[' S* name:table_key S* ']' { addNode(node('ObjectPath', name, line, column)) } + +tablearray + = '[' '[' S* name:table_key S* ']' ']' { addNode(node('ArrayPath', name, line, column)) } + +table_key + = parts:dot_ended_table_key_part+ name:table_key_part { return parts.concat(name) } + / name:table_key_part { return [name] } + +table_key_part + = S* name:key S* { return name } + / S* name:quoted_key S* { return name } + +dot_ended_table_key_part + = S* name:key S* '.' S* { return name } + / S* name:quoted_key S* '.' S* { return name } + +assignment + = key:key S* '=' S* value:value { addNode(node('Assign', value, line, column, key)) } + / key:quoted_key S* '=' S* value:value { addNode(node('Assign', value, line, column, key)) } + +key + = chars:ASCII_BASIC+ { return chars.join('') } + +quoted_key + = node:double_quoted_single_line_string { return node.value } + / node:single_quoted_single_line_string { return node.value } + +value + = string / datetime / float / integer / boolean / array / inline_table + +string + = double_quoted_multiline_string + / double_quoted_single_line_string + / single_quoted_multiline_string + / single_quoted_single_line_string + +double_quoted_multiline_string + = '"""' NL? chars:multiline_string_char* '"""' { return node('String', chars.join(''), line, column) } +double_quoted_single_line_string + = '"' chars:string_char* '"' { return node('String', chars.join(''), line, column) } +single_quoted_multiline_string + = "'''" NL? 
chars:multiline_literal_char* "'''" { return node('String', chars.join(''), line, column) } +single_quoted_single_line_string + = "'" chars:literal_char* "'" { return node('String', chars.join(''), line, column) } + +string_char + = ESCAPED / (!'"' char:. { return char }) + +literal_char + = (!"'" char:. { return char }) + +multiline_string_char + = ESCAPED / multiline_string_delim / (!'"""' char:. { return char}) + +multiline_string_delim + = '\\' NL NLS* { return '' } + +multiline_literal_char + = (!"'''" char:. { return char }) + +float + = left:(float_text / integer_text) ('e' / 'E') right:integer_text { return node('Float', parseFloat(left + 'e' + right), line, column) } + / text:float_text { return node('Float', parseFloat(text), line, column) } + +float_text + = '+'? digits:(DIGITS '.' DIGITS) { return digits.join('') } + / '-' digits:(DIGITS '.' DIGITS) { return '-' + digits.join('') } + +integer + = text:integer_text { return node('Integer', parseInt(text, 10), line, column) } + +integer_text + = '+'? digits:DIGIT+ !'.' { return digits.join('') } + / '-' digits:DIGIT+ !'.' { return '-' + digits.join('') } + +boolean + = 'true' { return node('Boolean', true, line, column) } + / 'false' { return node('Boolean', false, line, column) } + +array + = '[' array_sep* ']' { return node('Array', [], line, column) } + / '[' value:array_value? ']' { return node('Array', value ? 
[value] : [], line, column) } + / '[' values:array_value_list+ ']' { return node('Array', values, line, column) } + / '[' values:array_value_list+ value:array_value ']' { return node('Array', values.concat(value), line, column) } + +array_value + = array_sep* value:value array_sep* { return value } + +array_value_list + = array_sep* value:value array_sep* ',' array_sep* { return value } + +array_sep + = S / NL / comment + +inline_table + = '{' S* values:inline_table_assignment* S* '}' { return node('InlineTable', values, line, column) } + +inline_table_assignment + = S* key:key S* '=' S* value:value S* ',' S* { return node('InlineTableValue', value, line, column, key) } + / S* key:key S* '=' S* value:value { return node('InlineTableValue', value, line, column, key) } + +secfragment + = '.' digits:DIGITS { return "." + digits } + +date + = date:( + DIGIT DIGIT DIGIT DIGIT + '-' + DIGIT DIGIT + '-' + DIGIT DIGIT + ) { return date.join('') } + +time + = time:(DIGIT DIGIT ':' DIGIT DIGIT ':' DIGIT DIGIT secfragment?) { return time.join('') } + +time_with_offset + = time:( + DIGIT DIGIT ':' DIGIT DIGIT ':' DIGIT DIGIT secfragment? + ('-' / '+') + DIGIT DIGIT ':' DIGIT DIGIT + ) { return time.join('') } + +datetime + = date:date 'T' time:time 'Z' { return node('Date', new Date(date + "T" + time + "Z"), line, column) } + / date:date 'T' time:time_with_offset { return node('Date', new Date(date + "T" + time), line, column) } + + +S = [ \t] +NL = "\n" / "\r" "\n" +NLS = NL / S +EOF = !. 
+HEX = [0-9a-f]i +DIGIT = DIGIT_OR_UNDER +DIGIT_OR_UNDER = [0-9] + / '_' { return "" } +ASCII_BASIC = [A-Za-z0-9_\-] +DIGITS = d:DIGIT_OR_UNDER+ { return d.join('') } +ESCAPED = '\\"' { return '"' } + / '\\\\' { return '\\' } + / '\\b' { return '\b' } + / '\\t' { return '\t' } + / '\\n' { return '\n' } + / '\\f' { return '\f' } + / '\\r' { return '\r' } + / ESCAPED_UNICODE +ESCAPED_UNICODE = "\\U" digits:(HEX HEX HEX HEX HEX HEX HEX HEX) { return convertCodePoint(digits.join('')) } + / "\\u" digits:(HEX HEX HEX HEX) { return convertCodePoint(digits.join('')) } diff --git a/node_modules/toml/test/bad.toml b/node_modules/toml/test/bad.toml new file mode 100644 index 0000000..d51c3f3 --- /dev/null +++ b/node_modules/toml/test/bad.toml @@ -0,0 +1,5 @@ +[something] +awesome = "this is" + +[something.awesome] +this = "isn't" diff --git a/node_modules/toml/test/example.toml b/node_modules/toml/test/example.toml new file mode 100644 index 0000000..ea9dc35 --- /dev/null +++ b/node_modules/toml/test/example.toml @@ -0,0 +1,32 @@ +# This is a TOML document. Boom. + +title = "TOML Example" + +[owner] +name = "Tom Preston-Werner" +organization = "GitHub" +bio = "GitHub Cofounder & CEO\n\tLikes \"tater tots\" and beer and backslashes: \\" +dob = 1979-05-27T07:32:00Z # First class dates? Why not? + +[database] +server = "192.168.1.1" +ports = [ 8001, 8001, 8003 ] +connection_max = 5000 +connection_min = -2 # Don't ask me how +max_temp = 87.1 # It's a float +min_temp = -17.76 +enabled = true + +[servers] + + # You can indent as you please. Tabs or spaces. TOML don't care. 
+ [servers.alpha] + ip = "10.0.0.1" + dc = "eqdc10" + + [servers.beta] + ip = "10.0.0.2" + dc = "eqdc10" + +[clients] +data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it diff --git a/node_modules/toml/test/hard_example.toml b/node_modules/toml/test/hard_example.toml new file mode 100644 index 0000000..38856c8 --- /dev/null +++ b/node_modules/toml/test/hard_example.toml @@ -0,0 +1,33 @@ +# Test file for TOML +# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate +# This part you'll really hate + +[the] +test_string = "You'll hate me after this - #" # " Annoying, isn't it? + + [the.hard] + test_array = [ "] ", " # "] # ] There you go, parse this! + test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ] + # You didn't think it'd as easy as chucking out the last #, did you? + another_test_string = " Same thing, but with a string #" + harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too" + # Things will get harder + + [the.hard."bit#"] + "what?" = "You don't think some user won't do that?" + multi_line_array = [ + "]", + # ] Oh yes I did + ] + +# Each of the following keygroups/key value pairs should produce an error. 
Uncomment to them to test + +#[error] if you didn't catch this, your parser is broken +#string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this +#array = [ +# "This might most likely happen in multiline arrays", +# Like here, +# "or here, +# and here" +# ] End of array comment, forgot the # +#number = 3.14 pi <--again forgot the # diff --git a/node_modules/toml/test/inline_tables.toml b/node_modules/toml/test/inline_tables.toml new file mode 100644 index 0000000..c91088e --- /dev/null +++ b/node_modules/toml/test/inline_tables.toml @@ -0,0 +1,10 @@ +name = { first = "Tom", last = "Preston-Werner" } +point = { x = 1, y = 2 } +nested = { x = { a = { b = 3 } } } + +points = [ { x = 1, y = 2, z = 3 }, + { x = 7, y = 8, z = 9 }, + { x = 2, y = 4, z = 8 } ] + +arrays = [ { x = [1, 2, 3], y = [4, 5, 6] }, + { x = [7, 8, 9], y = [0, 1, 2] } ] diff --git a/node_modules/toml/test/literal_strings.toml b/node_modules/toml/test/literal_strings.toml new file mode 100644 index 0000000..36772bb --- /dev/null +++ b/node_modules/toml/test/literal_strings.toml @@ -0,0 +1,5 @@ +# What you see is what you get. +winpath = 'C:\Users\nodejs\templates' +winpath2 = '\\ServerX\admin$\system32\' +quoted = 'Tom "Dubs" Preston-Werner' +regex = '<\i\c*\s*>' diff --git a/node_modules/toml/test/multiline_eat_whitespace.toml b/node_modules/toml/test/multiline_eat_whitespace.toml new file mode 100644 index 0000000..904c170 --- /dev/null +++ b/node_modules/toml/test/multiline_eat_whitespace.toml @@ -0,0 +1,15 @@ +# The following strings are byte-for-byte equivalent: +key1 = "The quick brown fox jumps over the lazy dog." 
+ +key2 = """ +The quick brown \ + + + fox jumps over \ + the lazy dog.""" + +key3 = """\ + The quick brown \ + fox jumps over \ + the lazy dog.\ + """ diff --git a/node_modules/toml/test/multiline_literal_strings.toml b/node_modules/toml/test/multiline_literal_strings.toml new file mode 100644 index 0000000..bc88494 --- /dev/null +++ b/node_modules/toml/test/multiline_literal_strings.toml @@ -0,0 +1,7 @@ +regex2 = '''I [dw]on't need \d{2} apples''' +lines = ''' +The first newline is +trimmed in raw strings. + All other whitespace + is preserved. +''' diff --git a/node_modules/toml/test/multiline_strings.toml b/node_modules/toml/test/multiline_strings.toml new file mode 100644 index 0000000..6eb8c45 --- /dev/null +++ b/node_modules/toml/test/multiline_strings.toml @@ -0,0 +1,6 @@ +# The following strings are byte-for-byte equivalent: +key1 = "One\nTwo" +key2 = """One\nTwo""" +key3 = """ +One +Two""" diff --git a/node_modules/toml/test/smoke.js b/node_modules/toml/test/smoke.js new file mode 100644 index 0000000..7769f9c --- /dev/null +++ b/node_modules/toml/test/smoke.js @@ -0,0 +1,22 @@ +var fs = require('fs'); +var parser = require('../index'); + +var codes = [ + "# test\n my.key=\"value\"\nother = 101\nthird = -37", + "first = 1.2\nsecond = -56.02\nth = true\nfth = false", + "time = 1979-05-27T07:32:00Z", + "test = [\"one\", ]", + "test = [[1, 2,], [true, false,],]", + "[my.sub.path]\nkey = true\nother = -15.3\n[my.sub]\nkey=false", + "arry = [\"one\", \"two\",\"thr\nee\", \"\\u03EA\"]", + fs.readFileSync(__dirname + '/example.toml', 'utf8'), + fs.readFileSync(__dirname + '/hard_example.toml', 'utf8') +] + +console.log("============================================="); +for(i in codes) { + var code = codes[i]; + console.log(code + "\n"); + console.log(JSON.stringify(parser.parse(code))); + console.log("============================================="); +} diff --git a/node_modules/toml/test/table_arrays_easy.toml b/node_modules/toml/test/table_arrays_easy.toml new 
file mode 100644 index 0000000..ac3883b --- /dev/null +++ b/node_modules/toml/test/table_arrays_easy.toml @@ -0,0 +1,10 @@ +[[products]] +name = "Hammer" +sku = 738594937 + +[[products]] + +[[products]] +name = "Nail" +sku = 284758393 +color = "gray" diff --git a/node_modules/toml/test/table_arrays_hard.toml b/node_modules/toml/test/table_arrays_hard.toml new file mode 100644 index 0000000..2ade540 --- /dev/null +++ b/node_modules/toml/test/table_arrays_hard.toml @@ -0,0 +1,31 @@ +[[fruit]] +name = "durian" +variety = [] + +[[fruit]] +name = "apple" + + [fruit.physical] + color = "red" + shape = "round" + + [[fruit.variety]] + name = "red delicious" + + [[fruit.variety]] + name = "granny smith" + +[[fruit]] + +[[fruit]] +name = "banana" + + [[fruit.variety]] + name = "plantain" + +[[fruit]] +name = "orange" + +[fruit.physical] +color = "orange" +shape = "round" diff --git a/node_modules/toml/test/test_toml.js b/node_modules/toml/test/test_toml.js new file mode 100644 index 0000000..1f654b3 --- /dev/null +++ b/node_modules/toml/test/test_toml.js @@ -0,0 +1,596 @@ +var toml = require('../'); +var fs = require('fs'); + +var assert = require("nodeunit").assert; + +assert.parsesToml = function(tomlStr, expected) { + try { + var actual = toml.parse(tomlStr); + } catch (e) { + var errInfo = "line: " + e.line + ", column: " + e.column; + return assert.fail("TOML parse error: " + e.message, errInfo, null, "at", assert.parsesToml); + } + return assert.deepEqual(actual, expected); +}; + +var exampleExpected = { + title: "TOML Example", + owner: { + name: "Tom Preston-Werner", + organization: "GitHub", + bio: "GitHub Cofounder & CEO\n\tLikes \"tater tots\" and beer and backslashes: \\", + dob: new Date("1979-05-27T07:32:00Z") + }, + database: { + server: "192.168.1.1", + ports: [8001, 8001, 8003], + connection_max: 5000, + connection_min: -2, + max_temp: 87.1, + min_temp: -17.76, + enabled: true + }, + servers: { + alpha: { + ip: "10.0.0.1", + dc: "eqdc10" + }, + beta: { + ip: 
"10.0.0.2", + dc: "eqdc10" + } + }, + clients: { + data: [ ["gamma", "delta"], [1, 2] ] + } +}; + +var hardExampleExpected = { + the: { + hard: { + another_test_string: ' Same thing, but with a string #', + 'bit#': { + multi_line_array: [']'], + 'what?': "You don't think some user won't do that?" + }, + harder_test_string: " And when \"'s are in the string, along with # \"", + test_array: ['] ', ' # '], + test_array2: ['Test #11 ]proved that', 'Experiment #9 was a success'] + }, + test_string: "You'll hate me after this - #" + } +}; + +var easyTableArrayExpected = { + "products": [ + { "name": "Hammer", "sku": 738594937 }, + { }, + { "name": "Nail", "sku": 284758393, "color": "gray" } + ] +}; + +var hardTableArrayExpected = { + "fruit": [ + { + "name": "durian", + "variety": [] + }, + { + "name": "apple", + "physical": { + "color": "red", + "shape": "round" + }, + "variety": [ + { "name": "red delicious" }, + { "name": "granny smith" } + ] + }, + {}, + { + "name": "banana", + "variety": [ + { "name": "plantain" } + ] + }, + { + "name": "orange", + "physical": { + "color": "orange", + "shape": "round" + } + } + ] +} + +var badInputs = [ + '[error] if you didn\'t catch this, your parser is broken', + 'string = "Anything other than tabs, spaces and newline after a table or key value pair has ended should produce an error unless it is a comment" like this', + 'array = [\n \"This might most likely happen in multiline arrays\",\n Like here,\n \"or here,\n and here\"\n ] End of array comment, forgot the #', + 'number = 3.14 pi <--again forgot the #' +]; + +exports.testParsesExample = function(test) { + var str = fs.readFileSync(__dirname + "/example.toml", 'utf-8') + test.parsesToml(str, exampleExpected); + test.done(); +}; + +exports.testParsesHardExample = function(test) { + var str = fs.readFileSync(__dirname + "/hard_example.toml", 'utf-8') + test.parsesToml(str, hardExampleExpected); + test.done(); +}; + +exports.testEasyTableArrays = function(test) { + var str = 
fs.readFileSync(__dirname + "/table_arrays_easy.toml", 'utf8') + test.parsesToml(str, easyTableArrayExpected); + test.done(); +}; + +exports.testHarderTableArrays = function(test) { + var str = fs.readFileSync(__dirname + "/table_arrays_hard.toml", 'utf8') + test.parsesToml(str, hardTableArrayExpected); + test.done(); +}; + +exports.testSupportsTrailingCommasInArrays = function(test) { + var str = 'arr = [1, 2, 3,]'; + var expected = { arr: [1, 2, 3] }; + test.parsesToml(str, expected); + test.done(); +}; + +exports.testSingleElementArrayWithNoTrailingComma = function(test) { + var str = "a = [1]"; + test.parsesToml(str, { + a: [1] + }); + test.done(); +}; + +exports.testEmptyArray = function(test) { + var str = "a = []"; + test.parsesToml(str, { + a: [] + }); + test.done(); +}; + +exports.testArrayWithWhitespace = function(test) { + var str = "[versions]\nfiles = [\n 3, \n 5 \n\n ]"; + test.parsesToml(str, { + versions: { + files: [3, 5] + } + }); + test.done(); +}; + +exports.testEmptyArrayWithWhitespace = function(test) { + var str = "[versions]\nfiles = [\n \n ]"; + test.parsesToml(str, { + versions: { + files: [] + } + }); + test.done(); +}; + +exports.testDefineOnSuperkey = function(test) { + var str = "[a.b]\nc = 1\n\n[a]\nd = 2"; + var expected = { + a: { + b: { + c: 1 + }, + d: 2 + } + }; + test.parsesToml(str, expected); + test.done(); +}; + +exports.testWhitespace = function(test) { + var str = "a = 1\n \n b = 2 "; + test.parsesToml(str, { + a: 1, b: 2 + }); + test.done(); +}; + +exports.testUnicode = function(test) { + var str = "str = \"My name is Jos\\u00E9\""; + test.parsesToml(str, { + str: "My name is Jos\u00E9" + }); + + var str = "str = \"My name is Jos\\U000000E9\""; + test.parsesToml(str, { + str: "My name is Jos\u00E9" + }); + test.done(); +}; + +exports.testMultilineStrings = function(test) { + var str = fs.readFileSync(__dirname + "/multiline_strings.toml", 'utf8'); + test.parsesToml(str, { + key1: "One\nTwo", + key2: "One\nTwo", + key3: 
"One\nTwo" + }); + test.done(); +}; + +exports.testMultilineEatWhitespace = function(test) { + var str = fs.readFileSync(__dirname + "/multiline_eat_whitespace.toml", 'utf8'); + test.parsesToml(str, { + key1: "The quick brown fox jumps over the lazy dog.", + key2: "The quick brown fox jumps over the lazy dog.", + key3: "The quick brown fox jumps over the lazy dog." + }); + test.done(); +}; + +exports.testLiteralStrings = function(test) { + var str = fs.readFileSync(__dirname + "/literal_strings.toml", 'utf8'); + test.parsesToml(str, { + winpath: "C:\\Users\\nodejs\\templates", + winpath2: "\\\\ServerX\\admin$\\system32\\", + quoted: "Tom \"Dubs\" Preston-Werner", + regex: "<\\i\\c*\\s*>" + }); + test.done(); +}; + +exports.testMultilineLiteralStrings = function(test) { + var str = fs.readFileSync(__dirname + "/multiline_literal_strings.toml", 'utf8'); + test.parsesToml(str, { + regex2: "I [dw]on't need \\d{2} apples", + lines: "The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n" + }); + test.done(); +}; + +exports.testIntegerFormats = function(test) { + var str = "a = +99\nb = 42\nc = 0\nd = -17\ne = 1_000_001\nf = 1_2_3_4_5 # why u do dis"; + test.parsesToml(str, { + a: 99, + b: 42, + c: 0, + d: -17, + e: 1000001, + f: 12345 + }); + test.done(); +}; + +exports.testFloatFormats = function(test) { + var str = "a = +1.0\nb = 3.1415\nc = -0.01\n" + + "d = 5e+22\ne = 1e6\nf = -2E-2\n" + + "g = 6.626e-34\n" + + "h = 9_224_617.445_991_228_313\n" + + "i = 1e1_000"; + test.parsesToml(str, { + a: 1.0, + b: 3.1415, + c: -0.01, + d: 5e22, + e: 1e6, + f: -2e-2, + g: 6.626e-34, + h: 9224617.445991228313, + i: 1e1000 + }); + test.done(); +}; + +exports.testDate = function(test) { + var date = new Date("1979-05-27T07:32:00Z"); + test.parsesToml("a = 1979-05-27T07:32:00Z", { + a: date + }); + test.done(); +}; + +exports.testDateWithOffset = function(test) { + var date1 = new Date("1979-05-27T07:32:00-07:00"), + date2 = new 
Date("1979-05-27T07:32:00+02:00"); + test.parsesToml("a = 1979-05-27T07:32:00-07:00\nb = 1979-05-27T07:32:00+02:00", { + a: date1, + b: date2 + }); + test.done(); +}; + +exports.testDateWithSecondFraction = function(test) { + var date = new Date("1979-05-27T00:32:00.999999-07:00"); + test.parsesToml("a = 1979-05-27T00:32:00.999999-07:00", { + a: date + }); + test.done(); +}; + +exports.testDateFromIsoString = function(test) { + // https://github.com/BinaryMuse/toml-node/issues/20 + var date = new Date(), + dateStr = date.toISOString(), + tomlStr = "a = " + dateStr; + + test.parsesToml(tomlStr, { + a: date + }); + test.done(); +}; + +exports.testLeadingNewlines = function(test) { + // https://github.com/BinaryMuse/toml-node/issues/22 + var str = "\ntest = \"ing\""; + test.parsesToml(str, { + test: "ing" + }); + test.done(); +}; + +exports.testInlineTables = function(test) { + var str = fs.readFileSync(__dirname + "/inline_tables.toml", 'utf8'); + test.parsesToml(str, { + name: { + first: "Tom", + last: "Preston-Werner" + }, + point: { + x: 1, + y: 2 + }, + nested: { + x: { + a: { + b: 3 + } + } + }, + points: [ + { x: 1, y: 2, z: 3 }, + { x: 7, y: 8, z: 9 }, + { x: 2, y: 4, z: 8 } + ], + arrays: [ + { x: [1, 2, 3], y: [4, 5, 6] }, + { x: [7, 8, 9], y: [0, 1, 2] } + ] + }); + test.done(); +}; + +exports.testEmptyInlineTables = function(test) { + // https://github.com/BinaryMuse/toml-node/issues/24 + var str = "a = { }"; + test.parsesToml(str, { + a: {} + }); + test.done(); +}; + +exports.testKeyNamesWithWhitespaceAroundStartAndFinish = function(test) { + var str = "[ a ]\nb = 1"; + test.parsesToml(str, { + a: { + b: 1 + } + }); + test.done(); +}; + +exports.testKeyNamesWithWhitespaceAroundDots = function(test) { + var str = "[ a . b . 
c]\nd = 1"; + test.parsesToml(str, { + a: { + b: { + c: { + d: 1 + } + } + } + }); + test.done(); +}; + +exports.testSimpleQuotedKeyNames = function(test) { + var str = "[\"ʞ\"]\na = 1"; + test.parsesToml(str, { + "ʞ": { + a: 1 + } + }); + test.done(); +}; + +exports.testComplexQuotedKeyNames = function(test) { + var str = "[ a . \"ʞ\" . c ]\nd = 1"; + test.parsesToml(str, { + a: { + "ʞ": { + c: { + d: 1 + } + } + } + }); + test.done(); +}; + +exports.testEscapedQuotesInQuotedKeyNames = function(test) { + test.parsesToml("[\"the \\\"thing\\\"\"]\na = true", { + 'the "thing"': { + a: true + } + }); + test.done(); +}; + +exports.testMoreComplexQuotedKeyNames = function(test) { + // https://github.com/BinaryMuse/toml-node/issues/21 + test.parsesToml('["the\\ key"]\n\none = "one"\ntwo = 2\nthree = false', { + "the\\ key": { + one: "one", + two: 2, + three: false + } + }); + test.parsesToml('[a."the\\ key"]\n\none = "one"\ntwo = 2\nthree = false', { + a: { + "the\\ key": { + one: "one", + two: 2, + three: false + } + } + }); + test.parsesToml('[a."the-key"]\n\none = "one"\ntwo = 2\nthree = false', { + a: { + "the-key": { + one: "one", + two: 2, + three: false + } + } + }); + test.parsesToml('[a."the.key"]\n\none = "one"\ntwo = 2\nthree = false', { + a: { + "the.key": { + one: "one", + two: 2, + three: false + } + } + }); + // https://github.com/BinaryMuse/toml-node/issues/34 + test.parsesToml('[table]\n\'a "quoted value"\' = "value"', { + table: { + 'a "quoted value"': "value" + } + }); + // https://github.com/BinaryMuse/toml-node/issues/33 + test.parsesToml('[module]\n"foo=bar" = "zzz"', { + module: { + "foo=bar": "zzz" + } + }); + + test.done(); +}; + +exports.testErrorOnBadUnicode = function(test) { + var str = "str = \"My name is Jos\\uD800\""; + test.throws(function() { + toml.parse(str); + }); + test.done(); +}; + +exports.testErrorOnDotAtStartOfKey = function(test) { + test.throws(function() { + var str = "[.a]\nb = 1"; + toml.parse(str); + }); + test.done() +}; 
+ +exports.testErrorOnDotAtEndOfKey = function(test) { + test.throws(function() { + var str = "[.a]\nb = 1"; + toml.parse(str); + }); + test.done() +}; + +exports.testErrorOnTableOverride = function(test) { + test.throws(function() { + var str = "[a]\nb = 1\n\n[a]\nc = 2"; + toml.parse(str); + }); + test.done() +}; + +exports.testErrorOnKeyOverride = function(test) { + test.throws(function() { + var str = "[a]\nb = 1\n[a.b]\nc = 2"; + toml.parse(str); + }); + test.done() +}; + +exports.testErrorOnKeyOverrideWithNested = function(test) { + // https://github.com/BinaryMuse/toml-node/issues/23 + test.throws(function() { + var str = "[a]\nb = \"a\"\n[a.b.c]"; + toml.parse(str); + }, "existing key 'a.b'"); + test.done(); +}; + +exports.testErrorOnKeyOverrideWithArrayTable = function(test) { + test.throws(function() { + var str = "[a]\nb = 1\n[[a]]\nc = 2"; + toml.parse(str); + }); + test.done() +}; + +exports.testErrorOnKeyReplace = function(test) { + test.throws(function() { + var str = "[a]\nb = 1\nb = 2"; + toml.parse(str); + }); + test.done() +}; + +exports.testErrorOnInlineTableReplace = function(test) { + // https://github.com/BinaryMuse/toml-node/issues/25 + test.throws(function() { + var str = "a = { b = 1 }\n[a]\nc = 2"; + toml.parse(str); + }, "existing key 'a'"); + test.done(); +}; + +exports.testErrorOnArrayMismatch = function(test) { + test.throws(function() { + var str = 'data = [1, 2, "test"]' + toml.parse(str); + }); + test.done(); +}; + +exports.testErrorOnBadInputs = function(test) { + var count = 0; + for (i in badInputs) { + (function(num) { + test.throws(function() { + toml.parse(badInputs[num]); + }); + })(i); + } + test.done(); +}; + +exports.testErrorsHaveCorrectLineAndColumn = function(test) { + var str = "[a]\nb = 1\n [a.b]\nc = 2"; + try { toml.parse(str); } + catch (e) { + test.equal(e.line, 3); + test.equal(e.column, 2); + test.done(); + } +}; + +exports.testUsingConstructorAsKey = function(test) { + 
test.parsesToml("[empty]\n[emptier]\n[constructor]\nconstructor = 1\n[emptiest]", { + "empty": {}, + "emptier": {}, + "constructor": { "constructor": 1 }, + "emptiest": {} + }); + test.done(); +}; diff --git a/node_modules/typedoc/LICENSE b/node_modules/typedoc/LICENSE new file mode 100644 index 0000000..ad410e1 --- /dev/null +++ b/node_modules/typedoc/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/typedoc/README.md b/node_modules/typedoc/README.md new file mode 100644 index 0000000..4822f76 --- /dev/null +++ b/node_modules/typedoc/README.md @@ -0,0 +1,87 @@ +# TypeDoc + +Documentation generator for TypeScript projects. + +## Documentation + +For more detailed documentation, the changelog, and TypeDoc documentation rendered with TypeDoc, see https://typedoc.org. + +## Installation + +TypeDoc runs on Node.js and is available as a NPM package. + +```text +npm install typedoc --save-dev +``` + +## Usage + +To generate documentation TypeDoc needs to know your project entry point and TypeScript +compiler options. It will automatically try to find your `tsconfig.json` file, so you can +just specify the entry point of your library: + +```text +typedoc src/index.ts +``` + +If you have multiple entry points, specify each of them. + +```text +typedoc package1/index.ts package2/index.ts +``` + +If you specify a directory, TypeDoc will use the `entryPointStrategy` option to determine how to resolve it. +By default, TypeDoc will search for a file called `index` under the directory. + +### Monorepos / Workspaces + +If your codebase is comprised of one or more npm packages, you can build documentation for each of them individually +and merge the results together into a single site by setting `entryPointStrategy` to `packages`. 
In this mode TypeDoc +requires configuration to be present in each directory to specify the entry points. For an example setup, see +https://github.com/Gerrit0/typedoc-packages-example + +### Arguments + +For a complete list of the command line arguments run `typedoc --help` or visit +[our website](https://typedoc.org/options/). + +- `--out `
+ Specifies the location the documentation should be written to. Defaults to `./docs` +- `--json `
+ Specifies the location and file name a json file describing the project is + written to. When specified no documentation will be generated unless `--out` is also + specified. +- `--options`
+ Specify a json option file that should be loaded. If not specified TypeDoc + will look for 'typedoc.json' in the current directory. +- `--tsconfig `
+ Specify a typescript config file that should be loaded. If not + specified TypeDoc will look for 'tsconfig.json' in the current directory. +- `--exclude `
+ Exclude files by the given pattern when a path is provided as source. + Supports standard minimatch patterns. + +#### Theming + +- `--theme `
+ Specify the theme that should be used. +- `--name `
+ Set the name of the project that will be used in the header of the template. +- `--readme `
+ Path to the readme file that should be displayed on the index page. Pass `none` to disable the index page + and start the documentation on the globals page. + +#### Miscellaneous + +- `--version`
+ Display the version number of TypeDoc. +- `--help`
+ Display all TypeDoc options. + +## Contributing + +This project is maintained by a community of developers. Contributions are welcome and appreciated. +You can find TypeDoc on GitHub; feel free to open an issue or create a pull request: +https://github.com/TypeStrong/typedoc + +For more information, read the [contribution guide](https://github.com/TypeStrong/typedoc/blob/master/.github/CONTRIBUTING.md). diff --git a/node_modules/typedoc/bin/package.json b/node_modules/typedoc/bin/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/node_modules/typedoc/bin/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/typedoc/bin/typedoc b/node_modules/typedoc/bin/typedoc new file mode 100755 index 0000000..d7f1ae6 --- /dev/null +++ b/node_modules/typedoc/bin/typedoc @@ -0,0 +1,24 @@ +#!/usr/bin/env node +//@ts-check + +const { fork } = require("child_process"); + +function main() { + fork(__dirname + "/../dist/lib/cli.js", process.argv.slice(2), { + stdio: "inherit", + }).on("exit", (code) => { + // Watch restart required? 
Fork a new child + if (code === 7) { + // Set an environment variable to ensure we continue watching + // Otherwise, the watch might stop unexpectedly if the watch + // option was set in a config file originally, and change to false + // later, causing a restart + process.env["TYPEDOC_FORCE_WATCH"] = "1"; + main(); + } else { + process.exit(code || 0); + } + }); +} + +main(); diff --git a/node_modules/typedoc/package.json b/node_modules/typedoc/package.json new file mode 100644 index 0000000..b517d0b --- /dev/null +++ b/node_modules/typedoc/package.json @@ -0,0 +1,107 @@ +{ + "name": "typedoc", + "description": "Create api documentation for TypeScript projects.", + "version": "0.27.8", + "homepage": "https://typedoc.org", + "type": "module", + "exports": { + ".": "./dist/index.js", + "./tsdoc.json": "./tsdoc.json", + "./package.json": "./package.json", + "./debug": "./dist/lib/debug/index.js" + }, + "types": "./dist/index.d.ts", + "bin": { + "typedoc": "bin/typedoc" + }, + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git://github.com/TypeStrong/TypeDoc.git" + }, + "bugs": { + "url": "https://github.com/TypeStrong/TypeDoc/issues" + }, + "engines": { + "node": ">= 18" + }, + "dependencies": { + "@gerrit0/mini-shiki": "^1.24.0", + "lunr": "^2.3.9", + "markdown-it": "^14.1.0", + "minimatch": "^9.0.5", + "yaml": "^2.6.1" + }, + "peerDependencies": { + "typescript": "5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x || 5.5.x || 5.6.x || 5.7.x" + }, + "devDependencies": { + "@types/lunr": "^2.3.7", + "@types/markdown-it": "^14.1.2", + "@types/mocha": "^10.0.10", + "@types/node": "18", + "@typestrong/fs-fixture-builder": "github:TypeStrong/fs-fixture-builder#34113409e3a171e68ce5e2b55461ef5c35591cfe", + "c8": "^10.1.2", + "esbuild": "^0.24.0", + "eslint": "^9.15.0", + "mocha": "^10.8.2", + "prettier": "3.3.3", + "puppeteer": "^23.6.1", + "semver": "^7.6.3", + "tsx": "^4.19.2", + "typescript": "5.7.2", + "typescript-eslint": "^8.15.0" + }, + "files": [ + 
"/bin", + "!*.map", + "/dist", + "!/dist/tmp", + "!/dist/.tsbuildinfo", + "!/dist/test", + "/LICENSE", + "/static", + "/tsdoc.json" + ], + "scripts": { + "test": "mocha --config .config/mocha.fast.json", + "test:cov": "c8 -r lcov mocha --config .config/mocha.fast.json", + "doc:c": "node bin/typedoc --tsconfig src/test/converter/tsconfig.json", + "doc:cd": "node --inspect-brk bin/typedoc --tsconfig src/test/converter/tsconfig.json", + "doc:c2": "node bin/typedoc --options src/test/converter2 --tsconfig src/test/converter2/tsconfig.json", + "doc:c2d": "node --inspect-brk bin/typedoc --options src/test/converter2 --tsconfig src/test/converter2/tsconfig.json", + "example": "cd example && node ../bin/typedoc", + "test:full": "c8 -r lcov -r text-summary mocha --config .config/mocha.full.json", + "rebuild_specs": "node scripts/rebuild_specs.js", + "build": "npm run build:tsc && npm run build:themes", + "build:tsc": "tsc --project .", + "build:themes": "node scripts/build_themes.js", + "build:prod": "npm run build:prod:tsc && npm run build:themes", + "build:prod:tsc": "tsc --project . --sourceMap false --declarationMap false", + "lint": "eslint . 
--max-warnings 0 && npm run prettier -- --check .", + "prettier": "prettier --config .config/.prettierrc.json --ignore-path .config/.prettierignore", + "prepack": "node scripts/set_strict.js false && npm run build:prod", + "prepare": "node scripts/prepare.mjs", + "postpublish": "node scripts/set_strict.js true" + }, + "keywords": [ + "typescript", + "documentation", + "generator" + ], + "c8": { + "extension": [ + ".ts", + ".tsx" + ], + "reporter": [ + "html-spa", + "text-summary" + ], + "exclude": [ + "**/*.d.ts", + "src/test", + "tmp" + ] + } +} diff --git a/node_modules/typedoc/static/main.js b/node_modules/typedoc/static/main.js new file mode 100644 index 0000000..6411353 --- /dev/null +++ b/node_modules/typedoc/static/main.js @@ -0,0 +1,58 @@ +"use strict";(()=>{var De=Object.create;var le=Object.defineProperty;var Fe=Object.getOwnPropertyDescriptor;var Ne=Object.getOwnPropertyNames;var Ve=Object.getPrototypeOf,Be=Object.prototype.hasOwnProperty;var qe=(t,e)=>()=>(e||t((e={exports:{}}).exports,e),e.exports);var je=(t,e,n,r)=>{if(e&&typeof e=="object"||typeof e=="function")for(let i of Ne(e))!Be.call(t,i)&&i!==n&&le(t,i,{get:()=>e[i],enumerable:!(r=Fe(e,i))||r.enumerable});return t};var $e=(t,e,n)=>(n=t!=null?De(Ve(t)):{},je(e||!t||!t.__esModule?le(n,"default",{value:t,enumerable:!0}):n,t));var pe=qe((de,he)=>{(function(){var t=function(e){var n=new t.Builder;return n.pipeline.add(t.trimmer,t.stopWordFilter,t.stemmer),n.searchPipeline.add(t.stemmer),e.call(n,n),n.build()};t.version="2.3.9";t.utils={},t.utils.warn=function(e){return function(n){e.console&&console.warn&&console.warn(n)}}(this),t.utils.asString=function(e){return e==null?"":e.toString()},t.utils.clone=function(e){if(e==null)return e;for(var n=Object.create(null),r=Object.keys(e),i=0;i0){var d=t.utils.clone(n)||{};d.position=[a,c],d.index=s.length,s.push(new t.Token(r.slice(a,o),d))}a=o+1}}return 
s},t.tokenizer.separator=/[\s\-]+/;t.Pipeline=function(){this._stack=[]},t.Pipeline.registeredFunctions=Object.create(null),t.Pipeline.registerFunction=function(e,n){n in this.registeredFunctions&&t.utils.warn("Overwriting existing registered function: "+n),e.label=n,t.Pipeline.registeredFunctions[e.label]=e},t.Pipeline.warnIfFunctionNotRegistered=function(e){var n=e.label&&e.label in this.registeredFunctions;n||t.utils.warn(`Function is not registered with pipeline. This may cause problems when serialising the index. +`,e)},t.Pipeline.load=function(e){var n=new t.Pipeline;return e.forEach(function(r){var i=t.Pipeline.registeredFunctions[r];if(i)n.add(i);else throw new Error("Cannot load unregistered function: "+r)}),n},t.Pipeline.prototype.add=function(){var e=Array.prototype.slice.call(arguments);e.forEach(function(n){t.Pipeline.warnIfFunctionNotRegistered(n),this._stack.push(n)},this)},t.Pipeline.prototype.after=function(e,n){t.Pipeline.warnIfFunctionNotRegistered(n);var r=this._stack.indexOf(e);if(r==-1)throw new Error("Cannot find existingFn");r=r+1,this._stack.splice(r,0,n)},t.Pipeline.prototype.before=function(e,n){t.Pipeline.warnIfFunctionNotRegistered(n);var r=this._stack.indexOf(e);if(r==-1)throw new Error("Cannot find existingFn");this._stack.splice(r,0,n)},t.Pipeline.prototype.remove=function(e){var n=this._stack.indexOf(e);n!=-1&&this._stack.splice(n,1)},t.Pipeline.prototype.run=function(e){for(var n=this._stack.length,r=0;r1&&(oe&&(r=s),o!=e);)i=r-n,s=n+Math.floor(i/2),o=this.elements[s*2];if(o==e||o>e)return s*2;if(ol?d+=2:a==l&&(n+=r[c+1]*i[d+1],c+=2,d+=2);return n},t.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},t.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),n=1,r=0;n0){var o=s.str.charAt(0),a;o in s.node.edges?a=s.node.edges[o]:(a=new 
t.TokenSet,s.node.edges[o]=a),s.str.length==1&&(a.final=!0),i.push({node:a,editsRemaining:s.editsRemaining,str:s.str.slice(1)})}if(s.editsRemaining!=0){if("*"in s.node.edges)var l=s.node.edges["*"];else{var l=new t.TokenSet;s.node.edges["*"]=l}if(s.str.length==0&&(l.final=!0),i.push({node:l,editsRemaining:s.editsRemaining-1,str:s.str}),s.str.length>1&&i.push({node:s.node,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)}),s.str.length==1&&(s.node.final=!0),s.str.length>=1){if("*"in s.node.edges)var c=s.node.edges["*"];else{var c=new t.TokenSet;s.node.edges["*"]=c}s.str.length==1&&(c.final=!0),i.push({node:c,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)})}if(s.str.length>1){var d=s.str.charAt(0),m=s.str.charAt(1),p;m in s.node.edges?p=s.node.edges[m]:(p=new t.TokenSet,s.node.edges[m]=p),s.str.length==1&&(p.final=!0),i.push({node:p,editsRemaining:s.editsRemaining-1,str:d+s.str.slice(2)})}}}return r},t.TokenSet.fromString=function(e){for(var n=new t.TokenSet,r=n,i=0,s=e.length;i=e;n--){var r=this.uncheckedNodes[n],i=r.child.toString();i in this.minimizedNodes?r.parent.edges[r.char]=this.minimizedNodes[i]:(r.child._str=i,this.minimizedNodes[i]=r.child),this.uncheckedNodes.pop()}};t.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},t.Index.prototype.search=function(e){return this.query(function(n){var r=new t.QueryParser(e,n);r.parse()})},t.Index.prototype.query=function(e){for(var n=new t.Query(this.fields),r=Object.create(null),i=Object.create(null),s=Object.create(null),o=Object.create(null),a=Object.create(null),l=0;l1?this._b=1:this._b=e},t.Builder.prototype.k1=function(e){this._k1=e},t.Builder.prototype.add=function(e,n){var r=e[this._ref],i=Object.keys(this._fields);this._documents[r]=n||{},this.documentCount+=1;for(var s=0;s=this.length)return t.QueryLexer.EOS;var e=this.str.charAt(this.pos);return 
this.pos+=1,e},t.QueryLexer.prototype.width=function(){return this.pos-this.start},t.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},t.QueryLexer.prototype.backup=function(){this.pos-=1},t.QueryLexer.prototype.acceptDigitRun=function(){var e,n;do e=this.next(),n=e.charCodeAt(0);while(n>47&&n<58);e!=t.QueryLexer.EOS&&this.backup()},t.QueryLexer.prototype.more=function(){return this.pos1&&(e.backup(),e.emit(t.QueryLexer.TERM)),e.ignore(),e.more())return t.QueryLexer.lexText},t.QueryLexer.lexEditDistance=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.EDIT_DISTANCE),t.QueryLexer.lexText},t.QueryLexer.lexBoost=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.BOOST),t.QueryLexer.lexText},t.QueryLexer.lexEOS=function(e){e.width()>0&&e.emit(t.QueryLexer.TERM)},t.QueryLexer.termSeparator=t.tokenizer.separator,t.QueryLexer.lexText=function(e){for(;;){var n=e.next();if(n==t.QueryLexer.EOS)return t.QueryLexer.lexEOS;if(n.charCodeAt(0)==92){e.escapeCharacter();continue}if(n==":")return t.QueryLexer.lexField;if(n=="~")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexEditDistance;if(n=="^")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexBoost;if(n=="+"&&e.width()===1||n=="-"&&e.width()===1)return e.emit(t.QueryLexer.PRESENCE),t.QueryLexer.lexText;if(n.match(t.QueryLexer.termSeparator))return t.QueryLexer.lexTerm}},t.QueryParser=function(e,n){this.lexer=new t.QueryLexer(e),this.query=n,this.currentClause={},this.lexemeIdx=0},t.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var e=t.QueryParser.parseClause;e;)e=e(this);return this.query},t.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},t.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},t.QueryParser.prototype.nextClause=function(){var 
e=this.currentClause;this.query.clause(e),this.currentClause={}},t.QueryParser.parseClause=function(e){var n=e.peekLexeme();if(n!=null)switch(n.type){case t.QueryLexer.PRESENCE:return t.QueryParser.parsePresence;case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var r="expected either a field or a term, found "+n.type;throw n.str.length>=1&&(r+=" with value '"+n.str+"'"),new t.QueryParseError(r,n.start,n.end)}},t.QueryParser.parsePresence=function(e){var n=e.consumeLexeme();if(n!=null){switch(n.str){case"-":e.currentClause.presence=t.Query.presence.PROHIBITED;break;case"+":e.currentClause.presence=t.Query.presence.REQUIRED;break;default:var r="unrecognised presence operator'"+n.str+"'";throw new t.QueryParseError(r,n.start,n.end)}var i=e.peekLexeme();if(i==null){var r="expecting term or field, found nothing";throw new t.QueryParseError(r,n.start,n.end)}switch(i.type){case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var r="expecting term or field, found '"+i.type+"'";throw new t.QueryParseError(r,i.start,i.end)}}},t.QueryParser.parseField=function(e){var n=e.consumeLexeme();if(n!=null){if(e.query.allFields.indexOf(n.str)==-1){var r=e.query.allFields.map(function(o){return"'"+o+"'"}).join(", "),i="unrecognised field '"+n.str+"', possible fields: "+r;throw new t.QueryParseError(i,n.start,n.end)}e.currentClause.fields=[n.str];var s=e.peekLexeme();if(s==null){var i="expecting term, found nothing";throw new t.QueryParseError(i,n.start,n.end)}switch(s.type){case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var i="expecting term, found '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseTerm=function(e){var n=e.consumeLexeme();if(n!=null){e.currentClause.term=n.str.toLowerCase(),n.str.indexOf("*")!=-1&&(e.currentClause.usePipeline=!1);var 
r=e.peekLexeme();if(r==null){e.nextClause();return}switch(r.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+r.type+"'";throw new t.QueryParseError(i,r.start,r.end)}}},t.QueryParser.parseEditDistance=function(e){var n=e.consumeLexeme();if(n!=null){var r=parseInt(n.str,10);if(isNaN(r)){var i="edit distance must be numeric";throw new t.QueryParseError(i,n.start,n.end)}e.currentClause.editDistance=r;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseBoost=function(e){var n=e.consumeLexeme();if(n!=null){var r=parseInt(n.str,10);if(isNaN(r)){var i="boost must be numeric";throw new t.QueryParseError(i,n.start,n.end)}e.currentClause.boost=r;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new 
t.QueryParseError(i,s.start,s.end)}}},function(e,n){typeof define=="function"&&define.amd?define(n):typeof de=="object"?he.exports=n():e.lunr=n()}(this,function(){return t})})()});window.translations||={copy:"Copy",copied:"Copied!",normally_hidden:"This member is normally hidden due to your filter settings.",hierarchy_expand:"Expand",hierarchy_collapse:"Collapse",folder:"Folder",kind_1:"Project",kind_2:"Module",kind_4:"Namespace",kind_8:"Enumeration",kind_16:"Enumeration Member",kind_32:"Variable",kind_64:"Function",kind_128:"Class",kind_256:"Interface",kind_512:"Constructor",kind_1024:"Property",kind_2048:"Method",kind_4096:"Call Signature",kind_8192:"Index Signature",kind_16384:"Constructor Signature",kind_32768:"Parameter",kind_65536:"Type Literal",kind_131072:"Type Parameter",kind_262144:"Accessor",kind_524288:"Get Signature",kind_1048576:"Set Signature",kind_2097152:"Type Alias",kind_4194304:"Reference",kind_8388608:"Document"};var ce=[];function G(t,e){ce.push({selector:e,constructor:t})}var J=class{alwaysVisibleMember=null;constructor(){this.createComponents(document.body),this.ensureFocusedElementVisible(),this.listenForCodeCopies(),window.addEventListener("hashchange",()=>this.ensureFocusedElementVisible()),document.body.style.display||(this.ensureFocusedElementVisible(),this.updateIndexVisibility(),this.scrollToHash())}createComponents(e){ce.forEach(n=>{e.querySelectorAll(n.selector).forEach(r=>{r.dataset.hasInstance||(new n.constructor({el:r,app:this}),r.dataset.hasInstance=String(!0))})})}filterChanged(){this.ensureFocusedElementVisible()}showPage(){document.body.style.display&&(document.body.style.removeProperty("display"),this.ensureFocusedElementVisible(),this.updateIndexVisibility(),this.scrollToHash())}scrollToHash(){if(location.hash){let e=document.getElementById(location.hash.substring(1));if(!e)return;e.scrollIntoView({behavior:"instant",block:"start"})}}ensureActivePageVisible(){let e=document.querySelector(".tsd-navigation 
.current"),n=e?.parentElement;for(;n&&!n.classList.contains(".tsd-navigation");)n instanceof HTMLDetailsElement&&(n.open=!0),n=n.parentElement;if(e&&!ze(e)){let r=e.getBoundingClientRect().top-document.documentElement.clientHeight/4;document.querySelector(".site-menu").scrollTop=r,document.querySelector(".col-sidebar").scrollTop=r}}updateIndexVisibility(){let e=document.querySelector(".tsd-index-content"),n=e?.open;e&&(e.open=!0),document.querySelectorAll(".tsd-index-section").forEach(r=>{r.style.display="block";let i=Array.from(r.querySelectorAll(".tsd-index-link")).every(s=>s.offsetParent==null);r.style.display=i?"none":"block"}),e&&(e.open=n)}ensureFocusedElementVisible(){if(this.alwaysVisibleMember&&(this.alwaysVisibleMember.classList.remove("always-visible"),this.alwaysVisibleMember.firstElementChild.remove(),this.alwaysVisibleMember=null),!location.hash)return;let e=document.getElementById(location.hash.substring(1));if(!e)return;let n=e.parentElement;for(;n&&n.tagName!=="SECTION";)n=n.parentElement;if(!n)return;let r=n.offsetParent==null,i=n;for(;i!==document.body;)i instanceof HTMLDetailsElement&&(i.open=!0),i=i.parentElement;if(n.offsetParent==null){this.alwaysVisibleMember=n,n.classList.add("always-visible");let s=document.createElement("p");s.classList.add("warning"),s.textContent=window.translations.normally_hidden,n.prepend(s)}r&&e.scrollIntoView()}listenForCodeCopies(){document.querySelectorAll("pre > button").forEach(e=>{let n;e.addEventListener("click",()=>{e.previousElementSibling instanceof HTMLElement&&navigator.clipboard.writeText(e.previousElementSibling.innerText.trim()),e.textContent=window.translations.copied,e.classList.add("visible"),clearTimeout(n),n=setTimeout(()=>{e.classList.remove("visible"),n=setTimeout(()=>{e.textContent=window.translations.copy},100)},1e3)})})}};function ze(t){let e=t.getBoundingClientRect(),n=Math.max(document.documentElement.clientHeight,window.innerHeight);return!(e.bottom<0||e.top-n>=0)}var ue=(t,e=100)=>{let 
n;return()=>{clearTimeout(n),n=setTimeout(()=>t(),e)}};var ge=$e(pe(),1);async function A(t){let e=Uint8Array.from(atob(t),s=>s.charCodeAt(0)),r=new Blob([e]).stream().pipeThrough(new DecompressionStream("deflate")),i=await new Response(r).text();return JSON.parse(i)}async function fe(t,e){if(!window.searchData)return;let n=await A(window.searchData);t.data=n,t.index=ge.Index.load(n.index),e.classList.remove("loading"),e.classList.add("ready")}function ve(){let t=document.getElementById("tsd-search");if(!t)return;let e={base:document.documentElement.dataset.base+"/"},n=document.getElementById("tsd-search-script");t.classList.add("loading"),n&&(n.addEventListener("error",()=>{t.classList.remove("loading"),t.classList.add("failure")}),n.addEventListener("load",()=>{fe(e,t)}),fe(e,t));let r=document.querySelector("#tsd-search input"),i=document.querySelector("#tsd-search .results");if(!r||!i)throw new Error("The input field or the result list wrapper was not found");i.addEventListener("mouseup",()=>{re(t)}),r.addEventListener("focus",()=>t.classList.add("has-focus")),We(t,i,r,e)}function We(t,e,n,r){n.addEventListener("input",ue(()=>{Ue(t,e,n,r)},200)),n.addEventListener("keydown",i=>{i.key=="Enter"?Je(e,t):i.key=="ArrowUp"?(me(e,n,-1),i.preventDefault()):i.key==="ArrowDown"&&(me(e,n,1),i.preventDefault())}),document.body.addEventListener("keypress",i=>{i.altKey||i.ctrlKey||i.metaKey||!n.matches(":focus")&&i.key==="/"&&(i.preventDefault(),n.focus())}),document.body.addEventListener("keyup",i=>{t.classList.contains("has-focus")&&(i.key==="Escape"||!e.matches(":focus-within")&&!n.matches(":focus"))&&(n.blur(),re(t))})}function re(t){t.classList.remove("has-focus")}function Ue(t,e,n,r){if(!r.index||!r.data)return;e.textContent="";let i=n.value.trim(),s;if(i){let o=i.split(" ").map(a=>a.length?`*${a}*`:"").join(" ");s=r.index.search(o)}else s=[];for(let o=0;oa.score-o.score);for(let o=0,a=Math.min(10,s.length);o`,d=ye(l.name,i);globalThis.DEBUG_SEARCH_WEIGHTS&&(d+=` 
(score: ${s[o].score.toFixed(2)})`),l.parent&&(d=` + ${ye(l.parent,i)}.${d}`);let m=document.createElement("li");m.classList.value=l.classes??"";let p=document.createElement("a");p.href=r.base+l.url,p.innerHTML=c+d,m.append(p),p.addEventListener("focus",()=>{e.querySelector(".current")?.classList.remove("current"),m.classList.add("current")}),e.appendChild(m)}}function me(t,e,n){let r=t.querySelector(".current");if(!r)r=t.querySelector(n==1?"li:first-child":"li:last-child"),r&&r.classList.add("current");else{let i=r;if(n===1)do i=i.nextElementSibling??void 0;while(i instanceof HTMLElement&&i.offsetParent==null);else do i=i.previousElementSibling??void 0;while(i instanceof HTMLElement&&i.offsetParent==null);i?(r.classList.remove("current"),i.classList.add("current")):n===-1&&(r.classList.remove("current"),e.focus())}}function Je(t,e){let n=t.querySelector(".current");if(n||(n=t.querySelector("li:first-child")),n){let r=n.querySelector("a");r&&(window.location.href=r.href),re(e)}}function ye(t,e){if(e==="")return t;let n=t.toLocaleLowerCase(),r=e.toLocaleLowerCase(),i=[],s=0,o=n.indexOf(r);for(;o!=-1;)i.push(ne(t.substring(s,o)),`${ne(t.substring(o,o+r.length))}`),s=o+r.length,o=n.indexOf(r,s);return i.push(ne(t.substring(s))),i.join("")}var Ge={"&":"&","<":"<",">":">","'":"'",'"':"""};function ne(t){return t.replace(/[&<>"'"]/g,e=>Ge[e])}var I=class{el;app;constructor(e){this.el=e.el,this.app=e.app}};var H="mousedown",Ee="mousemove",B="mouseup",X={x:0,y:0},xe=!1,ie=!1,Xe=!1,D=!1,be=/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent);document.documentElement.classList.add(be?"is-mobile":"not-mobile");be&&"ontouchstart"in document.documentElement&&(Xe=!0,H="touchstart",Ee="touchmove",B="touchend");document.addEventListener(H,t=>{ie=!0,D=!1;let e=H=="touchstart"?t.targetTouches[0]:t;X.y=e.pageY||0,X.x=e.pageX||0});document.addEventListener(Ee,t=>{if(ie&&!D){let 
e=H=="touchstart"?t.targetTouches[0]:t,n=X.x-(e.pageX||0),r=X.y-(e.pageY||0);D=Math.sqrt(n*n+r*r)>10}});document.addEventListener(B,()=>{ie=!1});document.addEventListener("click",t=>{xe&&(t.preventDefault(),t.stopImmediatePropagation(),xe=!1)});var Y=class extends I{active;className;constructor(e){super(e),this.className=this.el.dataset.toggle||"",this.el.addEventListener(B,n=>this.onPointerUp(n)),this.el.addEventListener("click",n=>n.preventDefault()),document.addEventListener(H,n=>this.onDocumentPointerDown(n)),document.addEventListener(B,n=>this.onDocumentPointerUp(n))}setActive(e){if(this.active==e)return;this.active=e,document.documentElement.classList.toggle("has-"+this.className,e),this.el.classList.toggle("active",e);let n=(this.active?"to-has-":"from-has-")+this.className;document.documentElement.classList.add(n),setTimeout(()=>document.documentElement.classList.remove(n),500)}onPointerUp(e){D||(this.setActive(!0),e.preventDefault())}onDocumentPointerDown(e){if(this.active){if(e.target.closest(".col-sidebar, .tsd-filter-group"))return;this.setActive(!1)}}onDocumentPointerUp(e){if(!D&&this.active&&e.target.closest(".col-sidebar")){let n=e.target.closest("a");if(n){let r=window.location.href;r.indexOf("#")!=-1&&(r=r.substring(0,r.indexOf("#"))),n.href.substring(0,r.length)==r&&setTimeout(()=>this.setActive(!1),250)}}}};var se;try{se=localStorage}catch{se={getItem(){return null},setItem(){}}}var C=se;var Le=document.head.appendChild(document.createElement("style"));Le.dataset.for="filters";var Z=class extends I{key;value;constructor(e){super(e),this.key=`filter-${this.el.name}`,this.value=this.el.checked,this.el.addEventListener("change",()=>{this.setLocalStorage(this.el.checked)}),this.setLocalStorage(this.fromLocalStorage()),Le.innerHTML+=`html:not(.${this.key}) .tsd-is-${this.el.name} { display: none; } +`,this.app.updateIndexVisibility()}fromLocalStorage(){let e=C.getItem(this.key);return 
e?e==="true":this.el.checked}setLocalStorage(e){C.setItem(this.key,e.toString()),this.value=e,this.handleValueChange()}handleValueChange(){this.el.checked=this.value,document.documentElement.classList.toggle(this.key,this.value),this.app.filterChanged(),this.app.updateIndexVisibility()}};var oe=new Map,ae=class{open;accordions=[];key;constructor(e,n){this.key=e,this.open=n}add(e){this.accordions.push(e),e.open=this.open,e.addEventListener("toggle",()=>{this.toggle(e.open)})}toggle(e){for(let n of this.accordions)n.open=e;C.setItem(this.key,e.toString())}},K=class extends I{constructor(e){super(e);let n=this.el.querySelector("summary"),r=n.querySelector("a");r&&r.addEventListener("click",()=>{location.assign(r.href)});let i=`tsd-accordion-${n.dataset.key??n.textContent.trim().replace(/\s+/g,"-").toLowerCase()}`,s;if(oe.has(i))s=oe.get(i);else{let o=C.getItem(i),a=o?o==="true":this.el.open;s=new ae(i,a),oe.set(i,s)}s.add(this.el)}};function Se(t){let e=C.getItem("tsd-theme")||"os";t.value=e,we(e),t.addEventListener("change",()=>{C.setItem("tsd-theme",t.value),we(t.value)})}function we(t){document.documentElement.dataset.theme=t}var ee;function Ce(){let t=document.getElementById("tsd-nav-script");t&&(t.addEventListener("load",Te),Te())}async function Te(){let t=document.getElementById("tsd-nav-container");if(!t||!window.navigationData)return;let e=await A(window.navigationData);ee=document.documentElement.dataset.base,ee.endsWith("/")||(ee+="/"),t.innerHTML="";for(let n of e)Ie(n,t,[]);window.app.createComponents(t),window.app.showPage(),window.app.ensureActivePageVisible()}function Ie(t,e,n){let r=e.appendChild(document.createElement("li"));if(t.children){let i=[...n,t.text],s=r.appendChild(document.createElement("details"));s.className=t.class?`${t.class} tsd-accordion`:"tsd-accordion";let o=s.appendChild(document.createElement("summary"));o.className="tsd-accordion-summary",o.dataset.key=i.join("$"),o.innerHTML='',ke(t,o);let 
a=s.appendChild(document.createElement("div"));a.className="tsd-accordion-details";let l=a.appendChild(document.createElement("ul"));l.className="tsd-nested-navigation";for(let c of t.children)Ie(c,l,i)}else ke(t,r,t.class)}function ke(t,e,n){if(t.path){let r=e.appendChild(document.createElement("a"));if(r.href=ee+t.path,n&&(r.className=n),location.pathname===r.pathname&&!r.href.includes("#")&&r.classList.add("current"),t.kind){let i=window.translations[`kind_${t.kind}`].replaceAll('"',""");r.innerHTML=``}r.appendChild(document.createElement("span")).textContent=t.text}else{let r=e.appendChild(document.createElement("span")),i=window.translations.folder.replaceAll('"',""");r.innerHTML=``,r.appendChild(document.createElement("span")).textContent=t.text}}var te=document.documentElement.dataset.base;te.endsWith("/")||(te+="/");function Pe(){document.querySelector(".tsd-full-hierarchy")?Ye():document.querySelector(".tsd-hierarchy")&&Ze()}function Ye(){document.addEventListener("click",r=>{let i=r.target;for(;i.parentElement&&i.parentElement.tagName!="LI";)i=i.parentElement;i.dataset.dropdown&&(i.dataset.dropdown=String(i.dataset.dropdown!=="true"))});let t=new Map,e=new Set;for(let r of document.querySelectorAll(".tsd-full-hierarchy [data-refl]")){let i=r.querySelector("ul");t.has(r.dataset.refl)?e.add(r.dataset.refl):i&&t.set(r.dataset.refl,i)}for(let r of e)n(r);function n(r){let i=t.get(r).cloneNode(!0);i.querySelectorAll("[id]").forEach(s=>{s.removeAttribute("id")}),i.querySelectorAll("[data-dropdown]").forEach(s=>{s.dataset.dropdown="false"});for(let s of document.querySelectorAll(`[data-refl="${r}"]`)){let o=tt(),a=s.querySelector("ul");s.insertBefore(o,a),o.dataset.dropdown=String(!!a),a||s.appendChild(i.cloneNode(!0))}}}function Ze(){let t=document.getElementById("tsd-hierarchy-script");t&&(t.addEventListener("load",Qe),Qe())}async function Qe(){let t=document.querySelector(".tsd-panel.tsd-hierarchy:has(h4 a)");if(!t||!window.hierarchyData)return;let 
e=+t.dataset.refl,n=await A(window.hierarchyData),r=t.querySelector("ul"),i=document.createElement("ul");if(i.classList.add("tsd-hierarchy"),Ke(i,n,e),r.querySelectorAll("li").length==i.querySelectorAll("li").length)return;let s=document.createElement("span");s.classList.add("tsd-hierarchy-toggle"),s.textContent=window.translations.hierarchy_expand,t.querySelector("h4 a")?.insertAdjacentElement("afterend",s),s.insertAdjacentText("beforebegin",", "),s.addEventListener("click",()=>{s.textContent===window.translations.hierarchy_expand?(r.insertAdjacentElement("afterend",i),r.remove(),s.textContent=window.translations.hierarchy_collapse):(i.insertAdjacentElement("afterend",r),i.remove(),s.textContent=window.translations.hierarchy_expand)})}function Ke(t,e,n){let r=e.roots.filter(i=>et(e,i,n));for(let i of r)t.appendChild(_e(e,i,n))}function _e(t,e,n,r=new Set){if(r.has(e))return;r.add(e);let i=t.reflections[e],s=document.createElement("li");if(s.classList.add("tsd-hierarchy-item"),e===n){let o=s.appendChild(document.createElement("span"));o.textContent=i.name,o.classList.add("tsd-hierarchy-target")}else{for(let a of i.uniqueNameParents||[]){let l=t.reflections[a],c=s.appendChild(document.createElement("a"));c.textContent=l.name,c.href=te+l.url,c.className=l.class+" tsd-signature-type",s.append(document.createTextNode("."))}let o=s.appendChild(document.createElement("a"));o.textContent=t.reflections[e].name,o.href=te+i.url,o.className=i.class+" tsd-signature-type"}if(i.children){let o=s.appendChild(document.createElement("ul"));o.classList.add("tsd-hierarchy");for(let a of i.children){let l=_e(t,a,n,r);l&&o.appendChild(l)}}return r.delete(e),s}function et(t,e,n){if(e===n)return!0;let r=new Set,i=[t.reflections[e]];for(;i.length;){let s=i.pop();if(!r.has(s)){r.add(s);for(let o of s.children||[]){if(o===n)return!0;i.push(t.reflections[o])}}}return!1}function tt(){let t=document.createElementNS("http://www.w3.org/2000/svg","svg");return 
t.setAttribute("width","20"),t.setAttribute("height","20"),t.setAttribute("viewBox","0 0 24 24"),t.setAttribute("fill","none"),t.innerHTML='',t}G(Y,"a[data-toggle]");G(K,".tsd-accordion");G(Z,".tsd-filter-item input[type=checkbox]");var Oe=document.getElementById("tsd-theme");Oe&&Se(Oe);var nt=new J;Object.defineProperty(window,"app",{value:nt});ve();Ce();Pe();})(); +/*! Bundled license information: + +lunr/lunr.js: + (** + * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9 + * Copyright (C) 2020 Oliver Nightingale + * @license MIT + *) + (*! + * lunr.utils + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Set + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.tokenizer + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Pipeline + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Vector + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.stemmer + * Copyright (C) 2020 Oliver Nightingale + * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt + *) + (*! + * lunr.stopWordFilter + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.trimmer + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.TokenSet + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Index + * Copyright (C) 2020 Oliver Nightingale + *) + (*! 
+ * lunr.Builder + * Copyright (C) 2020 Oliver Nightingale + *) +*/ diff --git a/node_modules/typedoc/static/style.css b/node_modules/typedoc/static/style.css new file mode 100644 index 0000000..2ab8b83 --- /dev/null +++ b/node_modules/typedoc/static/style.css @@ -0,0 +1,1611 @@ +@layer typedoc { + :root { + /* Light */ + --light-color-background: #f2f4f8; + --light-color-background-secondary: #eff0f1; + --light-color-warning-text: #222; + --light-color-background-warning: #e6e600; + --light-color-accent: #c5c7c9; + --light-color-active-menu-item: var(--light-color-accent); + --light-color-text: #222; + --light-color-text-aside: #6e6e6e; + + --light-color-icon-background: var(--light-color-background); + --light-color-icon-text: var(--light-color-text); + + --light-color-comment-tag-text: var(--light-color-text); + --light-color-comment-tag: var(--light-color-background); + + --light-color-link: #1f70c2; + --light-color-focus-outline: #3584e4; + + --light-color-ts-keyword: #056bd6; + --light-color-ts-project: #b111c9; + --light-color-ts-module: var(--light-color-ts-project); + --light-color-ts-namespace: var(--light-color-ts-project); + --light-color-ts-enum: #7e6f15; + --light-color-ts-enum-member: var(--light-color-ts-enum); + --light-color-ts-variable: #4760ec; + --light-color-ts-function: #572be7; + --light-color-ts-class: #1f70c2; + --light-color-ts-interface: #108024; + --light-color-ts-constructor: var(--light-color-ts-class); + --light-color-ts-property: #9f5f30; + --light-color-ts-method: #be3989; + --light-color-ts-reference: #ff4d82; + --light-color-ts-call-signature: var(--light-color-ts-method); + --light-color-ts-index-signature: var(--light-color-ts-property); + --light-color-ts-constructor-signature: var( + --light-color-ts-constructor + ); + --light-color-ts-parameter: var(--light-color-ts-variable); + /* type literal not included as links will never be generated to it */ + --light-color-ts-type-parameter: #a55c0e; + --light-color-ts-accessor: 
#c73c3c; + --light-color-ts-get-signature: var(--light-color-ts-accessor); + --light-color-ts-set-signature: var(--light-color-ts-accessor); + --light-color-ts-type-alias: #d51270; + /* reference not included as links will be colored with the kind that it points to */ + --light-color-document: #000000; + + --light-color-alert-note: #0969d9; + --light-color-alert-tip: #1a7f37; + --light-color-alert-important: #8250df; + --light-color-alert-warning: #9a6700; + --light-color-alert-caution: #cf222e; + + --light-external-icon: url("data:image/svg+xml;utf8,"); + --light-color-scheme: light; + + /* Dark */ + --dark-color-background: #2b2e33; + --dark-color-background-secondary: #1e2024; + --dark-color-background-warning: #bebe00; + --dark-color-warning-text: #222; + --dark-color-accent: #9096a2; + --dark-color-active-menu-item: #5d5d6a; + --dark-color-text: #f5f5f5; + --dark-color-text-aside: #dddddd; + + --dark-color-icon-background: var(--dark-color-background-secondary); + --dark-color-icon-text: var(--dark-color-text); + + --dark-color-comment-tag-text: var(--dark-color-text); + --dark-color-comment-tag: var(--dark-color-background); + + --dark-color-link: #00aff4; + --dark-color-focus-outline: #4c97f2; + + --dark-color-ts-keyword: #3399ff; + --dark-color-ts-project: #e358ff; + --dark-color-ts-module: var(--dark-color-ts-project); + --dark-color-ts-namespace: var(--dark-color-ts-project); + --dark-color-ts-enum: #f4d93e; + --dark-color-ts-enum-member: var(--dark-color-ts-enum); + --dark-color-ts-variable: #798dff; + --dark-color-ts-function: #a280ff; + --dark-color-ts-class: #8ac4ff; + --dark-color-ts-interface: #6cff87; + --dark-color-ts-constructor: var(--dark-color-ts-class); + --dark-color-ts-property: #ff984d; + --dark-color-ts-method: #ff4db8; + --dark-color-ts-reference: #ff4d82; + --dark-color-ts-call-signature: var(--dark-color-ts-method); + --dark-color-ts-index-signature: var(--dark-color-ts-property); + --dark-color-ts-constructor-signature: 
var(--dark-color-ts-constructor); + --dark-color-ts-parameter: var(--dark-color-ts-variable); + /* type literal not included as links will never be generated to it */ + --dark-color-ts-type-parameter: #e07d13; + --dark-color-ts-accessor: #ff6060; + --dark-color-ts-get-signature: var(--dark-color-ts-accessor); + --dark-color-ts-set-signature: var(--dark-color-ts-accessor); + --dark-color-ts-type-alias: #ff6492; + /* reference not included as links will be colored with the kind that it points to */ + --dark-color-document: #ffffff; + + --dark-color-alert-note: #0969d9; + --dark-color-alert-tip: #1a7f37; + --dark-color-alert-important: #8250df; + --dark-color-alert-warning: #9a6700; + --dark-color-alert-caution: #cf222e; + + --dark-external-icon: url("data:image/svg+xml;utf8,"); + --dark-color-scheme: dark; + } + + @media (prefers-color-scheme: light) { + :root { + --color-background: var(--light-color-background); + --color-background-secondary: var( + --light-color-background-secondary + ); + --color-background-warning: var(--light-color-background-warning); + --color-warning-text: var(--light-color-warning-text); + --color-accent: var(--light-color-accent); + --color-active-menu-item: var(--light-color-active-menu-item); + --color-text: var(--light-color-text); + --color-text-aside: var(--light-color-text-aside); + + --color-icon-background: var(--light-color-icon-background); + --color-icon-text: var(--light-color-icon-text); + + --color-comment-tag-text: var(--light-color-text); + --color-comment-tag: var(--light-color-background); + + --color-link: var(--light-color-link); + --color-focus-outline: var(--light-color-focus-outline); + + --color-ts-keyword: var(--light-color-ts-keyword); + --color-ts-project: var(--light-color-ts-project); + --color-ts-module: var(--light-color-ts-module); + --color-ts-namespace: var(--light-color-ts-namespace); + --color-ts-enum: var(--light-color-ts-enum); + --color-ts-enum-member: var(--light-color-ts-enum-member); + 
--color-ts-variable: var(--light-color-ts-variable); + --color-ts-function: var(--light-color-ts-function); + --color-ts-class: var(--light-color-ts-class); + --color-ts-interface: var(--light-color-ts-interface); + --color-ts-constructor: var(--light-color-ts-constructor); + --color-ts-property: var(--light-color-ts-property); + --color-ts-method: var(--light-color-ts-method); + --color-ts-reference: var(--light-color-ts-reference); + --color-ts-call-signature: var(--light-color-ts-call-signature); + --color-ts-index-signature: var(--light-color-ts-index-signature); + --color-ts-constructor-signature: var( + --light-color-ts-constructor-signature + ); + --color-ts-parameter: var(--light-color-ts-parameter); + --color-ts-type-parameter: var(--light-color-ts-type-parameter); + --color-ts-accessor: var(--light-color-ts-accessor); + --color-ts-get-signature: var(--light-color-ts-get-signature); + --color-ts-set-signature: var(--light-color-ts-set-signature); + --color-ts-type-alias: var(--light-color-ts-type-alias); + --color-document: var(--light-color-document); + + --color-alert-note: var(--light-color-alert-note); + --color-alert-tip: var(--light-color-alert-tip); + --color-alert-important: var(--light-color-alert-important); + --color-alert-warning: var(--light-color-alert-warning); + --color-alert-caution: var(--light-color-alert-caution); + + --external-icon: var(--light-external-icon); + --color-scheme: var(--light-color-scheme); + } + } + + @media (prefers-color-scheme: dark) { + :root { + --color-background: var(--dark-color-background); + --color-background-secondary: var( + --dark-color-background-secondary + ); + --color-background-warning: var(--dark-color-background-warning); + --color-warning-text: var(--dark-color-warning-text); + --color-accent: var(--dark-color-accent); + --color-active-menu-item: var(--dark-color-active-menu-item); + --color-text: var(--dark-color-text); + --color-text-aside: var(--dark-color-text-aside); + + 
--color-icon-background: var(--dark-color-icon-background); + --color-icon-text: var(--dark-color-icon-text); + + --color-comment-tag-text: var(--dark-color-text); + --color-comment-tag: var(--dark-color-background); + + --color-link: var(--dark-color-link); + --color-focus-outline: var(--dark-color-focus-outline); + + --color-ts-keyword: var(--dark-color-ts-keyword); + --color-ts-project: var(--dark-color-ts-project); + --color-ts-module: var(--dark-color-ts-module); + --color-ts-namespace: var(--dark-color-ts-namespace); + --color-ts-enum: var(--dark-color-ts-enum); + --color-ts-enum-member: var(--dark-color-ts-enum-member); + --color-ts-variable: var(--dark-color-ts-variable); + --color-ts-function: var(--dark-color-ts-function); + --color-ts-class: var(--dark-color-ts-class); + --color-ts-interface: var(--dark-color-ts-interface); + --color-ts-constructor: var(--dark-color-ts-constructor); + --color-ts-property: var(--dark-color-ts-property); + --color-ts-method: var(--dark-color-ts-method); + --color-ts-reference: var(--dark-color-ts-reference); + --color-ts-call-signature: var(--dark-color-ts-call-signature); + --color-ts-index-signature: var(--dark-color-ts-index-signature); + --color-ts-constructor-signature: var( + --dark-color-ts-constructor-signature + ); + --color-ts-parameter: var(--dark-color-ts-parameter); + --color-ts-type-parameter: var(--dark-color-ts-type-parameter); + --color-ts-accessor: var(--dark-color-ts-accessor); + --color-ts-get-signature: var(--dark-color-ts-get-signature); + --color-ts-set-signature: var(--dark-color-ts-set-signature); + --color-ts-type-alias: var(--dark-color-ts-type-alias); + --color-document: var(--dark-color-document); + + --color-alert-note: var(--dark-color-alert-note); + --color-alert-tip: var(--dark-color-alert-tip); + --color-alert-important: var(--dark-color-alert-important); + --color-alert-warning: var(--dark-color-alert-warning); + --color-alert-caution: var(--dark-color-alert-caution); + + --external-icon: 
var(--dark-external-icon); + --color-scheme: var(--dark-color-scheme); + } + } + + html { + color-scheme: var(--color-scheme); + } + + body { + margin: 0; + } + + :root[data-theme="light"] { + --color-background: var(--light-color-background); + --color-background-secondary: var(--light-color-background-secondary); + --color-background-warning: var(--light-color-background-warning); + --color-warning-text: var(--light-color-warning-text); + --color-icon-background: var(--light-color-icon-background); + --color-accent: var(--light-color-accent); + --color-active-menu-item: var(--light-color-active-menu-item); + --color-text: var(--light-color-text); + --color-text-aside: var(--light-color-text-aside); + --color-icon-text: var(--light-color-icon-text); + + --color-comment-tag-text: var(--light-color-text); + --color-comment-tag: var(--light-color-background); + + --color-link: var(--light-color-link); + --color-focus-outline: var(--light-color-focus-outline); + + --color-ts-keyword: var(--light-color-ts-keyword); + --color-ts-project: var(--light-color-ts-project); + --color-ts-module: var(--light-color-ts-module); + --color-ts-namespace: var(--light-color-ts-namespace); + --color-ts-enum: var(--light-color-ts-enum); + --color-ts-enum-member: var(--light-color-ts-enum-member); + --color-ts-variable: var(--light-color-ts-variable); + --color-ts-function: var(--light-color-ts-function); + --color-ts-class: var(--light-color-ts-class); + --color-ts-interface: var(--light-color-ts-interface); + --color-ts-constructor: var(--light-color-ts-constructor); + --color-ts-property: var(--light-color-ts-property); + --color-ts-method: var(--light-color-ts-method); + --color-ts-reference: var(--light-color-ts-reference); + --color-ts-call-signature: var(--light-color-ts-call-signature); + --color-ts-index-signature: var(--light-color-ts-index-signature); + --color-ts-constructor-signature: var( + --light-color-ts-constructor-signature + ); + --color-ts-parameter: 
var(--light-color-ts-parameter); + --color-ts-type-parameter: var(--light-color-ts-type-parameter); + --color-ts-accessor: var(--light-color-ts-accessor); + --color-ts-get-signature: var(--light-color-ts-get-signature); + --color-ts-set-signature: var(--light-color-ts-set-signature); + --color-ts-type-alias: var(--light-color-ts-type-alias); + --color-document: var(--light-color-document); + + --color-note: var(--light-color-note); + --color-tip: var(--light-color-tip); + --color-important: var(--light-color-important); + --color-warning: var(--light-color-warning); + --color-caution: var(--light-color-caution); + + --external-icon: var(--light-external-icon); + --color-scheme: var(--light-color-scheme); + } + + :root[data-theme="dark"] { + --color-background: var(--dark-color-background); + --color-background-secondary: var(--dark-color-background-secondary); + --color-background-warning: var(--dark-color-background-warning); + --color-warning-text: var(--dark-color-warning-text); + --color-icon-background: var(--dark-color-icon-background); + --color-accent: var(--dark-color-accent); + --color-active-menu-item: var(--dark-color-active-menu-item); + --color-text: var(--dark-color-text); + --color-text-aside: var(--dark-color-text-aside); + --color-icon-text: var(--dark-color-icon-text); + + --color-comment-tag-text: var(--dark-color-text); + --color-comment-tag: var(--dark-color-background); + + --color-link: var(--dark-color-link); + --color-focus-outline: var(--dark-color-focus-outline); + + --color-ts-keyword: var(--dark-color-ts-keyword); + --color-ts-project: var(--dark-color-ts-project); + --color-ts-module: var(--dark-color-ts-module); + --color-ts-namespace: var(--dark-color-ts-namespace); + --color-ts-enum: var(--dark-color-ts-enum); + --color-ts-enum-member: var(--dark-color-ts-enum-member); + --color-ts-variable: var(--dark-color-ts-variable); + --color-ts-function: var(--dark-color-ts-function); + --color-ts-class: var(--dark-color-ts-class); + 
--color-ts-interface: var(--dark-color-ts-interface); + --color-ts-constructor: var(--dark-color-ts-constructor); + --color-ts-property: var(--dark-color-ts-property); + --color-ts-method: var(--dark-color-ts-method); + --color-ts-reference: var(--dark-color-ts-reference); + --color-ts-call-signature: var(--dark-color-ts-call-signature); + --color-ts-index-signature: var(--dark-color-ts-index-signature); + --color-ts-constructor-signature: var( + --dark-color-ts-constructor-signature + ); + --color-ts-parameter: var(--dark-color-ts-parameter); + --color-ts-type-parameter: var(--dark-color-ts-type-parameter); + --color-ts-accessor: var(--dark-color-ts-accessor); + --color-ts-get-signature: var(--dark-color-ts-get-signature); + --color-ts-set-signature: var(--dark-color-ts-set-signature); + --color-ts-type-alias: var(--dark-color-ts-type-alias); + --color-document: var(--dark-color-document); + + --color-note: var(--dark-color-note); + --color-tip: var(--dark-color-tip); + --color-important: var(--dark-color-important); + --color-warning: var(--dark-color-warning); + --color-caution: var(--dark-color-caution); + + --external-icon: var(--dark-external-icon); + --color-scheme: var(--dark-color-scheme); + } + + *:focus-visible, + .tsd-accordion-summary:focus-visible svg { + outline: 2px solid var(--color-focus-outline); + } + + .always-visible, + .always-visible .tsd-signatures { + display: inherit !important; + } + + h1, + h2, + h3, + h4, + h5, + h6 { + line-height: 1.2; + } + + h1 { + font-size: 1.875rem; + margin: 0.67rem 0; + } + + h2 { + font-size: 1.5rem; + margin: 0.83rem 0; + } + + h3 { + font-size: 1.25rem; + margin: 1rem 0; + } + + h4 { + font-size: 1.05rem; + margin: 1.33rem 0; + } + + h5 { + font-size: 1rem; + margin: 1.5rem 0; + } + + h6 { + font-size: 0.875rem; + margin: 2.33rem 0; + } + + dl, + menu, + ol, + ul { + margin: 1em 0; + } + + dd { + margin: 0 0 0 34px; + } + + .container { + max-width: 1700px; + padding: 0 2rem; + } + + /* Footer */ + footer { 
+ border-top: 1px solid var(--color-accent); + padding-top: 1rem; + padding-bottom: 1rem; + max-height: 3.5rem; + } + footer > p { + margin: 0 1em; + } + + .container-main { + margin: 0 auto; + /* toolbar, footer, margin */ + min-height: calc(100vh - 41px - 56px - 4rem); + } + + @keyframes fade-in { + from { + opacity: 0; + } + to { + opacity: 1; + } + } + @keyframes fade-out { + from { + opacity: 1; + visibility: visible; + } + to { + opacity: 0; + } + } + @keyframes fade-in-delayed { + 0% { + opacity: 0; + } + 33% { + opacity: 0; + } + 100% { + opacity: 1; + } + } + @keyframes fade-out-delayed { + 0% { + opacity: 1; + visibility: visible; + } + 66% { + opacity: 0; + } + 100% { + opacity: 0; + } + } + @keyframes pop-in-from-right { + from { + transform: translate(100%, 0); + } + to { + transform: translate(0, 0); + } + } + @keyframes pop-out-to-right { + from { + transform: translate(0, 0); + visibility: visible; + } + to { + transform: translate(100%, 0); + } + } + body { + background: var(--color-background); + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", + Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji"; + font-size: 16px; + color: var(--color-text); + } + + a { + color: var(--color-link); + text-decoration: none; + } + a:hover { + text-decoration: underline; + } + a.external[target="_blank"] { + background-image: var(--external-icon); + background-position: top 3px right; + background-repeat: no-repeat; + padding-right: 13px; + } + a.tsd-anchor-link { + color: var(--color-text); + } + + code, + pre { + font-family: Menlo, Monaco, Consolas, "Courier New", monospace; + padding: 0.2em; + margin: 0; + font-size: 0.875rem; + border-radius: 0.8em; + } + + pre { + position: relative; + white-space: pre-wrap; + word-wrap: break-word; + padding: 10px; + border: 1px solid var(--color-accent); + margin-bottom: 8px; + } + pre code { + padding: 0; + font-size: 100%; + } + pre > button { + position: absolute; + top: 10px; + 
right: 10px; + opacity: 0; + transition: opacity 0.1s; + box-sizing: border-box; + } + pre:hover > button, + pre > button.visible { + opacity: 1; + } + + blockquote { + margin: 1em 0; + padding-left: 1em; + border-left: 4px solid gray; + } + + .tsd-typography { + line-height: 1.333em; + } + .tsd-typography ul { + list-style: square; + padding: 0 0 0 20px; + margin: 0; + } + .tsd-typography .tsd-index-panel h3, + .tsd-index-panel .tsd-typography h3, + .tsd-typography h4, + .tsd-typography h5, + .tsd-typography h6 { + font-size: 1em; + } + .tsd-typography h5, + .tsd-typography h6 { + font-weight: normal; + } + .tsd-typography p, + .tsd-typography ul, + .tsd-typography ol { + margin: 1em 0; + } + .tsd-typography table { + border-collapse: collapse; + border: none; + } + .tsd-typography td, + .tsd-typography th { + padding: 6px 13px; + border: 1px solid var(--color-accent); + } + .tsd-typography thead, + .tsd-typography tr:nth-child(even) { + background-color: var(--color-background-secondary); + } + + .tsd-alert { + padding: 8px 16px; + margin-bottom: 16px; + border-left: 0.25em solid var(--alert-color); + } + .tsd-alert blockquote > :last-child, + .tsd-alert > :last-child { + margin-bottom: 0; + } + .tsd-alert-title { + color: var(--alert-color); + display: inline-flex; + align-items: center; + } + .tsd-alert-title span { + margin-left: 4px; + } + + .tsd-alert-note { + --alert-color: var(--color-alert-note); + } + .tsd-alert-tip { + --alert-color: var(--color-alert-tip); + } + .tsd-alert-important { + --alert-color: var(--color-alert-important); + } + .tsd-alert-warning { + --alert-color: var(--color-alert-warning); + } + .tsd-alert-caution { + --alert-color: var(--color-alert-caution); + } + + .tsd-breadcrumb { + margin: 0; + padding: 0; + color: var(--color-text-aside); + } + .tsd-breadcrumb a { + color: var(--color-text-aside); + text-decoration: none; + } + .tsd-breadcrumb a:hover { + text-decoration: underline; + } + .tsd-breadcrumb li { + display: inline; + } + 
.tsd-breadcrumb li:after { + content: " / "; + } + + .tsd-comment-tags { + display: flex; + flex-direction: column; + } + dl.tsd-comment-tag-group { + display: flex; + align-items: center; + overflow: hidden; + margin: 0.5em 0; + } + dl.tsd-comment-tag-group dt { + display: flex; + margin-right: 0.5em; + font-size: 0.875em; + font-weight: normal; + } + dl.tsd-comment-tag-group dd { + margin: 0; + } + code.tsd-tag { + padding: 0.25em 0.4em; + border: 0.1em solid var(--color-accent); + margin-right: 0.25em; + font-size: 70%; + } + h1 code.tsd-tag:first-of-type { + margin-left: 0.25em; + } + + dl.tsd-comment-tag-group dd:before, + dl.tsd-comment-tag-group dd:after { + content: " "; + } + dl.tsd-comment-tag-group dd pre, + dl.tsd-comment-tag-group dd:after { + clear: both; + } + dl.tsd-comment-tag-group p { + margin: 0; + } + + .tsd-panel.tsd-comment .lead { + font-size: 1.1em; + line-height: 1.333em; + margin-bottom: 2em; + } + .tsd-panel.tsd-comment .lead:last-child { + margin-bottom: 0; + } + + .tsd-filter-visibility h4 { + font-size: 1rem; + padding-top: 0.75rem; + padding-bottom: 0.5rem; + margin: 0; + } + .tsd-filter-item:not(:last-child) { + margin-bottom: 0.5rem; + } + .tsd-filter-input { + display: flex; + width: -moz-fit-content; + width: fit-content; + align-items: center; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; + cursor: pointer; + } + .tsd-filter-input input[type="checkbox"] { + cursor: pointer; + position: absolute; + width: 1.5em; + height: 1.5em; + opacity: 0; + } + .tsd-filter-input input[type="checkbox"]:disabled { + pointer-events: none; + } + .tsd-filter-input svg { + cursor: pointer; + width: 1.5em; + height: 1.5em; + margin-right: 0.5em; + border-radius: 0.33em; + /* Leaving this at full opacity breaks event listeners on Firefox. + Don't remove unless you know what you're doing. 
*/ + opacity: 0.99; + } + .tsd-filter-input input[type="checkbox"]:focus-visible + svg { + outline: 2px solid var(--color-focus-outline); + } + .tsd-checkbox-background { + fill: var(--color-accent); + } + input[type="checkbox"]:checked ~ svg .tsd-checkbox-checkmark { + stroke: var(--color-text); + } + .tsd-filter-input input:disabled ~ svg > .tsd-checkbox-background { + fill: var(--color-background); + stroke: var(--color-accent); + stroke-width: 0.25rem; + } + .tsd-filter-input input:disabled ~ svg > .tsd-checkbox-checkmark { + stroke: var(--color-accent); + } + + .settings-label { + font-weight: bold; + text-transform: uppercase; + display: inline-block; + } + + .tsd-filter-visibility .settings-label { + margin: 0.75rem 0 0.5rem 0; + } + + .tsd-theme-toggle .settings-label { + margin: 0.75rem 0.75rem 0 0; + } + + .tsd-hierarchy h4 label:hover span { + text-decoration: underline; + } + + .tsd-hierarchy { + list-style: square; + margin: 0; + } + .tsd-hierarchy-target { + font-weight: bold; + } + .tsd-hierarchy-toggle { + color: var(--color-link); + cursor: pointer; + } + + .tsd-full-hierarchy:not(:last-child) { + margin-bottom: 1em; + padding-bottom: 1em; + border-bottom: 1px solid var(--color-accent); + } + .tsd-full-hierarchy, + .tsd-full-hierarchy ul { + list-style: none; + margin: 0; + padding: 0; + } + .tsd-full-hierarchy ul { + padding-left: 1.5rem; + } + .tsd-full-hierarchy a { + padding: 0.25rem 0 !important; + font-size: 1rem; + display: inline-flex; + align-items: center; + color: var(--color-text); + } + .tsd-full-hierarchy svg[data-dropdown] { + cursor: pointer; + } + .tsd-full-hierarchy svg[data-dropdown="false"] { + transform: rotate(-90deg); + } + .tsd-full-hierarchy svg[data-dropdown="false"] ~ ul { + display: none; + } + + .tsd-panel-group.tsd-index-group { + margin-bottom: 0; + } + .tsd-index-panel .tsd-index-list { + list-style: none; + line-height: 1.333em; + margin: 0; + padding: 0.25rem 0 0 0; + overflow: hidden; + display: grid; + 
grid-template-columns: repeat(3, 1fr); + column-gap: 1rem; + grid-template-rows: auto; + } + @media (max-width: 1024px) { + .tsd-index-panel .tsd-index-list { + grid-template-columns: repeat(2, 1fr); + } + } + @media (max-width: 768px) { + .tsd-index-panel .tsd-index-list { + grid-template-columns: repeat(1, 1fr); + } + } + .tsd-index-panel .tsd-index-list li { + -webkit-page-break-inside: avoid; + -moz-page-break-inside: avoid; + -ms-page-break-inside: avoid; + -o-page-break-inside: avoid; + page-break-inside: avoid; + } + + .tsd-flag { + display: inline-block; + padding: 0.25em 0.4em; + border-radius: 4px; + color: var(--color-comment-tag-text); + background-color: var(--color-comment-tag); + text-indent: 0; + font-size: 75%; + line-height: 1; + font-weight: normal; + } + + .tsd-anchor { + position: relative; + top: -100px; + } + + .tsd-member { + position: relative; + } + .tsd-member .tsd-anchor + h3 { + display: flex; + align-items: center; + margin-top: 0; + margin-bottom: 0; + border-bottom: none; + } + + .tsd-navigation.settings { + margin: 1rem 0; + } + .tsd-navigation > a, + .tsd-navigation .tsd-accordion-summary { + width: calc(100% - 0.25rem); + display: flex; + align-items: center; + } + .tsd-navigation a, + .tsd-navigation summary > span, + .tsd-page-navigation a { + display: flex; + width: calc(100% - 0.25rem); + align-items: center; + padding: 0.25rem; + color: var(--color-text); + text-decoration: none; + box-sizing: border-box; + } + .tsd-navigation a.current, + .tsd-page-navigation a.current { + background: var(--color-active-menu-item); + } + .tsd-navigation a:hover, + .tsd-page-navigation a:hover { + text-decoration: underline; + } + .tsd-navigation ul, + .tsd-page-navigation ul { + margin-top: 0; + margin-bottom: 0; + padding: 0; + list-style: none; + } + .tsd-navigation li, + .tsd-page-navigation li { + padding: 0; + max-width: 100%; + } + .tsd-navigation .tsd-nav-link { + display: none; + } + .tsd-nested-navigation { + margin-left: 3rem; + } 
+ .tsd-nested-navigation > li > details { + margin-left: -1.5rem; + } + .tsd-small-nested-navigation { + margin-left: 1.5rem; + } + .tsd-small-nested-navigation > li > details { + margin-left: -1.5rem; + } + + .tsd-page-navigation-section { + margin-left: 10px; + } + .tsd-page-navigation-section > summary { + padding: 0.25rem; + } + .tsd-page-navigation-section > div { + margin-left: 20px; + } + .tsd-page-navigation ul { + padding-left: 1.75rem; + } + + #tsd-sidebar-links a { + margin-top: 0; + margin-bottom: 0.5rem; + line-height: 1.25rem; + } + #tsd-sidebar-links a:last-of-type { + margin-bottom: 0; + } + + a.tsd-index-link { + padding: 0.25rem 0 !important; + font-size: 1rem; + line-height: 1.25rem; + display: inline-flex; + align-items: center; + color: var(--color-text); + } + .tsd-accordion-summary { + list-style-type: none; /* hide marker on non-safari */ + outline: none; /* broken on safari, so just hide it */ + } + .tsd-accordion-summary::-webkit-details-marker { + display: none; /* hide marker on safari */ + } + .tsd-accordion-summary, + .tsd-accordion-summary a { + -moz-user-select: none; + -webkit-user-select: none; + -ms-user-select: none; + user-select: none; + + cursor: pointer; + } + .tsd-accordion-summary a { + width: calc(100% - 1.5rem); + } + .tsd-accordion-summary > * { + margin-top: 0; + margin-bottom: 0; + padding-top: 0; + padding-bottom: 0; + } + .tsd-accordion .tsd-accordion-summary > svg { + margin-left: 0.25rem; + vertical-align: text-top; + } + /* + * We need to be careful to target the arrow indicating whether the accordion + * is open, but not any other SVGs included in the details element. 
+ */ + .tsd-accordion:not([open]) > .tsd-accordion-summary > svg:first-child, + .tsd-accordion:not([open]) > .tsd-accordion-summary > h1 > svg:first-child, + .tsd-accordion:not([open]) > .tsd-accordion-summary > h2 > svg:first-child, + .tsd-accordion:not([open]) > .tsd-accordion-summary > h3 > svg:first-child, + .tsd-accordion:not([open]) > .tsd-accordion-summary > h4 > svg:first-child, + .tsd-accordion:not([open]) > .tsd-accordion-summary > h5 > svg:first-child { + transform: rotate(-90deg); + } + .tsd-index-content > :not(:first-child) { + margin-top: 0.75rem; + } + .tsd-index-heading { + margin-top: 1.5rem; + margin-bottom: 0.75rem; + } + + .tsd-no-select { + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; + } + .tsd-kind-icon { + margin-right: 0.5rem; + width: 1.25rem; + height: 1.25rem; + min-width: 1.25rem; + min-height: 1.25rem; + } + .tsd-signature > .tsd-kind-icon { + margin-right: 0.8rem; + } + + .tsd-panel { + margin-bottom: 2.5rem; + } + .tsd-panel.tsd-member { + margin-bottom: 4rem; + } + .tsd-panel:empty { + display: none; + } + .tsd-panel > h1, + .tsd-panel > h2, + .tsd-panel > h3 { + margin: 1.5rem -1.5rem 0.75rem -1.5rem; + padding: 0 1.5rem 0.75rem 1.5rem; + } + .tsd-panel > h1.tsd-before-signature, + .tsd-panel > h2.tsd-before-signature, + .tsd-panel > h3.tsd-before-signature { + margin-bottom: 0; + border-bottom: none; + } + + .tsd-panel-group { + margin: 2rem 0; + } + .tsd-panel-group.tsd-index-group { + margin: 2rem 0; + } + .tsd-panel-group.tsd-index-group details { + margin: 2rem 0; + } + .tsd-panel-group > .tsd-accordion-summary { + margin-bottom: 1rem; + } + + #tsd-search { + transition: background-color 0.2s; + } + #tsd-search .title { + position: relative; + z-index: 2; + } + #tsd-search .field { + position: absolute; + left: 0; + top: 0; + right: 2.5rem; + height: 100%; + } + #tsd-search .field input { + box-sizing: border-box; + position: relative; + top: -50px; + z-index: 1; + width: 
100%; + padding: 0 10px; + opacity: 0; + outline: 0; + border: 0; + background: transparent; + color: var(--color-text); + } + #tsd-search .field label { + position: absolute; + overflow: hidden; + right: -40px; + } + #tsd-search .field input, + #tsd-search .title, + #tsd-toolbar-links a { + transition: opacity 0.2s; + } + #tsd-search .results { + position: absolute; + visibility: hidden; + top: 40px; + width: 100%; + margin: 0; + padding: 0; + list-style: none; + box-shadow: 0 0 4px rgba(0, 0, 0, 0.25); + } + #tsd-search .results li { + background-color: var(--color-background); + line-height: initial; + padding: 4px; + } + #tsd-search .results li:nth-child(even) { + background-color: var(--color-background-secondary); + } + #tsd-search .results li.state { + display: none; + } + #tsd-search .results li.current:not(.no-results), + #tsd-search .results li:hover:not(.no-results) { + background-color: var(--color-accent); + } + #tsd-search .results a { + display: flex; + align-items: center; + padding: 0.25rem; + box-sizing: border-box; + } + #tsd-search .results a:before { + top: 10px; + } + #tsd-search .results span.parent { + color: var(--color-text-aside); + font-weight: normal; + } + #tsd-search.has-focus { + background-color: var(--color-accent); + } + #tsd-search.has-focus .field input { + top: 0; + opacity: 1; + } + #tsd-search.has-focus .title, + #tsd-search.has-focus #tsd-toolbar-links a { + z-index: 0; + opacity: 0; + } + #tsd-search.has-focus .results { + visibility: visible; + } + #tsd-search.loading .results li.state.loading { + display: block; + } + #tsd-search.failure .results li.state.failure { + display: block; + } + + #tsd-toolbar-links { + position: absolute; + top: 0; + right: 2rem; + height: 100%; + display: flex; + align-items: center; + justify-content: flex-end; + } + #tsd-toolbar-links a { + margin-left: 1.5rem; + } + #tsd-toolbar-links a:hover { + text-decoration: underline; + } + + .tsd-signature { + margin: 0 0 1rem 0; + padding: 1rem 
0.5rem; + border: 1px solid var(--color-accent); + font-family: Menlo, Monaco, Consolas, "Courier New", monospace; + font-size: 14px; + overflow-x: auto; + } + + .tsd-signature-keyword { + color: var(--color-ts-keyword); + font-weight: normal; + } + + .tsd-signature-symbol { + color: var(--color-text-aside); + font-weight: normal; + } + + .tsd-signature-type { + font-style: italic; + font-weight: normal; + } + + .tsd-signatures { + padding: 0; + margin: 0 0 1em 0; + list-style-type: none; + } + .tsd-signatures .tsd-signature { + margin: 0; + border-color: var(--color-accent); + border-width: 1px 0; + transition: background-color 0.1s; + } + .tsd-signatures .tsd-index-signature:not(:last-child) { + margin-bottom: 1em; + } + .tsd-signatures .tsd-index-signature .tsd-signature { + border-width: 1px; + } + .tsd-description .tsd-signatures .tsd-signature { + border-width: 1px; + } + + ul.tsd-parameter-list, + ul.tsd-type-parameter-list { + list-style: square; + margin: 0; + padding-left: 20px; + } + ul.tsd-parameter-list > li.tsd-parameter-signature, + ul.tsd-type-parameter-list > li.tsd-parameter-signature { + list-style: none; + margin-left: -20px; + } + ul.tsd-parameter-list h5, + ul.tsd-type-parameter-list h5 { + font-size: 16px; + margin: 1em 0 0.5em 0; + } + .tsd-sources { + margin-top: 1rem; + font-size: 0.875em; + } + .tsd-sources a { + color: var(--color-text-aside); + text-decoration: underline; + } + .tsd-sources ul { + list-style: none; + padding: 0; + } + + .tsd-page-toolbar { + position: sticky; + z-index: 1; + top: 0; + left: 0; + width: 100%; + color: var(--color-text); + background: var(--color-background-secondary); + border-bottom: 1px var(--color-accent) solid; + transition: transform 0.3s ease-in-out; + } + .tsd-page-toolbar a { + color: var(--color-text); + text-decoration: none; + } + .tsd-page-toolbar a.title { + font-weight: bold; + } + .tsd-page-toolbar a.title:hover { + text-decoration: underline; + } + .tsd-page-toolbar .tsd-toolbar-contents 
{ + display: flex; + justify-content: space-between; + height: 2.5rem; + margin: 0 auto; + } + .tsd-page-toolbar .table-cell { + position: relative; + white-space: nowrap; + line-height: 40px; + } + .tsd-page-toolbar .table-cell:first-child { + width: 100%; + } + .tsd-page-toolbar .tsd-toolbar-icon { + box-sizing: border-box; + line-height: 0; + padding: 12px 0; + } + + .tsd-widget { + display: inline-block; + overflow: hidden; + opacity: 0.8; + height: 40px; + transition: + opacity 0.1s, + background-color 0.2s; + vertical-align: bottom; + cursor: pointer; + } + .tsd-widget:hover { + opacity: 0.9; + } + .tsd-widget.active { + opacity: 1; + background-color: var(--color-accent); + } + .tsd-widget.no-caption { + width: 40px; + } + .tsd-widget.no-caption:before { + margin: 0; + } + + .tsd-widget.options, + .tsd-widget.menu { + display: none; + } + input[type="checkbox"] + .tsd-widget:before { + background-position: -120px 0; + } + input[type="checkbox"]:checked + .tsd-widget:before { + background-position: -160px 0; + } + + img { + max-width: 100%; + } + + .tsd-member-summary-name { + display: inline-flex; + align-items: center; + padding: 0.25rem; + text-decoration: none; + } + + .tsd-anchor-icon { + display: inline-flex; + align-items: center; + margin-left: 0.5rem; + color: var(--color-text); + } + + .tsd-anchor-icon svg { + width: 1em; + height: 1em; + visibility: hidden; + } + + .tsd-member-summary-name:hover > .tsd-anchor-icon svg, + .tsd-anchor-link:hover > .tsd-anchor-icon svg { + visibility: visible; + } + + .deprecated { + text-decoration: line-through !important; + } + + .warning { + padding: 1rem; + color: var(--color-warning-text); + background: var(--color-background-warning); + } + + .tsd-kind-project { + color: var(--color-ts-project); + } + .tsd-kind-module { + color: var(--color-ts-module); + } + .tsd-kind-namespace { + color: var(--color-ts-namespace); + } + .tsd-kind-enum { + color: var(--color-ts-enum); + } + .tsd-kind-enum-member { + color: 
var(--color-ts-enum-member); + } + .tsd-kind-variable { + color: var(--color-ts-variable); + } + .tsd-kind-function { + color: var(--color-ts-function); + } + .tsd-kind-class { + color: var(--color-ts-class); + } + .tsd-kind-interface { + color: var(--color-ts-interface); + } + .tsd-kind-constructor { + color: var(--color-ts-constructor); + } + .tsd-kind-property { + color: var(--color-ts-property); + } + .tsd-kind-method { + color: var(--color-ts-method); + } + .tsd-kind-reference { + color: var(--color-ts-reference); + } + .tsd-kind-call-signature { + color: var(--color-ts-call-signature); + } + .tsd-kind-index-signature { + color: var(--color-ts-index-signature); + } + .tsd-kind-constructor-signature { + color: var(--color-ts-constructor-signature); + } + .tsd-kind-parameter { + color: var(--color-ts-parameter); + } + .tsd-kind-type-parameter { + color: var(--color-ts-type-parameter); + } + .tsd-kind-accessor { + color: var(--color-ts-accessor); + } + .tsd-kind-get-signature { + color: var(--color-ts-get-signature); + } + .tsd-kind-set-signature { + color: var(--color-ts-set-signature); + } + .tsd-kind-type-alias { + color: var(--color-ts-type-alias); + } + + /* if we have a kind icon, don't color the text by kind */ + .tsd-kind-icon ~ span { + color: var(--color-text); + } + + * { + scrollbar-width: thin; + scrollbar-color: var(--color-accent) var(--color-icon-background); + } + + *::-webkit-scrollbar { + width: 0.75rem; + } + + *::-webkit-scrollbar-track { + background: var(--color-icon-background); + } + + *::-webkit-scrollbar-thumb { + background-color: var(--color-accent); + border-radius: 999rem; + border: 0.25rem solid var(--color-icon-background); + } + + /* mobile */ + @media (max-width: 769px) { + .tsd-widget.options, + .tsd-widget.menu { + display: inline-block; + } + + .container-main { + display: flex; + } + html .col-content { + float: none; + max-width: 100%; + width: 100%; + } + html .col-sidebar { + position: fixed !important; + overflow-y: 
auto; + -webkit-overflow-scrolling: touch; + z-index: 1024; + top: 0 !important; + bottom: 0 !important; + left: auto !important; + right: 0 !important; + padding: 1.5rem 1.5rem 0 0; + width: 75vw; + visibility: hidden; + background-color: var(--color-background); + transform: translate(100%, 0); + } + html .col-sidebar > *:last-child { + padding-bottom: 20px; + } + html .overlay { + content: ""; + display: block; + position: fixed; + z-index: 1023; + top: 0; + left: 0; + right: 0; + bottom: 0; + background-color: rgba(0, 0, 0, 0.75); + visibility: hidden; + } + + .to-has-menu .overlay { + animation: fade-in 0.4s; + } + + .to-has-menu .col-sidebar { + animation: pop-in-from-right 0.4s; + } + + .from-has-menu .overlay { + animation: fade-out 0.4s; + } + + .from-has-menu .col-sidebar { + animation: pop-out-to-right 0.4s; + } + + .has-menu body { + overflow: hidden; + } + .has-menu .overlay { + visibility: visible; + } + .has-menu .col-sidebar { + visibility: visible; + transform: translate(0, 0); + display: flex; + flex-direction: column; + gap: 1.5rem; + max-height: 100vh; + padding: 1rem 2rem; + } + .has-menu .tsd-navigation { + max-height: 100%; + } + #tsd-toolbar-links { + display: none; + } + .tsd-navigation .tsd-nav-link { + display: flex; + } + } + + /* one sidebar */ + @media (min-width: 770px) { + .container-main { + display: grid; + grid-template-columns: minmax(0, 1fr) minmax(0, 2fr); + grid-template-areas: "sidebar content"; + margin: 2rem auto; + } + + .col-sidebar { + grid-area: sidebar; + } + .col-content { + grid-area: content; + padding: 0 1rem; + } + } + @media (min-width: 770px) and (max-width: 1399px) { + .col-sidebar { + max-height: calc(100vh - 2rem - 42px); + overflow: auto; + position: sticky; + top: 42px; + padding-top: 1rem; + } + .site-menu { + margin-top: 1rem; + } + } + + /* two sidebars */ + @media (min-width: 1200px) { + .container-main { + grid-template-columns: minmax(0, 1fr) minmax(0, 2.5fr) minmax( + 0, + 20rem + ); + 
grid-template-areas: "sidebar content toc"; + } + + .col-sidebar { + display: contents; + } + + .page-menu { + grid-area: toc; + padding-left: 1rem; + } + .site-menu { + grid-area: sidebar; + } + + .site-menu { + margin-top: 1rem; + } + + .page-menu, + .site-menu { + max-height: calc(100vh - 2rem - 42px); + overflow: auto; + position: sticky; + top: 42px; + } + } +} diff --git a/node_modules/typedoc/tsdoc.json b/node_modules/typedoc/tsdoc.json new file mode 100644 index 0000000..3bb27a5 --- /dev/null +++ b/node_modules/typedoc/tsdoc.json @@ -0,0 +1,213 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/tsdoc/v0/tsdoc.schema.json", + // If updating this, also update tsdoc-defaults.ts + "noStandardTags": false, + "tagDefinitions": [ + { + "tagName": "@author", + "syntaxKind": "block" + }, + { + "tagName": "@module", + "syntaxKind": "block" + }, + { + "tagName": "@mergeModuleWith", + "syntaxKind": "block" + }, + { + "tagName": "@type", + "syntaxKind": "block" + }, + { + "tagName": "@typedef", + "syntaxKind": "block" + }, + { + "tagName": "@callback", + "syntaxKind": "block" + }, + { + "tagName": "@prop", + "syntaxKind": "block", + "allowMultiple": true + }, + { + "tagName": "@property", + "syntaxKind": "block", + "allowMultiple": true + }, + // Don't include @inheritDoc, because the @microsoft/tsdoc-config parser blows up + // if the standard @inheritDoc inline tag is also defined here. 
+ { + "tagName": "@group", + "syntaxKind": "block", + "allowMultiple": true + }, + { + "tagName": "@groupDescription", + "syntaxKind": "block", + "allowMultiple": true + }, + { + "tagName": "@category", + "syntaxKind": "block", + "allowMultiple": true + }, + { + "tagName": "@categoryDescription", + "syntaxKind": "block", + "allowMultiple": true + }, + { + "tagName": "@hidden", + "syntaxKind": "modifier" + }, + { + "tagName": "@ignore", + "syntaxKind": "modifier" + }, + { + "tagName": "@class", + "syntaxKind": "modifier" + }, + { + "tagName": "@abstract", + "syntaxKind": "modifier" + }, + { + "tagName": "@document", + "syntaxKind": "block" + }, + { + "tagName": "@default", + "syntaxKind": "block" + }, + { + "tagName": "@extends", + "syntaxKind": "block" + }, + { + "tagName": "@augments", + "syntaxKind": "block" + }, + { + // TSDoc defines @returns, we also recognize @return for JSDoc compat + "tagName": "@return", + "syntaxKind": "block" + }, + { + "tagName": "@yields", + "syntaxKind": "block" + }, + { + "tagName": "@enum", + "syntaxKind": "modifier" + }, + { + "tagName": "@event", + "syntaxKind": "modifier" + }, + { + "tagName": "@expand", + "syntaxKind": "modifier" + }, + { + "tagName": "@inline", + "syntaxKind": "modifier" + }, + { + "tagName": "@template", + "syntaxKind": "block", + "allowMultiple": true + }, + { + "tagName": "@linkcode", + "syntaxKind": "inline", + "allowMultiple": true + }, + { + "tagName": "@linkplain", + "syntaxKind": "inline", + "allowMultiple": true + }, + { + "tagName": "@include", + "syntaxKind": "inline", + "allowMultiple": true + }, + { + "tagName": "@includeCode", + "syntaxKind": "inline", + "allowMultiple": true + }, + { + "tagName": "@private", + "syntaxKind": "modifier" + }, + { + "tagName": "@protected", + "syntaxKind": "modifier" + }, + { + "tagName": "@satisfies", + "syntaxKind": "block" + }, + { + "tagName": "@since", + "syntaxKind": "block" + }, + { + "tagName": "@license", + "syntaxKind": "block" + }, + { + "tagName": 
"@import", + "syntaxKind": "block" + }, + { + "tagName": "@overload", + "syntaxKind": "modifier" + }, + { + "tagName": "@namespace", + "syntaxKind": "modifier" + }, + { + "tagName": "@interface", + "syntaxKind": "modifier" + }, + { + "tagName": "@showCategories", + "syntaxKind": "modifier" + }, + { + "tagName": "@hideCategories", + "syntaxKind": "modifier" + }, + { + "tagName": "@showGroups", + "syntaxKind": "modifier" + }, + { + "tagName": "@hideGroups", + "syntaxKind": "modifier" + }, + { + "tagName": "@hideconstructor", + "syntaxKind": "modifier" + }, + { + "tagName": "@jsx", + "syntaxKind": "block" + }, + { + "tagName": "@summary", + "syntaxKind": "block" + }, + { + "tagName": "@useDeclaredType", + "syntaxKind": "modifier" + } + ] +} diff --git a/node_modules/typescript/LICENSE.txt b/node_modules/typescript/LICENSE.txt new file mode 100644 index 0000000..edc24fd --- /dev/null +++ b/node_modules/typescript/LICENSE.txt @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
+ +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
+ +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS diff --git a/node_modules/typescript/README.md b/node_modules/typescript/README.md new file mode 100644 index 0000000..50d5118 --- /dev/null +++ b/node_modules/typescript/README.md @@ -0,0 +1,50 @@ + +# TypeScript + +[![GitHub Actions CI](https://github.com/microsoft/TypeScript/workflows/CI/badge.svg)](https://github.com/microsoft/TypeScript/actions?query=workflow%3ACI) +[![npm version](https://badge.fury.io/js/typescript.svg)](https://www.npmjs.com/package/typescript) +[![Downloads](https://img.shields.io/npm/dm/typescript.svg)](https://www.npmjs.com/package/typescript) +[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/microsoft/TypeScript/badge)](https://securityscorecards.dev/viewer/?uri=github.com/microsoft/TypeScript) + + +[TypeScript](https://www.typescriptlang.org/) is a language for application-scale JavaScript. TypeScript adds optional types to JavaScript that support tools for large-scale JavaScript applications for any browser, for any host, on any OS. TypeScript compiles to readable, standards-based JavaScript. Try it out at the [playground](https://www.typescriptlang.org/play/), and stay up to date via [our blog](https://blogs.msdn.microsoft.com/typescript) and [Twitter account](https://twitter.com/typescript). + +Find others who are using TypeScript at [our community page](https://www.typescriptlang.org/community/). + +## Installing + +For the latest stable version: + +```bash +npm install -D typescript +``` + +For our nightly builds: + +```bash +npm install -D typescript@next +``` + +## Contribute + +There are many ways to [contribute](https://github.com/microsoft/TypeScript/blob/main/CONTRIBUTING.md) to TypeScript. +* [Submit bugs](https://github.com/microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/microsoft/TypeScript/pulls). 
+* Engage with other TypeScript users and developers on [StackOverflow](https://stackoverflow.com/questions/tagged/typescript). +* Help each other in the [TypeScript Community Discord](https://discord.gg/typescript). +* Join the [#typescript](https://twitter.com/search?q=%23TypeScript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/microsoft/TypeScript/blob/main/CONTRIBUTING.md). + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see +the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) +with any additional questions or comments. + +## Documentation + +* [TypeScript in 5 minutes](https://www.typescriptlang.org/docs/handbook/typescript-in-5-minutes.html) +* [Programming handbook](https://www.typescriptlang.org/docs/handbook/intro.html) +* [Homepage](https://www.typescriptlang.org/) + +## Roadmap + +For details on our planned features and future direction, please refer to our [roadmap](https://github.com/microsoft/TypeScript/wiki/Roadmap). diff --git a/node_modules/typescript/SECURITY.md b/node_modules/typescript/SECURITY.md new file mode 100644 index 0000000..b3c89ef --- /dev/null +++ b/node_modules/typescript/SECURITY.md @@ -0,0 +1,41 @@ + + +## Security + +Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet) and [Xamarin](https://github.com/xamarin). + +If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/security.md/definition), please report it to us as described below. 
+ +## Reporting Security Issues + +**Please do not report security vulnerabilities through public GitHub issues.** + +Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/security.md/msrc/create-report). + +If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/security.md/msrc/pgp). + +You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc). + +Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: + + * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) + * Full paths of source file(s) related to the manifestation of the issue + * The location of the affected source code (tag/branch/commit or direct URL) + * Any special configuration required to reproduce the issue + * Step-by-step instructions to reproduce the issue + * Proof-of-concept or exploit code (if possible) + * Impact of the issue, including how an attacker might exploit the issue + +This information will help us triage your report more quickly. + +If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/security.md/msrc/bounty) page for more details about our active programs. + +## Preferred Languages + +We prefer all communications to be in English. + +## Policy + +Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/security.md/cvd). 
+ + diff --git a/node_modules/typescript/ThirdPartyNoticeText.txt b/node_modules/typescript/ThirdPartyNoticeText.txt new file mode 100644 index 0000000..b670746 --- /dev/null +++ b/node_modules/typescript/ThirdPartyNoticeText.txt @@ -0,0 +1,193 @@ +/*!----------------- TypeScript ThirdPartyNotices ------------------------------------------------------- + +The TypeScript software incorporates third party material from the projects listed below. The original copyright notice and the license under which Microsoft received such third party material are set forth below. Microsoft reserves all other rights not expressly granted, whether by implication, estoppel or otherwise. + +--------------------------------------------- +Third Party Code Components +-------------------------------------------- + +------------------- DefinitelyTyped -------------------- +This file is based on or incorporates material from the projects listed below (collectively "Third Party Code"). Microsoft is not the original author of the Third Party Code. The original copyright notice and the license, under which Microsoft received such Third Party Code, are set forth below. Such licenses and notices are provided for informational purposes only. Microsoft, not the third party, licenses the Third Party Code to you under the terms set forth in the EULA for the Microsoft Product. Microsoft reserves all other rights not expressly granted under this agreement, whether by implication, estoppel or otherwise. +DefinitelyTyped +This project is licensed under the MIT license. Copyrights are respective of each contributor listed at the beginning of each definition file. 
Provided for Informational Purposes Only + +MIT License +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +-------------------------------------------------------------------------------------- + +------------------- Unicode -------------------- +UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE + +Unicode Data Files include all data files under the directories +http://www.unicode.org/Public/, http://www.unicode.org/reports/, +http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and +http://www.unicode.org/utility/trac/browser/. + +Unicode Data Files do not include PDF online code charts under the +directory http://www.unicode.org/Public/. + +Software includes any source code published in the Unicode Standard +or under the directories +http://www.unicode.org/Public/, http://www.unicode.org/reports/, +http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and +http://www.unicode.org/utility/trac/browser/. 
+ +NOTICE TO USER: Carefully read the following legal agreement. +BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S +DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"), +YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE +TERMS AND CONDITIONS OF THIS AGREEMENT. +IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE +THE DATA FILES OR SOFTWARE. + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1991-2017 Unicode, Inc. All rights reserved. +Distributed under the Terms of Use in http://www.unicode.org/copyright.html. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Unicode data files and any associated documentation +(the "Data Files") or Unicode software and any associated documentation +(the "Software") to deal in the Data Files or Software +without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, and/or sell copies of +the Data Files or Software, and to permit persons to whom the Data Files +or Software are furnished to do so, provided that either +(a) this copyright and permission notice appear with all copies +of the Data Files or Software, or +(b) this copyright and permission notice appear in associated +Documentation. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT OF THIRD PARTY RIGHTS. +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS +NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL +DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THE DATA FILES OR SOFTWARE. 
+ +Except as contained in this notice, the name of a copyright holder +shall not be used in advertising or otherwise to promote the sale, +use or other dealings in these Data Files or Software without prior +written authorization of the copyright holder. +------------------------------------------------------------------------------------- + +-------------------Document Object Model----------------------------- +DOM + +W3C License +This work is being provided by the copyright holders under the following license. +By obtaining and/or copying this work, you (the licensee) agree that you have read, understood, and will comply with the following terms and conditions. +Permission to copy, modify, and distribute this work, with or without modification, for any purpose and without fee or royalty is hereby granted, provided that you include the following +on ALL copies of the work or portions thereof, including modifications: +* The full text of this NOTICE in a location viewable to users of the redistributed or derivative work. +* Any pre-existing intellectual property disclaimers, notices, or terms and conditions. If none exist, the W3C Software and Document Short Notice should be included. +* Notice of any changes or modifications, through a copyright statement on the new code or document such as "This software or document includes material copied from or derived +from [title and URI of the W3C document]. Copyright © [YEAR] W3C® (MIT, ERCIM, Keio, Beihang)." +Disclaimers +THIS WORK IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS MAKE NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY OR +FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE OR DOCUMENT WILL NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS. +COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR DOCUMENT. 
+The name and trademarks of copyright holders may NOT be used in advertising or publicity pertaining to the work without specific, written prior permission. +Title to copyright in this work will at all times remain with copyright holders. + +--------- + +DOM +Copyright © 2018 WHATWG (Apple, Google, Mozilla, Microsoft). This work is licensed under a Creative Commons Attribution 4.0 International License: Attribution 4.0 International +======================================================================= +Creative Commons Corporation ("Creative Commons") is not a law firm and does not provide legal services or legal advice. Distribution of Creative Commons public licenses does not create a lawyer-client or other relationship. Creative Commons makes its licenses and related information available on an "as-is" basis. Creative Commons gives no warranties regarding its licenses, any material licensed under their terms and conditions, or any related information. Creative Commons disclaims all liability for damages resulting from their use to the fullest extent possible. Using Creative Commons Public Licenses Creative Commons public licenses provide a standard set of terms and conditions that creators and other rights holders may use to share original works of authorship and other material subject to copyright and certain other rights specified in the public license below. The following considerations are for informational purposes only, are not exhaustive, and do not form part of our licenses. Considerations for licensors: Our public licenses are intended for use by those authorized to give the public permission to use material in ways otherwise restricted by copyright and certain other rights. Our licenses are irrevocable. Licensors should read and understand the terms and conditions of the license they choose before applying it. Licensors should also secure all rights necessary before applying our licenses so that the public can reuse the material as expected. 
Licensors should clearly mark any material not subject to the license. This includes other CC- licensed material, or material used under an exception or limitation to copyright. More considerations for licensors: + +wiki.creativecommons.org/Considerations_for_licensors Considerations for the public: By using one of our public licenses, a licensor grants the public permission to use the licensed material under specified terms and conditions. If the licensor's permission is not necessary for any reason--for example, because of any applicable exception or limitation to copyright--then that use is not regulated by the license. Our licenses grant only permissions under copyright and certain other rights that a licensor has authority to grant. Use of the licensed material may still be restricted for other reasons, including because others have copyright or other rights in the material. A licensor may make special requests, such as asking that all changes be marked or described. Although not required by our licenses, you are encouraged to respect those requests where reasonable. More_considerations for the public: wiki.creativecommons.org/Considerations_for_licensees ======================================================================= +Creative Commons Attribution 4.0 International Public License By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions. Section 1 -- Definitions. a. 
Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image. b. Adapter's License means the license You apply to Your Copyright and Similar Rights in Your contributions to Adapted Material in accordance with the terms and conditions of this Public License. c. Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights. d. Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements. e. Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material. f. Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License. g. Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license. h. 
Licensor means the individual(s) or entity(ies) granting rights under this Public License. i. Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them. j. Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world. k. You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning. Section 2 -- Scope. a. License grant. 1. Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to: a. reproduce and Share the Licensed Material, in whole or in part; and b. produce, reproduce, and Share Adapted Material. 2. Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions. 3. Term. The term of this Public License is specified in Section 6(a). 4. Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. 
The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a) (4) never produces Adapted Material. 5. Downstream recipients. a. Offer from the Licensor -- Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License. b. No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material. 6. No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i). b. Other rights. 1. Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise. 2. Patent and trademark rights are not licensed under this Public License. 3. To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. 
In all other cases the Licensor expressly reserves any right to collect such royalties. Section 3 -- License Conditions. Your exercise of the Licensed Rights is expressly made subject to the following conditions. a. Attribution. 1. If You Share the Licensed Material (including in modified form), You must: a. retain the following if it is supplied by the Licensor with the Licensed Material: i. identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated); ii. a copyright notice; iii. a notice that refers to this Public License; iv. a notice that refers to the disclaimer of warranties; v. a URI or hyperlink to the Licensed Material to the extent reasonably practicable; b. indicate if You modified the Licensed Material and retain an indication of any previous modifications; and c. indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License. 2. You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information. 3. If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable. 4. If You Share Adapted Material You produce, the Adapter's License You apply must not prevent recipients of the Adapted Material from complying with this Public License. Section 4 -- Sui Generis Database Rights. Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material: a. 
for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database; b. if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and c. You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database. For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights. Section 5 -- Disclaimer of Warranties and Limitation of Liability. a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR DAMAGES. 
WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. c. The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability. Section 6 -- Term and Termination. a. This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically. b. Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates: 1. automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or 2. upon express reinstatement by the Licensor. For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License. c. For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License. d. Sections 1, 5, 6, 7, and 8 survive termination of this Public License. Section 7 -- Other Terms and Conditions. a. The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed. b. Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License. Section 8 -- Interpretation. a. For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License. b. 
To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions. c. No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor. d. Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority. ======================================================================= Creative Commons is not a party to its public licenses. Notwithstanding, Creative Commons may elect to apply one of its public licenses to material it publishes and in those instances will be considered the "Licensor." Except for the limited purpose of indicating that material is shared under a Creative Commons public license or as otherwise permitted by the Creative Commons policies published at creativecommons.org/policies, Creative Commons does not authorize the use of the trademark "Creative Commons" or any other trademark or logo of Creative Commons without its prior written consent including, without limitation, in connection with any unauthorized modifications to any of its public licenses or any other arrangements, understandings, or agreements concerning use of licensed material. For the avoidance of doubt, this paragraph does not form part of the public licenses. Creative Commons may be contacted at creativecommons.org. 
+ +-------------------------------------------------------------------------------- + +----------------------Web Background Synchronization------------------------------ + +Web Background Synchronization Specification +Portions of spec © by W3C + +W3C Community Final Specification Agreement +To secure commitments from participants for the full text of a Community or Business Group Report, the group may call for voluntary commitments to the following terms; a "summary" is +available. See also the related "W3C Community Contributor License Agreement". +1. The Purpose of this Agreement. +This Agreement sets forth the terms under which I make certain copyright and patent rights available to you for your implementation of the Specification. +Any other capitalized terms not specifically defined herein have the same meaning as those terms have in the "W3C Patent Policy", and if not defined there, in the "W3C Process Document". +2. Copyrights. +2.1. Copyright Grant. I grant to you a perpetual (for the duration of the applicable copyright), worldwide, non-exclusive, no-charge, royalty-free, copyright license, without any obligation for accounting to me, to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, distribute, and implement the Specification to the full extent of my copyright interest in the Specification. +2.2. Attribution. As a condition of the copyright grant, you must include an attribution to the Specification in any derivative work you make based on the Specification. That attribution must include, at minimum, the Specification name and version number. +3. Patents. +3.1. Patent Licensing Commitment. I agree to license my Essential Claims under the W3C Community RF Licensing Requirements. This requirement includes Essential Claims that I own and any that I have the right to license without obligation of payment or other consideration to an unrelated third party. 
W3C Community RF Licensing Requirements obligations made concerning the Specification and described in this policy are binding on me for the life of the patents in question and encumber the patents containing Essential Claims, regardless of changes in participation status or W3C Membership. I also agree to license my Essential Claims under the W3C Community RF Licensing Requirements in derivative works of the Specification so long as all normative portions of the Specification are maintained and that this licensing commitment does not extend to any portion of the derivative work that was not included in the Specification. +3.2. Optional, Additional Patent Grant. In addition to the provisions of Section 3.1, I may also, at my option, make certain intellectual property rights infringed by implementations of the Specification, including Essential Claims, available by providing those terms via the W3C Web site. +4. No Other Rights. Except as specifically set forth in this Agreement, no other express or implied patent, trademark, copyright, or other property rights are granted under this Agreement, including by implication, waiver, or estoppel. +5. Antitrust Compliance. I acknowledge that I may compete with other participants, that I am under no obligation to implement the Specification, that each participant is free to develop competing technologies and standards, and that each party is free to license its patent rights to third parties, including for the purpose of enabling competing technologies and standards. +6. Non-Circumvention. I agree that I will not intentionally take or willfully assist any third party to take any action for the purpose of circumventing my obligations under this Agreement. +7. Transition to W3C Recommendation Track. The Specification developed by the Project may transition to the W3C Recommendation Track. The W3C Team is responsible for notifying me that a Corresponding Working Group has been chartered. 
I have no obligation to join the Corresponding Working Group. If the Specification developed by the Project transitions to the W3C Recommendation Track, the following terms apply: +7.1. If I join the Corresponding Working Group. If I join the Corresponding Working Group, I will be subject to all W3C rules, obligations, licensing commitments, and policies that govern that Corresponding Working Group. +7.2. If I Do Not Join the Corresponding Working Group. +7.2.1. Licensing Obligations to Resulting Specification. If I do not join the Corresponding Working Group, I agree to offer patent licenses according to the W3C Royalty-Free licensing requirements described in Section 5 of the W3C Patent Policy for the portions of the Specification included in the resulting Recommendation. This licensing commitment does not extend to any portion of an implementation of the Recommendation that was not included in the Specification. This licensing commitment may not be revoked but may be modified through the exclusion process defined in Section 4 of the W3C Patent Policy. I am not required to join the Corresponding Working Group to exclude patents from the W3C Royalty-Free licensing commitment, but must otherwise follow the normal exclusion procedures defined by the W3C Patent Policy. The W3C Team will notify me of any Call for Exclusion in the Corresponding Working Group as set forth in Section 4.5 of the W3C Patent Policy. +7.2.2. No Disclosure Obligation. If I do not join the Corresponding Working Group, I have no patent disclosure obligations outside of those set forth in Section 6 of the W3C Patent Policy. +8. Conflict of Interest. I will disclose significant relationships when those relationships might reasonably be perceived as creating a conflict of interest with my role. I will notify W3C of any change in my affiliation using W3C-provided mechanisms. +9. Representations, Warranties and Disclaimers. 
I represent and warrant that I am legally entitled to grant the rights and promises set forth in this Agreement. IN ALL OTHER RESPECTS THE SPECIFICATION IS PROVIDED “AS IS.” The entire risk as to implementing or otherwise using the Specification is assumed by the implementer and user. Except as stated herein, I expressly disclaim any warranties (express, implied, or otherwise), including implied warranties of merchantability, non-infringement, fitness for a particular purpose, or title, related to the Specification. IN NO EVENT WILL ANY PARTY BE LIABLE TO ANY OTHER PARTY FOR LOST PROFITS OR ANY FORM OF INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER FROM ANY CAUSES OF ACTION OF ANY KIND WITH RESPECT TO THIS AGREEMENT, WHETHER BASED ON BREACH OF CONTRACT, TORT (INCLUDING NEGLIGENCE), OR OTHERWISE, AND WHETHER OR NOT THE OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. All of my obligations under Section 3 regarding the transfer, successors in interest, or assignment of Granted Claims will be satisfied if I notify the transferee or assignee of any patent that I know contains Granted Claims of the obligations under Section 3. Nothing in this Agreement requires me to undertake a patent search. +10. Definitions. +10.1. Agreement. “Agreement” means this W3C Community Final Specification Agreement. +10.2. Corresponding Working Group. “Corresponding Working Group” is a W3C Working Group that is chartered to develop a Recommendation, as defined in the W3C Process Document, that takes the Specification as an input. +10.3. Essential Claims. “Essential Claims” shall mean all claims in any patent or patent application in any jurisdiction in the world that would necessarily be infringed by implementation of the Specification. A claim is necessarily infringed hereunder only when it is not possible to avoid infringing it because there is no non-infringing alternative for implementing the normative portions of the Specification. 
Existence of a non-infringing alternative shall be judged based on the state of the art at the time of the publication of the Specification. The following are expressly excluded from and shall not be deemed to constitute Essential Claims: +10.3.1. any claims other than as set forth above even if contained in the same patent as Essential Claims; and +10.3.2. claims which would be infringed only by: +portions of an implementation that are not specified in the normative portions of the Specification, or +enabling technologies that may be necessary to make or use any product or portion thereof that complies with the Specification and are not themselves expressly set forth in the Specification (e.g., semiconductor manufacturing technology, compiler technology, object-oriented technology, basic operating system technology, and the like); or +the implementation of technology developed elsewhere and merely incorporated by reference in the body of the Specification. +10.3.3. design patents and design registrations. +For purposes of this definition, the normative portions of the Specification shall be deemed to include only architectural and interoperability requirements. Optional features in the RFC 2119 sense are considered normative unless they are specifically identified as informative. Implementation examples or any other material that merely illustrate the requirements of the Specification are informative, rather than normative. +10.4. I, Me, or My. “I,” “me,” or “my” refers to the signatory. +10.5 Project. “Project” means the W3C Community Group or Business Group for which I executed this Agreement. +10.6. Specification. “Specification” means the Specification identified by the Project as the target of this agreement in a call for Final Specification Commitments. W3C shall provide the authoritative mechanisms for the identification of this Specification. +10.7. W3C Community RF Licensing Requirements. 
“W3C Community RF Licensing Requirements” license shall mean a non-assignable, non-sublicensable license to make, have made, use, sell, have sold, offer to sell, import, and distribute and dispose of implementations of the Specification that: +10.7.1. shall be available to all, worldwide, whether or not they are W3C Members; +10.7.2. shall extend to all Essential Claims owned or controlled by me; +10.7.3. may be limited to implementations of the Specification, and to what is required by the Specification; +10.7.4. may be conditioned on a grant of a reciprocal RF license (as defined in this policy) to all Essential Claims owned or controlled by the licensee. A reciprocal license may be required to be available to all, and a reciprocal license may itself be conditioned on a further reciprocal license from all. +10.7.5. may not be conditioned on payment of royalties, fees or other consideration; +10.7.6. may be suspended with respect to any licensee when licensor issued by licensee for infringement of claims essential to implement the Specification or any W3C Recommendation; +10.7.7. may not impose any further conditions or restrictions on the use of any technology, intellectual property rights, or other restrictions on behavior of the licensee, but may include reasonable, customary terms relating to operation or maintenance of the license relationship such as the following: choice of law and dispute resolution; +10.7.8. shall not be considered accepted by an implementer who manifests an intent not to accept the terms of the W3C Community RF Licensing Requirements license as offered by the licensor. +10.7.9. The RF license conforming to the requirements in this policy shall be made available by the licensor as long as the Specification is in effect. The term of such license shall be for the life of the patents in question. +I am encouraged to provide a contact from which licensing information can be obtained and other relevant licensing information. 
Any such information will be made publicly available. +10.8. You or Your. “You,” “you,” or “your” means any person or entity who exercises copyright or patent rights granted under this Agreement, and any person that person or entity controls. + +------------------------------------------------------------------------------------- + +------------------- WebGL ----------------------------- +Copyright (c) 2018 The Khronos Group Inc. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and/or associated documentation files (the +"Materials"), to deal in the Materials without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Materials, and to +permit persons to whom the Materials are furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Materials. + +THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. 
+------------------------------------------------------ + +------------- End of ThirdPartyNotices ------------------------------------------- */ + diff --git a/node_modules/typescript/bin/tsc b/node_modules/typescript/bin/tsc new file mode 100755 index 0000000..19c62bf --- /dev/null +++ b/node_modules/typescript/bin/tsc @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../lib/tsc.js') diff --git a/node_modules/typescript/bin/tsserver b/node_modules/typescript/bin/tsserver new file mode 100755 index 0000000..7143b6a --- /dev/null +++ b/node_modules/typescript/bin/tsserver @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../lib/tsserver.js') diff --git a/node_modules/typescript/package.json b/node_modules/typescript/package.json new file mode 100644 index 0000000..7b3d160 --- /dev/null +++ b/node_modules/typescript/package.json @@ -0,0 +1,120 @@ +{ + "name": "typescript", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "5.7.3", + "license": "Apache-2.0", + "description": "TypeScript is a language for application scale JavaScript development", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript" + ], + "bugs": { + "url": "https://github.com/microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/microsoft/TypeScript.git" + }, + "main": "./lib/typescript.js", + "typings": "./lib/typescript.d.ts", + "bin": { + "tsc": "./bin/tsc", + "tsserver": "./bin/tsserver" + }, + "engines": { + "node": ">=14.17" + }, + "files": [ + "bin", + "lib", + "!lib/enu", + "LICENSE.txt", + "README.md", + "SECURITY.md", + "ThirdPartyNoticeText.txt", + "!**/.gitattributes" + ], + "devDependencies": { + "@dprint/formatter": "^0.4.1", + "@dprint/typescript": "0.93.0", + "@esfx/canceltoken": "^1.0.0", + "@eslint/js": "^9.11.1", + "@octokit/rest": "^21.0.2", + "@types/chai": "^4.3.20", + "@types/diff": "^5.2.2", + "@types/minimist": "^1.2.5", + "@types/mocha": "^10.0.8", + 
"@types/ms": "^0.7.34", + "@types/node": "latest", + "@types/source-map-support": "^0.5.10", + "@types/which": "^3.0.4", + "@typescript-eslint/rule-tester": "^8.8.0", + "@typescript-eslint/type-utils": "^8.8.0", + "@typescript-eslint/utils": "^8.8.0", + "azure-devops-node-api": "^14.1.0", + "c8": "^10.1.2", + "chai": "^4.5.0", + "chalk": "^4.1.2", + "chokidar": "^3.6.0", + "diff": "^5.2.0", + "dprint": "^0.47.2", + "esbuild": "^0.24.0", + "eslint": "^9.11.1", + "eslint-formatter-autolinkable-stylish": "^1.4.0", + "eslint-plugin-regexp": "^2.6.0", + "fast-xml-parser": "^4.5.0", + "glob": "^10.4.5", + "globals": "^15.9.0", + "hereby": "^1.10.0", + "jsonc-parser": "^3.3.1", + "knip": "^5.30.6", + "minimist": "^1.2.8", + "mocha": "^10.7.3", + "mocha-fivemat-progress-reporter": "^0.1.0", + "monocart-coverage-reports": "^2.11.0", + "ms": "^2.1.3", + "playwright": "^1.47.2", + "source-map-support": "^0.5.21", + "tslib": "^2.7.0", + "typescript": "^5.6.2", + "typescript-eslint": "^8.8.0", + "which": "^3.0.1" + }, + "overrides": { + "typescript@*": "$typescript" + }, + "scripts": { + "test": "hereby runtests-parallel --light=false", + "test:eslint-rules": "hereby run-eslint-rules-tests", + "build": "npm run build:compiler && npm run build:tests", + "build:compiler": "hereby local", + "build:tests": "hereby tests", + "build:tests:notypecheck": "hereby tests --no-typecheck", + "clean": "hereby clean", + "gulp": "hereby", + "lint": "hereby lint", + "knip": "hereby knip", + "format": "dprint fmt", + "setup-hooks": "node scripts/link-hooks.mjs" + }, + "browser": { + "fs": false, + "os": false, + "path": false, + "crypto": false, + "buffer": false, + "source-map-support": false, + "inspector": false, + "perf_hooks": false + }, + "packageManager": "npm@8.19.4", + "volta": { + "node": "20.1.0", + "npm": "8.19.4" + }, + "gitHead": "a5e123d9e0690fcea92878ea8a0a382922009fc9" +} diff --git a/node_modules/uc.micro/LICENSE.txt b/node_modules/uc.micro/LICENSE.txt new file mode 100644 
index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/uc.micro/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/uc.micro/README.md b/node_modules/uc.micro/README.md new file mode 100644 index 0000000..7707da4 --- /dev/null +++ b/node_modules/uc.micro/README.md @@ -0,0 +1,14 @@ +# uc.micro + +[![CI](https://github.com/markdown-it/uc.micro/actions/workflows/ci.yml/badge.svg)](https://github.com/markdown-it/uc.micro/actions/workflows/ci.yml) +[![NPM version](https://img.shields.io/npm/v/uc.micro.svg?style=flat)](https://www.npmjs.org/package/uc.micro) + +> Micro subset of unicode data files for [markdown-it](https://github.com/markdown-it) projects. + +Content of this repo is autogenerated from `unicode-` package, +maintained by [Mathias Bynens](https://github.com/mathiasbynens). + +That's just a proxy to reduce dependencies/install size. 
+ +**This package content is ONLY for [markdown-it](https://github.com/markdown-it) +projects needs. Don't ask to extend it!** diff --git a/node_modules/uc.micro/categories/Cc/regex.mjs b/node_modules/uc.micro/categories/Cc/regex.mjs new file mode 100644 index 0000000..91cd397 --- /dev/null +++ b/node_modules/uc.micro/categories/Cc/regex.mjs @@ -0,0 +1 @@ +export default /[\0-\x1F\x7F-\x9F]/ \ No newline at end of file diff --git a/node_modules/uc.micro/categories/Cf/regex.mjs b/node_modules/uc.micro/categories/Cf/regex.mjs new file mode 100644 index 0000000..bb58c7d --- /dev/null +++ b/node_modules/uc.micro/categories/Cf/regex.mjs @@ -0,0 +1 @@ +export default /[\xAD\u0600-\u0605\u061C\u06DD\u070F\u0890\u0891\u08E2\u180E\u200B-\u200F\u202A-\u202E\u2060-\u2064\u2066-\u206F\uFEFF\uFFF9-\uFFFB]|\uD804[\uDCBD\uDCCD]|\uD80D[\uDC30-\uDC3F]|\uD82F[\uDCA0-\uDCA3]|\uD834[\uDD73-\uDD7A]|\uDB40[\uDC01\uDC20-\uDC7F]/ \ No newline at end of file diff --git a/node_modules/uc.micro/categories/P/regex.mjs b/node_modules/uc.micro/categories/P/regex.mjs new file mode 100644 index 0000000..b084264 --- /dev/null +++ b/node_modules/uc.micro/categories/P/regex.mjs @@ -0,0 +1 @@ +export default 
/[!-#%-\*,-\/:;\?@\[-\]_\{\}\xA1\xA7\xAB\xB6\xB7\xBB\xBF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061D-\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C77\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166E\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1B7D\u1B7E\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2010-\u2027\u2030-\u2043\u2045-\u2051\u2053-\u205E\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4F\u2E52-\u2E5D\u3001-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]|\uD800[\uDD00-\uDD02\uDF9F\uDFD0]|\uD801\uDD6F|\uD802[\uDC57\uDD1F\uDD3F\uDE50-\uDE58\uDE7F\uDEF0-\uDEF6\uDF39-\uDF3F\uDF99-\uDF9C]|\uD803[\uDEAD\uDF55-\uDF59\uDF86-\uDF89]|\uD804[\uDC47-\uDC4D\uDCBB\uDCBC\uDCBE-\uDCC1\uDD40-\uDD43\uDD74\uDD75\uDDC5-\uDDC8\uDDCD\uDDDB\uDDDD-\uDDDF\uDE38-\uDE3D\uDEA9]|\uD805[\uDC4B-\uDC4F\uDC5A\uDC5B\uDC5D\uDCC6\uDDC1-\uDDD7\uDE41-\uDE43\uDE60-\uDE6C\uDEB9\uDF3C-\uDF3E]|\uD806[\uDC3B\uDD44-\uDD46\uDDE2\uDE3F-\uDE46\uDE9A-\uDE9C\uDE9E-\uDEA2\uDF00-\uDF09]|\uD807[\uDC41-\uDC45\uDC70\uDC71\uDEF7\uDEF8\uDF43-\uDF4F\uDFFF]|\uD809[\uDC70-\uDC74]|\uD80B[\uDFF1\uDFF2]|\uD81A[\uDE6E\uDE6F\uDEF5\uDF37-\uDF3B\uDF44]|\uD81B[\uDE97-\uDE9A\uDFE2]|\uD82F\uDC9F|\uD836[\uDE87-\u
DE8B]|\uD83A[\uDD5E\uDD5F]/ \ No newline at end of file diff --git a/node_modules/uc.micro/categories/S/regex.mjs b/node_modules/uc.micro/categories/S/regex.mjs new file mode 100644 index 0000000..45a2624 --- /dev/null +++ b/node_modules/uc.micro/categories/S/regex.mjs @@ -0,0 +1 @@ +export default /[\$\+<->\^`\|~\xA2-\xA6\xA8\xA9\xAC\xAE-\xB1\xB4\xB8\xD7\xF7\u02C2-\u02C5\u02D2-\u02DF\u02E5-\u02EB\u02ED\u02EF-\u02FF\u0375\u0384\u0385\u03F6\u0482\u058D-\u058F\u0606-\u0608\u060B\u060E\u060F\u06DE\u06E9\u06FD\u06FE\u07F6\u07FE\u07FF\u0888\u09F2\u09F3\u09FA\u09FB\u0AF1\u0B70\u0BF3-\u0BFA\u0C7F\u0D4F\u0D79\u0E3F\u0F01-\u0F03\u0F13\u0F15-\u0F17\u0F1A-\u0F1F\u0F34\u0F36\u0F38\u0FBE-\u0FC5\u0FC7-\u0FCC\u0FCE\u0FCF\u0FD5-\u0FD8\u109E\u109F\u1390-\u1399\u166D\u17DB\u1940\u19DE-\u19FF\u1B61-\u1B6A\u1B74-\u1B7C\u1FBD\u1FBF-\u1FC1\u1FCD-\u1FCF\u1FDD-\u1FDF\u1FED-\u1FEF\u1FFD\u1FFE\u2044\u2052\u207A-\u207C\u208A-\u208C\u20A0-\u20C0\u2100\u2101\u2103-\u2106\u2108\u2109\u2114\u2116-\u2118\u211E-\u2123\u2125\u2127\u2129\u212E\u213A\u213B\u2140-\u2144\u214A-\u214D\u214F\u218A\u218B\u2190-\u2307\u230C-\u2328\u232B-\u2426\u2440-\u244A\u249C-\u24E9\u2500-\u2767\u2794-\u27C4\u27C7-\u27E5\u27F0-\u2982\u2999-\u29D7\u29DC-\u29FB\u29FE-\u2B73\u2B76-\u2B95\u2B97-\u2BFF\u2CE5-\u2CEA\u2E50\u2E51\u2E80-\u2E99\u2E9B-\u2EF3\u2F00-\u2FD5\u2FF0-\u2FFF\u3004\u3012\u3013\u3020\u3036\u3037\u303E\u303F\u309B\u309C\u3190\u3191\u3196-\u319F\u31C0-\u31E3\u31EF\u3200-\u321E\u322A-\u3247\u3250\u3260-\u327F\u328A-\u32B0\u32C0-\u33FF\u4DC0-\u4DFF\uA490-\uA4C6\uA700-\uA716\uA720\uA721\uA789\uA78A\uA828-\uA82B\uA836-\uA839\uAA77-\uAA79\uAB5B\uAB6A\uAB6B\uFB29\uFBB2-\uFBC2\uFD40-\uFD4F\uFDCF\uFDFC-\uFDFF\uFE62\uFE64-\uFE66\uFE69\uFF04\uFF0B\uFF1C-\uFF1E\uFF3E\uFF40\uFF5C\uFF5E\uFFE0-\uFFE6\uFFE8-\uFFEE\uFFFC\uFFFD]|\uD800[\uDD37-\uDD3F\uDD79-\uDD89\uDD8C-\uDD8E\uDD90-\uDD9C\uDDA0\uDDD0-\uDDFC]|\uD802[\uDC77\uDC78\uDEC8]|\uD805\uDF3F|\uD807[\uDFD5-\uDFF1]|\uD81A[\uDF3C-\uDF3F\uDF45]|\uD82F\uDC9C|\uD833[\uDF50-\uDF
C3]|\uD834[\uDC00-\uDCF5\uDD00-\uDD26\uDD29-\uDD64\uDD6A-\uDD6C\uDD83\uDD84\uDD8C-\uDDA9\uDDAE-\uDDEA\uDE00-\uDE41\uDE45\uDF00-\uDF56]|\uD835[\uDEC1\uDEDB\uDEFB\uDF15\uDF35\uDF4F\uDF6F\uDF89\uDFA9\uDFC3]|\uD836[\uDC00-\uDDFF\uDE37-\uDE3A\uDE6D-\uDE74\uDE76-\uDE83\uDE85\uDE86]|\uD838[\uDD4F\uDEFF]|\uD83B[\uDCAC\uDCB0\uDD2E\uDEF0\uDEF1]|\uD83C[\uDC00-\uDC2B\uDC30-\uDC93\uDCA0-\uDCAE\uDCB1-\uDCBF\uDCC1-\uDCCF\uDCD1-\uDCF5\uDD0D-\uDDAD\uDDE6-\uDE02\uDE10-\uDE3B\uDE40-\uDE48\uDE50\uDE51\uDE60-\uDE65\uDF00-\uDFFF]|\uD83D[\uDC00-\uDED7\uDEDC-\uDEEC\uDEF0-\uDEFC\uDF00-\uDF76\uDF7B-\uDFD9\uDFE0-\uDFEB\uDFF0]|\uD83E[\uDC00-\uDC0B\uDC10-\uDC47\uDC50-\uDC59\uDC60-\uDC87\uDC90-\uDCAD\uDCB0\uDCB1\uDD00-\uDE53\uDE60-\uDE6D\uDE70-\uDE7C\uDE80-\uDE88\uDE90-\uDEBD\uDEBF-\uDEC5\uDECE-\uDEDB\uDEE0-\uDEE8\uDEF0-\uDEF8\uDF00-\uDF92\uDF94-\uDFCA]/ \ No newline at end of file diff --git a/node_modules/uc.micro/categories/Z/regex.mjs b/node_modules/uc.micro/categories/Z/regex.mjs new file mode 100644 index 0000000..6f15419 --- /dev/null +++ b/node_modules/uc.micro/categories/Z/regex.mjs @@ -0,0 +1 @@ +export default /[ \xA0\u1680\u2000-\u200A\u2028\u2029\u202F\u205F\u3000]/ \ No newline at end of file diff --git a/node_modules/uc.micro/index.mjs b/node_modules/uc.micro/index.mjs new file mode 100644 index 0000000..21b80d3 --- /dev/null +++ b/node_modules/uc.micro/index.mjs @@ -0,0 +1,8 @@ +import Any from './properties/Any/regex.mjs'; +import Cc from './categories/Cc/regex.mjs'; +import Cf from './categories/Cf/regex.mjs'; +import P from './categories/P/regex.mjs'; +import S from './categories/S/regex.mjs'; +import Z from './categories/Z/regex.mjs'; + +export { Any, Cc, Cf, P, S, Z }; diff --git a/node_modules/uc.micro/package.json b/node_modules/uc.micro/package.json new file mode 100644 index 0000000..73102ce --- /dev/null +++ b/node_modules/uc.micro/package.json @@ -0,0 +1,37 @@ +{ + "name": "uc.micro", + "version": "2.1.0", + "description": "Micro subset of unicode data files for 
markdown-it projects.", + "repository": "markdown-it/uc.micro", + "license": "MIT", + "main": "build/index.cjs.js", + "module": "index.mjs", + "exports": { + ".": { + "require": "./build/index.cjs.js", + "import": "./index.mjs" + }, + "./*": { + "require": "./*", + "import": "./*" + } + }, + "files": [ + "index.mjs", + "categories/", + "properties/", + "build/" + ], + "scripts": { + "test": "npm run build && mocha", + "build": "rollup -c", + "update": "node update.mjs && npm test", + "prepublishOnly": "npm run build" + }, + "devDependencies": { + "@unicode/unicode-15.1.0": "^1.5.2", + "mocha": "^10.2.0", + "rollup": "^4.6.1", + "shelljs": "^0.8.5" + } +} diff --git a/node_modules/uc.micro/properties/Any/regex.mjs b/node_modules/uc.micro/properties/Any/regex.mjs new file mode 100644 index 0000000..72d3b16 --- /dev/null +++ b/node_modules/uc.micro/properties/Any/regex.mjs @@ -0,0 +1 @@ +export default /[\0-\uD7FF\uE000-\uFFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/ \ No newline at end of file diff --git a/node_modules/yaml/LICENSE b/node_modules/yaml/LICENSE new file mode 100644 index 0000000..e060aaa --- /dev/null +++ b/node_modules/yaml/LICENSE @@ -0,0 +1,13 @@ +Copyright Eemeli Aro + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. 
diff --git a/node_modules/yaml/README.md b/node_modules/yaml/README.md new file mode 100644 index 0000000..a5bdcba --- /dev/null +++ b/node_modules/yaml/README.md @@ -0,0 +1,188 @@ +# YAML + +`yaml` is a definitive library for [YAML](https://yaml.org/), the human friendly data serialization standard. +This library: + +- Supports both YAML 1.1 and YAML 1.2 and all common data schemas, +- Passes all of the [yaml-test-suite](https://github.com/yaml/yaml-test-suite) tests, +- Can accept any string as input without throwing, parsing as much YAML out of it as it can, and +- Supports parsing, modifying, and writing YAML comments and blank lines. + +The library is released under the ISC open source license, and the code is [available on GitHub](https://github.com/eemeli/yaml/). +It has no external dependencies and runs on Node.js as well as modern browsers. + +For the purposes of versioning, any changes that break any of the documented endpoints or APIs will be considered semver-major breaking changes. +Undocumented library internals may change between minor versions, and previous APIs may be deprecated (but not removed). + +The minimum supported TypeScript version of the included typings is 3.9; +for use in earlier versions you may need to set `skipLibCheck: true` in your config. +This requirement may be updated between minor versions of the library. + +For more information, see the project's documentation site: [**eemeli.org/yaml**](https://eemeli.org/yaml/) + +To install: + +```sh +npm install yaml +``` + +**Note:** These docs are for `yaml@2`. For v1, see the [v1.10.0 tag](https://github.com/eemeli/yaml/tree/v1.10.0) for the source and [eemeli.org/yaml/v1](https://eemeli.org/yaml/v1/) for the documentation. + +The development and maintenance of this library is [sponsored](https://github.com/sponsors/eemeli) by: + +

+ Scipress +     + Manifest +

+ +## API Overview + +The API provided by `yaml` has three layers, depending on how deep you need to go: [Parse & Stringify](https://eemeli.org/yaml/#parse-amp-stringify), [Documents](https://eemeli.org/yaml/#documents), and the underlying [Lexer/Parser/Composer](https://eemeli.org/yaml/#parsing-yaml). +The first has the simplest API and "just works", the second gets you all the bells and whistles supported by the library along with a decent [AST](https://eemeli.org/yaml/#content-nodes), and the third lets you get progressively closer to YAML source, if that's your thing. + +A [command-line tool](https://eemeli.org/yaml/#command-line-tool) is also included. + +### Parse & Stringify + +```js +import { parse, stringify } from 'yaml' +``` + +- [`parse(str, reviver?, options?): value`](https://eemeli.org/yaml/#yaml-parse) +- [`stringify(value, replacer?, options?): string`](https://eemeli.org/yaml/#yaml-stringify) + +### Documents + + +```js +import { + Document, + isDocument, + parseAllDocuments, + parseDocument +} from 'yaml' +``` + +- [`Document`](https://eemeli.org/yaml/#documents) + - [`constructor(value, replacer?, options?)`](https://eemeli.org/yaml/#creating-documents) + - [`#contents`](https://eemeli.org/yaml/#content-nodes) + - [`#directives`](https://eemeli.org/yaml/#stream-directives) + - [`#errors`](https://eemeli.org/yaml/#errors) + - [`#warnings`](https://eemeli.org/yaml/#errors) +- [`isDocument(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) +- [`parseAllDocuments(str, options?): Document[]`](https://eemeli.org/yaml/#parsing-documents) +- [`parseDocument(str, options?): Document`](https://eemeli.org/yaml/#parsing-documents) + +### Content Nodes + + +```js +import { + isAlias, isCollection, isMap, isNode, + isPair, isScalar, isSeq, Scalar, + visit, visitAsync, YAMLMap, YAMLSeq +} from 'yaml' +``` + +- [`isAlias(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) +- [`isCollection(foo): 
boolean`](https://eemeli.org/yaml/#identifying-node-types) +- [`isMap(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) +- [`isNode(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) +- [`isPair(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) +- [`isScalar(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) +- [`isSeq(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) +- [`new Scalar(value)`](https://eemeli.org/yaml/#scalar-values) +- [`new YAMLMap()`](https://eemeli.org/yaml/#collections) +- [`new YAMLSeq()`](https://eemeli.org/yaml/#collections) +- [`doc.createAlias(node, name?): Alias`](https://eemeli.org/yaml/#creating-nodes) +- [`doc.createNode(value, options?): Node`](https://eemeli.org/yaml/#creating-nodes) +- [`doc.createPair(key, value): Pair`](https://eemeli.org/yaml/#creating-nodes) +- [`visit(node, visitor)`](https://eemeli.org/yaml/#finding-and-modifying-nodes) +- [`visitAsync(node, visitor)`](https://eemeli.org/yaml/#finding-and-modifying-nodes) + +### Parsing YAML + +```js +import { Composer, Lexer, Parser } from 'yaml' +``` + +- [`new Lexer().lex(src)`](https://eemeli.org/yaml/#lexer) +- [`new Parser(onNewLine?).parse(src)`](https://eemeli.org/yaml/#parser) +- [`new Composer(options?).compose(tokens)`](https://eemeli.org/yaml/#composer) + +## YAML.parse + +```yaml +# file.yml +YAML: + - A human-readable data serialization language + - https://en.wikipedia.org/wiki/YAML +yaml: + - A complete JavaScript implementation + - https://www.npmjs.com/package/yaml +``` + +```js +import fs from 'fs' +import YAML from 'yaml' + +YAML.parse('3.14159') +// 3.14159 + +YAML.parse('[ true, false, maybe, null ]\n') +// [ true, false, 'maybe', null ] + +const file = fs.readFileSync('./file.yml', 'utf8') +YAML.parse(file) +// { YAML: +// [ 'A human-readable data serialization language', +// 'https://en.wikipedia.org/wiki/YAML' ], +// yaml: +// [ 'A complete JavaScript implementation', +// 
'https://www.npmjs.com/package/yaml' ] } +``` + +## YAML.stringify + +```js +import YAML from 'yaml' + +YAML.stringify(3.14159) +// '3.14159\n' + +YAML.stringify([true, false, 'maybe', null]) +// `- true +// - false +// - maybe +// - null +// ` + +YAML.stringify({ number: 3, plain: 'string', block: 'two\nlines\n' }) +// `number: 3 +// plain: string +// block: | +// two +// lines +// ` +``` + +--- + +Browser testing provided by: + + +BrowserStack + diff --git a/node_modules/yaml/bin.mjs b/node_modules/yaml/bin.mjs new file mode 100755 index 0000000..7504ae1 --- /dev/null +++ b/node_modules/yaml/bin.mjs @@ -0,0 +1,11 @@ +#!/usr/bin/env node + +import { UserError, cli, help } from './dist/cli.mjs' + +cli(process.stdin, error => { + if (error instanceof UserError) { + if (error.code === UserError.ARGS) console.error(`${help}\n`) + console.error(error.message) + process.exitCode = error.code + } else if (error) throw error +}) diff --git a/node_modules/yaml/browser/index.js b/node_modules/yaml/browser/index.js new file mode 100644 index 0000000..5f73271 --- /dev/null +++ b/node_modules/yaml/browser/index.js @@ -0,0 +1,5 @@ +// `export * as default from ...` fails on Webpack v4 +// https://github.com/eemeli/yaml/issues/228 +import * as YAML from './dist/index.js' +export default YAML +export * from './dist/index.js' diff --git a/node_modules/yaml/browser/package.json b/node_modules/yaml/browser/package.json new file mode 100644 index 0000000..3dbc1ca --- /dev/null +++ b/node_modules/yaml/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/yaml/package.json b/node_modules/yaml/package.json new file mode 100644 index 0000000..b7fbff4 --- /dev/null +++ b/node_modules/yaml/package.json @@ -0,0 +1,96 @@ +{ + "name": "yaml", + "version": "2.7.0", + "license": "ISC", + "author": "Eemeli Aro ", + "repository": "github:eemeli/yaml", + "description": "JavaScript parser and stringifier for YAML", + "keywords": [ + "YAML", + "parser", + 
"stringifier" + ], + "homepage": "https://eemeli.org/yaml/", + "files": [ + "browser/", + "dist/", + "util.js" + ], + "type": "commonjs", + "main": "./dist/index.js", + "bin": "./bin.mjs", + "browser": { + "./dist/index.js": "./browser/index.js", + "./dist/util.js": "./browser/dist/util.js", + "./util.js": "./browser/dist/util.js" + }, + "exports": { + ".": { + "types": "./dist/index.d.ts", + "node": "./dist/index.js", + "default": "./browser/index.js" + }, + "./package.json": "./package.json", + "./util": { + "types": "./dist/util.d.ts", + "node": "./dist/util.js", + "default": "./browser/dist/util.js" + } + }, + "scripts": { + "build": "npm run build:node && npm run build:browser", + "build:browser": "rollup -c config/rollup.browser-config.mjs", + "build:node": "rollup -c config/rollup.node-config.mjs", + "clean": "git clean -fdxe node_modules", + "lint": "eslint config/ src/", + "prettier": "prettier --write .", + "prestart": "rollup --sourcemap -c config/rollup.node-config.mjs", + "start": "node --enable-source-maps -i -e 'YAML=require(\"./dist/index.js\");const{parse,parseDocument,parseAllDocuments}=YAML'", + "test": "jest --config config/jest.config.js", + "test:all": "npm test && npm run test:types && npm run test:dist && npm run test:dist:types", + "test:browsers": "cd playground && npm test", + "test:dist": "npm run build:node && jest --config config/jest.config.js", + "test:dist:types": "tsc --allowJs --moduleResolution node --noEmit --target es5 dist/index.js", + "test:types": "tsc --noEmit && tsc --noEmit -p tests/tsconfig.json", + "docs:install": "cd docs-slate && bundle install", + "predocs:deploy": "node docs/prepare-docs.mjs", + "docs:deploy": "cd docs-slate && ./deploy.sh", + "predocs": "node docs/prepare-docs.mjs", + "docs": "cd docs-slate && bundle exec middleman server", + "preversion": "npm test && npm run build", + "prepublishOnly": "npm run clean && npm test && npm run build" + }, + "browserslist": "defaults, not ie 11", + "prettier": { + 
"arrowParens": "avoid", + "semi": false, + "singleQuote": true, + "trailingComma": "none" + }, + "devDependencies": { + "@babel/core": "^7.12.10", + "@babel/plugin-transform-typescript": "^7.12.17", + "@babel/preset-env": "^7.12.11", + "@eslint/js": "^9.9.1", + "@rollup/plugin-babel": "^6.0.3", + "@rollup/plugin-replace": "^5.0.2", + "@rollup/plugin-typescript": "^12.1.1", + "@types/jest": "^29.2.4", + "@types/node": "^20.11.20", + "babel-jest": "^29.0.1", + "cross-env": "^7.0.3", + "eslint": "^9.9.1", + "eslint-config-prettier": "^9.0.0", + "fast-check": "^2.12.0", + "jest": "^29.0.1", + "jest-ts-webcompat-resolver": "^1.0.0", + "prettier": "^3.0.2", + "rollup": "^4.12.0", + "tslib": "^2.8.1", + "typescript": "^5.7.2", + "typescript-eslint": "^8.4.0" + }, + "engines": { + "node": ">= 14" + } +} diff --git a/node_modules/yaml/util.js b/node_modules/yaml/util.js new file mode 100644 index 0000000..070103f --- /dev/null +++ b/node_modules/yaml/util.js @@ -0,0 +1,2 @@ +// Re-exporter for Node.js < 12.16.0 +module.exports = require('./dist/util.js') diff --git a/openapitools.json b/openapitools.json new file mode 100644 index 0000000..a82623d --- /dev/null +++ b/openapitools.json @@ -0,0 +1,7 @@ +{ + "$schema": "./node_modules/@openapitools/openapi-generator-cli/config.schema.json", + "spaces": 2, + "generator-cli": { + "version": "7.14.0" + } +} diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..88a9093 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,95 @@ +[project] +name = "vectorize_client" +version = "1.0.0" +description = "Vectorize API (Beta)" +authors = [ + {name = "Vectorize",email = "team@openapitools.org"}, +] +license = "NoLicense" +readme = "README.md" +keywords = ["OpenAPI", "OpenAPI-Generator", "Vectorize API (Beta)"] +requires-python = ">=3.9" + +dependencies = [ + "urllib3 (>=2.1.0,<3.0.0)", + "python-dateutil (>=2.8.2)", + "pydantic (>=2)", + "typing-extensions (>=4.7.1)" +] + +[project.urls] +Repository = 
"https://github.com/GIT_USER_ID/GIT_REPO_ID" + +[tool.poetry] +requires-poetry = ">=2.0" + +[tool.poetry.group.dev.dependencies] +pytest = ">= 7.2.1" +pytest-cov = ">= 2.8.1" +tox = ">= 3.9.0" +flake8 = ">= 4.0.0" +types-python-dateutil = ">= 2.8.19.14" +mypy = ">= 1.5" + + +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[tool.pylint.'MESSAGES CONTROL'] +extension-pkg-whitelist = "pydantic" + +[tool.mypy] +files = [ + "vectorize_client", + #"test", # auto-generated tests + "tests", # hand-written tests +] +# TODO: enable "strict" once all these individual checks are passing +# strict = true + +# List from: https://mypy.readthedocs.io/en/stable/existing_code.html#introduce-stricter-options +warn_unused_configs = true +warn_redundant_casts = true +warn_unused_ignores = true + +## Getting these passing should be easy +strict_equality = true +extra_checks = true + +## Strongly recommend enabling this one as soon as you can +check_untyped_defs = true + +## These shouldn't be too much additional work, but may be tricky to +## get passing if you use a lot of untyped libraries +disallow_subclassing_any = true +disallow_untyped_decorators = true +disallow_any_generics = true + +### These next few are various gradations of forcing use of type annotations +#disallow_untyped_calls = true +#disallow_incomplete_defs = true +#disallow_untyped_defs = true +# +### This one isn't too hard to get passing, but return on investment is lower +#no_implicit_reexport = true +# +### This one can be tricky to get passing if you use a lot of untyped libraries +#warn_return_any = true + +[[tool.mypy.overrides]] +module = [ + "vectorize_client.configuration", +] +warn_unused_ignores = true +strict_equality = true +extra_checks = true +check_untyped_defs = true +disallow_subclassing_any = true +disallow_untyped_decorators = true +disallow_any_generics = true +disallow_untyped_calls = true +disallow_incomplete_defs = true +disallow_untyped_defs = true 
+no_implicit_reexport = true +warn_return_any = true diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..6cbb2b9 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +urllib3 >= 2.1.0, < 3.0.0 +python_dateutil >= 2.8.2 +pydantic >= 2 +typing-extensions >= 4.7.1 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..11433ee --- /dev/null +++ b/setup.cfg @@ -0,0 +1,2 @@ +[flake8] +max-line-length=99 diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..dfe074f --- /dev/null +++ b/setup.py @@ -0,0 +1,49 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from setuptools import setup, find_packages # noqa: H301 + +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools +NAME = "vectorize-client" +VERSION = "1.0.0" +PYTHON_REQUIRES = ">= 3.9" +REQUIRES = [ + "urllib3 >= 2.1.0, < 3.0.0", + "python-dateutil >= 2.8.2", + "pydantic >= 2", + "typing-extensions >= 4.7.1", +] + +setup( + name=NAME, + version=VERSION, + description="Vectorize API (Beta)", + author="Vectorize", + author_email="team@openapitools.org", + url="", + keywords=["OpenAPI", "OpenAPI-Generator", "Vectorize API (Beta)"], + install_requires=REQUIRES, + packages=find_packages(exclude=["test", "tests"]), + include_package_data=True, + long_description_content_type='text/markdown', + long_description="""\ + API for Vectorize services + """, # noqa: E501 + package_data={"vectorize_client": ["py.typed"]}, +) \ No newline at end of file diff --git a/src/python/.gitignore b/src/python/.gitignore deleted file mode 100644 index 43995bd..0000000 --- a/src/python/.gitignore +++ /dev/null @@ -1,66 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ 
-*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*,cover -.hypothesis/ -venv/ -.venv/ -.python-version -.pytest_cache - -# Translations -*.mo -*.pot - -# Django stuff: -*.log - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -#Ipython Notebook -.ipynb_checkpoints diff --git a/src/python/README.md b/src/python/README.md deleted file mode 100644 index 6138907..0000000 --- a/src/python/README.md +++ /dev/null @@ -1,95 +0,0 @@ -from dataclasses import field - -# Vectorize Client -Python Api Client for Vectorize -For more information, please visit [https://vectorize.io](https://vectorize.io) - -## Requirements. - -Python 3.8+ - -## Installation & Usage -```sh -pip install vectorize-client -``` - -Then import the package: -```python -import vectorize_client -``` - -## Getting Started - -Please follow the [installation procedure](#installation--usage) and then run the following: - -```python -import vectorize_client as v - -TOKEN = '' -ORG = '' - -with v.ApiClient(v.Configuration(access_token=TOKEN)) as api: - pipelines = v.PipelinesApi(api) - response = pipelines.get_pipelines(ORG) - print("Found" + str(len(response.data)) + " pipelines") -``` - -## Documentation for API Endpoints - -All URIs are relative to *https://api.vectorize.io/v1* - -See the full [reference](https://vectorize.readme.io/reference) for more information. 
- -## Usage - -First, export your token and org id as environment variables: - -```sh -export VECTORIZE_TOKEN= -export VECTORIZE_ORG= -``` -Then, initialize the client with your token and org id: - -```python -import os -TOKEN = os.environ['VECTORIZE_TOKEN'] -ORG = os.environ['VECTORIZE_ORG'] -``` - -### Extraction - -Set the file you want to extract data from: - -```sh -export FILE= -``` - -Then, run the following code: -```python -import os -import vectorize_client as v -import time, logging - -TOKEN = os.environ['VECTORIZE_TOKEN'] -ORG = os.environ['VECTORIZE_ORG'] -FILE = os.environ['FILE'] - -with v.ApiClient(v.Configuration(access_token=TOKEN)) as api: - with open(FILE, 'rb') as file: - data = file.read() - extraction_id = v.ExtractionApi(api).start_extraction(ORG, data).extraction_id - print(f"Extraction started with id {extraction_id}") - while True: - extraction = v.ExtractionApi(api).get_extraction_result(ORG, extraction_id) - if extraction.ready: - extracted_data = extraction.data - if extracted_data.success: - print(extracted_data) - break - else: - raise Exception(extracted_data.error) - print("Waiting for extraction to complete...") - time.sleep(1) -``` - - diff --git a/src/python/pyproject.toml b/src/python/pyproject.toml deleted file mode 100644 index 6cba77a..0000000 --- a/src/python/pyproject.toml +++ /dev/null @@ -1,66 +0,0 @@ -[tool.poetry] -name = "vectorize-client" -version = "0.2.1" -description = "Python client for the Vectorize API" -authors = [ "Vectorize " ] -license = "MIT" -readme = "README.md" -repository = "https://github.com/vectorize-io/vectorize-clients" -keywords = [ - "vectorize", - "vectorize.io", - "generative-ai", - "embeddings", - "rag" -] -include = [ "vectorize_client/py.typed" ] -homepage = "https://vectorize.io" - - [tool.poetry.dependencies] - python = "^3.8" - urllib3 = ">= 1.25.3, < 3.0.0" - python-dateutil = ">= 2.8.2" - pydantic = ">= 2" - typing-extensions = ">= 4.7.1" - - [tool.poetry.dev-dependencies] - pytest = 
">= 7.2.1" - pytest-cov = ">= 2.8.1" - tox = ">= 3.9.0" - flake8 = ">= 4.0.0" - types-python-dateutil = ">= 2.8.19.14" - mypy = ">= 1.5" - -[tool.pylint."MESSAGES CONTROL"] -extension-pkg-whitelist = "pydantic" - -[tool.mypy] -files = [ "vectorize_client", "tests" ] -warn_unused_configs = true -warn_redundant_casts = true -warn_unused_ignores = true -strict_equality = true -extra_checks = true -check_untyped_defs = true -disallow_subclassing_any = true -disallow_untyped_decorators = true -disallow_any_generics = true - - [[tool.mypy.overrides]] - module = [ "vectorize_client.configuration" ] - warn_unused_ignores = true - strict_equality = true - extra_checks = true - check_untyped_defs = true - disallow_subclassing_any = true - disallow_untyped_decorators = true - disallow_any_generics = true - disallow_untyped_calls = true - disallow_incomplete_defs = true - disallow_untyped_defs = true - no_implicit_reexport = true - warn_return_any = true - -[build-system] -requires = [ "setuptools" ] -build-backend = "setuptools.build_meta" diff --git a/src/python/vectorize_client/__init__.py b/src/python/vectorize_client/__init__.py deleted file mode 100644 index c202aea..0000000 --- a/src/python/vectorize_client/__init__.py +++ /dev/null @@ -1,120 +0,0 @@ -# coding: utf-8 - -# flake8: noqa - -""" - Vectorize API (Beta) - - API for Vectorize services - - The version of the OpenAPI document: 0.0.1 - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. 
-""" # noqa: E501 - - -__version__ = "1.0.0" - -# import apis into sdk package -from vectorize_client.api.connectors_api import ConnectorsApi -from vectorize_client.api.extraction_api import ExtractionApi -from vectorize_client.api.files_api import FilesApi -from vectorize_client.api.pipelines_api import PipelinesApi -from vectorize_client.api.uploads_api import UploadsApi - -# import ApiClient -from vectorize_client.api_response import ApiResponse -from vectorize_client.api_client import ApiClient -from vectorize_client.configuration import Configuration -from vectorize_client.exceptions import OpenApiException -from vectorize_client.exceptions import ApiTypeError -from vectorize_client.exceptions import ApiValueError -from vectorize_client.exceptions import ApiKeyError -from vectorize_client.exceptions import ApiAttributeError -from vectorize_client.exceptions import ApiException - -# import models into sdk package -from vectorize_client.models.ai_platform import AIPlatform -from vectorize_client.models.ai_platform_config_schema import AIPlatformConfigSchema -from vectorize_client.models.ai_platform_schema import AIPlatformSchema -from vectorize_client.models.ai_platform_type import AIPlatformType -from vectorize_client.models.add_user_from_source_connector_response import AddUserFromSourceConnectorResponse -from vectorize_client.models.add_user_to_source_connector_request import AddUserToSourceConnectorRequest -from vectorize_client.models.add_user_to_source_connector_request_selected_files_value import AddUserToSourceConnectorRequestSelectedFilesValue -from vectorize_client.models.create_ai_platform_connector import CreateAIPlatformConnector -from vectorize_client.models.create_ai_platform_connector_response import CreateAIPlatformConnectorResponse -from vectorize_client.models.create_destination_connector import CreateDestinationConnector -from vectorize_client.models.create_destination_connector_response import CreateDestinationConnectorResponse -from 
vectorize_client.models.create_pipeline_response import CreatePipelineResponse -from vectorize_client.models.create_pipeline_response_data import CreatePipelineResponseData -from vectorize_client.models.create_source_connector import CreateSourceConnector -from vectorize_client.models.create_source_connector_response import CreateSourceConnectorResponse -from vectorize_client.models.created_ai_platform_connector import CreatedAIPlatformConnector -from vectorize_client.models.created_destination_connector import CreatedDestinationConnector -from vectorize_client.models.created_source_connector import CreatedSourceConnector -from vectorize_client.models.deep_research_result import DeepResearchResult -from vectorize_client.models.delete_ai_platform_connector_response import DeleteAIPlatformConnectorResponse -from vectorize_client.models.delete_destination_connector_response import DeleteDestinationConnectorResponse -from vectorize_client.models.delete_file_response import DeleteFileResponse -from vectorize_client.models.delete_pipeline_response import DeletePipelineResponse -from vectorize_client.models.delete_source_connector_response import DeleteSourceConnectorResponse -from vectorize_client.models.destination_connector import DestinationConnector -from vectorize_client.models.destination_connector_schema import DestinationConnectorSchema -from vectorize_client.models.destination_connector_type import DestinationConnectorType -from vectorize_client.models.document import Document -from vectorize_client.models.extraction_chunking_strategy import ExtractionChunkingStrategy -from vectorize_client.models.extraction_result import ExtractionResult -from vectorize_client.models.extraction_result_response import ExtractionResultResponse -from vectorize_client.models.extraction_type import ExtractionType -from vectorize_client.models.get_ai_platform_connectors200_response import GetAIPlatformConnectors200Response -from vectorize_client.models.get_deep_research_response 
import GetDeepResearchResponse -from vectorize_client.models.get_destination_connectors200_response import GetDestinationConnectors200Response -from vectorize_client.models.get_pipeline_events_response import GetPipelineEventsResponse -from vectorize_client.models.get_pipeline_metrics_response import GetPipelineMetricsResponse -from vectorize_client.models.get_pipeline_response import GetPipelineResponse -from vectorize_client.models.get_pipelines400_response import GetPipelines400Response -from vectorize_client.models.get_pipelines_response import GetPipelinesResponse -from vectorize_client.models.get_source_connectors200_response import GetSourceConnectors200Response -from vectorize_client.models.get_upload_files_response import GetUploadFilesResponse -from vectorize_client.models.metadata_extraction_strategy import MetadataExtractionStrategy -from vectorize_client.models.metadata_extraction_strategy_schema import MetadataExtractionStrategySchema -from vectorize_client.models.n8_n_config import N8NConfig -from vectorize_client.models.pipeline_configuration_schema import PipelineConfigurationSchema -from vectorize_client.models.pipeline_events import PipelineEvents -from vectorize_client.models.pipeline_list_summary import PipelineListSummary -from vectorize_client.models.pipeline_metrics import PipelineMetrics -from vectorize_client.models.pipeline_summary import PipelineSummary -from vectorize_client.models.remove_user_from_source_connector_request import RemoveUserFromSourceConnectorRequest -from vectorize_client.models.remove_user_from_source_connector_response import RemoveUserFromSourceConnectorResponse -from vectorize_client.models.retrieve_context import RetrieveContext -from vectorize_client.models.retrieve_context_message import RetrieveContextMessage -from vectorize_client.models.retrieve_documents_request import RetrieveDocumentsRequest -from vectorize_client.models.retrieve_documents_response import RetrieveDocumentsResponse -from 
vectorize_client.models.schedule_schema import ScheduleSchema -from vectorize_client.models.schedule_schema_type import ScheduleSchemaType -from vectorize_client.models.source_connector import SourceConnector -from vectorize_client.models.source_connector_schema import SourceConnectorSchema -from vectorize_client.models.source_connector_type import SourceConnectorType -from vectorize_client.models.start_deep_research_request import StartDeepResearchRequest -from vectorize_client.models.start_deep_research_response import StartDeepResearchResponse -from vectorize_client.models.start_extraction_request import StartExtractionRequest -from vectorize_client.models.start_extraction_response import StartExtractionResponse -from vectorize_client.models.start_file_upload_request import StartFileUploadRequest -from vectorize_client.models.start_file_upload_response import StartFileUploadResponse -from vectorize_client.models.start_file_upload_to_connector_request import StartFileUploadToConnectorRequest -from vectorize_client.models.start_file_upload_to_connector_response import StartFileUploadToConnectorResponse -from vectorize_client.models.start_pipeline_response import StartPipelineResponse -from vectorize_client.models.stop_pipeline_response import StopPipelineResponse -from vectorize_client.models.update_ai_platform_connector_request import UpdateAIPlatformConnectorRequest -from vectorize_client.models.update_ai_platform_connector_response import UpdateAIPlatformConnectorResponse -from vectorize_client.models.update_destination_connector_request import UpdateDestinationConnectorRequest -from vectorize_client.models.update_destination_connector_response import UpdateDestinationConnectorResponse -from vectorize_client.models.update_source_connector_request import UpdateSourceConnectorRequest -from vectorize_client.models.update_source_connector_response import UpdateSourceConnectorResponse -from vectorize_client.models.update_source_connector_response_data import 
UpdateSourceConnectorResponseData -from vectorize_client.models.update_user_in_source_connector_request import UpdateUserInSourceConnectorRequest -from vectorize_client.models.update_user_in_source_connector_response import UpdateUserInSourceConnectorResponse -from vectorize_client.models.updated_ai_platform_connector_data import UpdatedAIPlatformConnectorData -from vectorize_client.models.updated_destination_connector_data import UpdatedDestinationConnectorData -from vectorize_client.models.upload_file import UploadFile diff --git a/src/python/vectorize_client/api/__init__.py b/src/python/vectorize_client/api/__init__.py deleted file mode 100644 index a9f74c1..0000000 --- a/src/python/vectorize_client/api/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# flake8: noqa - -# import apis into api package -from vectorize_client.api.connectors_api import ConnectorsApi -from vectorize_client.api.extraction_api import ExtractionApi -from vectorize_client.api.files_api import FilesApi -from vectorize_client.api.pipelines_api import PipelinesApi -from vectorize_client.api.uploads_api import UploadsApi - diff --git a/src/python/vectorize_client/api/connectors_api.py b/src/python/vectorize_client/api/connectors_api.py deleted file mode 100644 index 995bf44..0000000 --- a/src/python/vectorize_client/api/connectors_api.py +++ /dev/null @@ -1,5414 +0,0 @@ -# coding: utf-8 - -""" - Vectorize API (Beta) - - API for Vectorize services - - The version of the OpenAPI document: 0.0.1 - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. 
-""" # noqa: E501 - -import warnings -from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt -from typing import Any, Dict, List, Optional, Tuple, Union -from typing_extensions import Annotated - -from pydantic import Field, StrictStr -from typing import List -from typing_extensions import Annotated -from vectorize_client.models.ai_platform import AIPlatform -from vectorize_client.models.add_user_from_source_connector_response import AddUserFromSourceConnectorResponse -from vectorize_client.models.add_user_to_source_connector_request import AddUserToSourceConnectorRequest -from vectorize_client.models.create_ai_platform_connector import CreateAIPlatformConnector -from vectorize_client.models.create_ai_platform_connector_response import CreateAIPlatformConnectorResponse -from vectorize_client.models.create_destination_connector import CreateDestinationConnector -from vectorize_client.models.create_destination_connector_response import CreateDestinationConnectorResponse -from vectorize_client.models.create_source_connector import CreateSourceConnector -from vectorize_client.models.create_source_connector_response import CreateSourceConnectorResponse -from vectorize_client.models.delete_ai_platform_connector_response import DeleteAIPlatformConnectorResponse -from vectorize_client.models.delete_destination_connector_response import DeleteDestinationConnectorResponse -from vectorize_client.models.delete_source_connector_response import DeleteSourceConnectorResponse -from vectorize_client.models.destination_connector import DestinationConnector -from vectorize_client.models.get_ai_platform_connectors200_response import GetAIPlatformConnectors200Response -from vectorize_client.models.get_destination_connectors200_response import GetDestinationConnectors200Response -from vectorize_client.models.get_source_connectors200_response import GetSourceConnectors200Response -from vectorize_client.models.remove_user_from_source_connector_request import 
RemoveUserFromSourceConnectorRequest -from vectorize_client.models.remove_user_from_source_connector_response import RemoveUserFromSourceConnectorResponse -from vectorize_client.models.source_connector import SourceConnector -from vectorize_client.models.update_ai_platform_connector_request import UpdateAIPlatformConnectorRequest -from vectorize_client.models.update_ai_platform_connector_response import UpdateAIPlatformConnectorResponse -from vectorize_client.models.update_destination_connector_request import UpdateDestinationConnectorRequest -from vectorize_client.models.update_destination_connector_response import UpdateDestinationConnectorResponse -from vectorize_client.models.update_source_connector_request import UpdateSourceConnectorRequest -from vectorize_client.models.update_source_connector_response import UpdateSourceConnectorResponse -from vectorize_client.models.update_user_in_source_connector_request import UpdateUserInSourceConnectorRequest -from vectorize_client.models.update_user_in_source_connector_response import UpdateUserInSourceConnectorResponse - -from vectorize_client.api_client import ApiClient, RequestSerialized -from vectorize_client.api_response import ApiResponse -from vectorize_client.rest import RESTResponseType - - -class ConnectorsApi: - """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - - Do not edit the class manually. 
- """ - - def __init__(self, api_client=None) -> None: - if api_client is None: - api_client = ApiClient.get_default() - self.api_client = api_client - - - @validate_call - def add_user_to_source_connector( - self, - organization: StrictStr, - source_connector_id: StrictStr, - add_user_to_source_connector_request: AddUserToSourceConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> AddUserFromSourceConnectorResponse: - """Add a user to a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param add_user_to_source_connector_request: (required) - :type add_user_to_source_connector_request: AddUserToSourceConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. 
- :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._add_user_to_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - add_user_to_source_connector_request=add_user_to_source_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "AddUserFromSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def add_user_to_source_connector_with_http_info( - self, - organization: StrictStr, - source_connector_id: StrictStr, - add_user_to_source_connector_request: AddUserToSourceConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[AddUserFromSourceConnectorResponse]: - """Add a user to a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param add_user_to_source_connector_request: (required) - :type add_user_to_source_connector_request: AddUserToSourceConnectorRequest - :param _request_timeout: timeout setting for this request. 
If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._add_user_to_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - add_user_to_source_connector_request=add_user_to_source_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "AddUserFromSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def add_user_to_source_connector_without_preload_content( - self, - organization: StrictStr, - source_connector_id: StrictStr, - add_user_to_source_connector_request: AddUserToSourceConnectorRequest, - _request_timeout: 
Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Add a user to a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param add_user_to_source_connector_request: (required) - :type add_user_to_source_connector_request: AddUserToSourceConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._add_user_to_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - add_user_to_source_connector_request=add_user_to_source_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "AddUserFromSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _add_user_to_source_connector_serialize( - self, - organization, - source_connector_id, - add_user_to_source_connector_request, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if source_connector_id is not None: - _path_params['sourceConnectorId'] = source_connector_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if add_user_to_source_connector_request is not None: - _body_params = add_user_to_source_connector_request - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the 
HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/org/{organization}/connectors/sources/{sourceConnectorId}/users', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def create_ai_platform_connector( - self, - organization: StrictStr, - create_ai_platform_connector: Annotated[List[CreateAIPlatformConnector], Field(min_length=1)], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> CreateAIPlatformConnectorResponse: - """Create a new AI Platform connector. 
Config values: Amazon Bedrock (BEDROCK): Name (name): text, Access Key (access-key): text, Secret Key (key): text) | Google Vertex AI (VERTEX): Name (name): text, Service Account Json (key): textarea, Region (region): text) | OpenAI (OPENAI): Name (name): text, API Key (key): text) | Voyage AI (VOYAGE): Name (name): text, API Key (key): text) | Built-in (VECTORIZE): ) - - - :param organization: (required) - :type organization: str - :param create_ai_platform_connector: (required) - :type create_ai_platform_connector: List[CreateAIPlatformConnector] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._create_ai_platform_connector_serialize( - organization=organization, - create_ai_platform_connector=create_ai_platform_connector, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "CreateAIPlatformConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def create_ai_platform_connector_with_http_info( - self, - organization: StrictStr, - create_ai_platform_connector: Annotated[List[CreateAIPlatformConnector], Field(min_length=1)], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[CreateAIPlatformConnectorResponse]: - """Create a new AI Platform connector. 
Config values: Amazon Bedrock (BEDROCK): Name (name): text, Access Key (access-key): text, Secret Key (key): text) | Google Vertex AI (VERTEX): Name (name): text, Service Account Json (key): textarea, Region (region): text) | OpenAI (OPENAI): Name (name): text, API Key (key): text) | Voyage AI (VOYAGE): Name (name): text, API Key (key): text) | Built-in (VECTORIZE): ) - - - :param organization: (required) - :type organization: str - :param create_ai_platform_connector: (required) - :type create_ai_platform_connector: List[CreateAIPlatformConnector] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._create_ai_platform_connector_serialize( - organization=organization, - create_ai_platform_connector=create_ai_platform_connector, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "CreateAIPlatformConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def create_ai_platform_connector_without_preload_content( - self, - organization: StrictStr, - create_ai_platform_connector: Annotated[List[CreateAIPlatformConnector], Field(min_length=1)], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Create a new AI Platform connector. 
Config values: Amazon Bedrock (BEDROCK): Name (name): text, Access Key (access-key): text, Secret Key (key): text) | Google Vertex AI (VERTEX): Name (name): text, Service Account Json (key): textarea, Region (region): text) | OpenAI (OPENAI): Name (name): text, API Key (key): text) | Voyage AI (VOYAGE): Name (name): text, API Key (key): text) | Built-in (VECTORIZE): ) - - - :param organization: (required) - :type organization: str - :param create_ai_platform_connector: (required) - :type create_ai_platform_connector: List[CreateAIPlatformConnector] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._create_ai_platform_connector_serialize( - organization=organization, - create_ai_platform_connector=create_ai_platform_connector, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "CreateAIPlatformConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _create_ai_platform_connector_serialize( - self, - organization, - create_ai_platform_connector, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - 'CreateAIPlatformConnector': '', - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if create_ai_platform_connector is not None: - _body_params = create_ai_platform_connector - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - 
self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/org/{organization}/connectors/aiplatforms', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def create_destination_connector( - self, - organization: StrictStr, - create_destination_connector: Annotated[List[CreateDestinationConnector], Field(min_length=1)], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> CreateDestinationConnectorResponse: - """Create a new destination connector. 
Config values: Couchbase Capella (CAPELLA): Name (name): text, Cluster Access Name (username): text, Cluster Access Password (password): text, Connection String (connection-string): text) | DataStax Astra (DATASTAX): Name (name): text, API Endpoint (endpoint_secret): text, Application Token (token): text) | Elasticsearch (ELASTIC): Name (name): text, Host (host): text, Port (port): text, API Key (api-key): text) | Pinecone (PINECONE): Name (name): text, API Key (api-key): text) | SingleStore (SINGLESTORE): Name (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Milvus (MILVUS): Name (name): text, Public Endpoint (url): text, Token (token): text, Username (username): text, Password (password): text) | PostgreSQL (POSTGRESQL): Name (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Qdrant (QDRANT): Name (name): text, Host (host): text, API Key (api-key): text) | Supabase (SUPABASE): Name (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Weaviate (WEAVIATE): Name (name): text, Endpoint (host): text, API Key (api-key): text) | Azure AI Search (AZUREAISEARCH): Name (name): text, Azure AI Search Service Name (service-name): text, API Key (api-key): text) | Built-in (VECTORIZE): ) | Chroma (CHROMA): Name (name): text, API Key (apiKey): text) | MongoDB (MONGODB): Name (name): text, API Key (apiKey): text) - - - :param organization: (required) - :type organization: str - :param create_destination_connector: (required) - :type create_destination_connector: List[CreateDestinationConnector] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._create_destination_connector_serialize( - organization=organization, - create_destination_connector=create_destination_connector, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "CreateDestinationConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def create_destination_connector_with_http_info( - self, - organization: StrictStr, - create_destination_connector: Annotated[List[CreateDestinationConnector], Field(min_length=1)], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, 
Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[CreateDestinationConnectorResponse]: - """Create a new destination connector. Config values: Couchbase Capella (CAPELLA): Name (name): text, Cluster Access Name (username): text, Cluster Access Password (password): text, Connection String (connection-string): text) | DataStax Astra (DATASTAX): Name (name): text, API Endpoint (endpoint_secret): text, Application Token (token): text) | Elasticsearch (ELASTIC): Name (name): text, Host (host): text, Port (port): text, API Key (api-key): text) | Pinecone (PINECONE): Name (name): text, API Key (api-key): text) | SingleStore (SINGLESTORE): Name (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Milvus (MILVUS): Name (name): text, Public Endpoint (url): text, Token (token): text, Username (username): text, Password (password): text) | PostgreSQL (POSTGRESQL): Name (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Qdrant (QDRANT): Name (name): text, Host (host): text, API Key (api-key): text) | Supabase (SUPABASE): Name (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Weaviate (WEAVIATE): Name (name): text, Endpoint (host): text, API Key (api-key): text) | Azure AI Search (AZUREAISEARCH): Name (name): text, Azure AI Search Service Name (service-name): text, API Key (api-key): text) | Built-in (VECTORIZE): ) | Chroma (CHROMA): Name (name): text, API Key (apiKey): text) | MongoDB (MONGODB): Name (name): text, API Key (apiKey): text) - - - :param organization: (required) - :type organization: str - :param create_destination_connector: (required) - :type create_destination_connector: 
List[CreateDestinationConnector] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._create_destination_connector_serialize( - organization=organization, - create_destination_connector=create_destination_connector, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "CreateDestinationConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def create_destination_connector_without_preload_content( - self, - organization: StrictStr, - create_destination_connector: Annotated[List[CreateDestinationConnector], 
Field(min_length=1)], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Create a new destination connector. Config values: Couchbase Capella (CAPELLA): Name (name): text, Cluster Access Name (username): text, Cluster Access Password (password): text, Connection String (connection-string): text) | DataStax Astra (DATASTAX): Name (name): text, API Endpoint (endpoint_secret): text, Application Token (token): text) | Elasticsearch (ELASTIC): Name (name): text, Host (host): text, Port (port): text, API Key (api-key): text) | Pinecone (PINECONE): Name (name): text, API Key (api-key): text) | SingleStore (SINGLESTORE): Name (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Milvus (MILVUS): Name (name): text, Public Endpoint (url): text, Token (token): text, Username (username): text, Password (password): text) | PostgreSQL (POSTGRESQL): Name (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Qdrant (QDRANT): Name (name): text, Host (host): text, API Key (api-key): text) | Supabase (SUPABASE): Name (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Weaviate (WEAVIATE): Name (name): text, Endpoint (host): text, API Key (api-key): text) | Azure AI Search (AZUREAISEARCH): Name (name): text, Azure AI Search Service Name (service-name): text, API Key (api-key): text) | Built-in (VECTORIZE): ) | Chroma (CHROMA): Name (name): text, API Key (apiKey): text) | MongoDB 
(MONGODB): Name (name): text, API Key (apiKey): text) - - - :param organization: (required) - :type organization: str - :param create_destination_connector: (required) - :type create_destination_connector: List[CreateDestinationConnector] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._create_destination_connector_serialize( - organization=organization, - create_destination_connector=create_destination_connector, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "CreateDestinationConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _create_destination_connector_serialize( - self, - organization, - create_destination_connector, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - 'CreateDestinationConnector': '', - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if create_destination_connector is not None: - _body_params = create_destination_connector - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - 
self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/org/{organization}/connectors/destinations', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def create_source_connector( - self, - organization: StrictStr, - create_source_connector: Annotated[List[CreateSourceConnector], Field(min_length=1)], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> CreateSourceConnectorResponse: - """Create a new source connector. 
Config values: Amazon S3 (AWS_S3): Name (name): text, Access Key (access-key): text, Secret Key (secret-key): text, Bucket Name (bucket-name): text, Endpoint (endpoint): url, Region (region): text, Allow as archive destination (archiver): boolean) | Azure Blob Storage (AZURE_BLOB): Name (name): text, Storage Account Name (storage-account-name): text, Storage Account Key (storage-account-key): text, Container (container): text, Endpoint (endpoint): url) | Confluence (CONFLUENCE): Name (name): text, Username (username): text, API Token (api-token): text, Domain (domain): text) | Discord (DISCORD): Name (name): text, Server ID (guild-id): text, Bot token (bot-token): text, Channel ID (channel-ids): array oftext) | Dropbox (DROPBOX): Name (name): text) | Google Drive OAuth (GOOGLE_DRIVE_OAUTH): Name (name): text) | Google Drive (Service Account) (GOOGLE_DRIVE): Name (name): text, Service Account JSON (service-account-json): textarea) | Google Drive Multi-User (Vectorize) (GOOGLE_DRIVE_OAUTH_MULTI): Name (name): text) | Google Drive Multi-User (White Label) (GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM): Name (name): text, OAuth2 Client Id (oauth2-client-id): text, OAuth2 Client Secret (oauth2-client-secret): text) | Firecrawl (FIRECRAWL): Name (name): text, API Key (api-key): text) | GCP Cloud Storage (GCS): Name (name): text, Service Account JSON (service-account-json): textarea, Bucket (bucket-name): text) | Intercom (INTERCOM): Name (name): text, Access Token (intercomAccessToken): text) | OneDrive (ONE_DRIVE): Name (name): text, Client Id (ms-client-id): text, Tenant Id (ms-tenant-id): text, Client Secret (ms-client-secret): text, Users (users): array oftext) | SharePoint (SHAREPOINT): Name (name): text, Client Id (ms-client-id): text, Tenant Id (ms-tenant-id): text, Client Secret (ms-client-secret): text) | Web Crawler (WEB_CRAWLER): Name (name): text, Seed URL(s) (seed-urls): array ofurl) | File Upload (FILE_UPLOAD): Name (name): text) - - - :param organization: (required) - 
:type organization: str - :param create_source_connector: (required) - :type create_source_connector: List[CreateSourceConnector] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._create_source_connector_serialize( - organization=organization, - create_source_connector=create_source_connector, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "CreateSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def create_source_connector_with_http_info( - self, - organization: StrictStr, - create_source_connector: Annotated[List[CreateSourceConnector], Field(min_length=1)], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[CreateSourceConnectorResponse]: - """Create a new source connector. 
Config values: Amazon S3 (AWS_S3): Name (name): text, Access Key (access-key): text, Secret Key (secret-key): text, Bucket Name (bucket-name): text, Endpoint (endpoint): url, Region (region): text, Allow as archive destination (archiver): boolean) | Azure Blob Storage (AZURE_BLOB): Name (name): text, Storage Account Name (storage-account-name): text, Storage Account Key (storage-account-key): text, Container (container): text, Endpoint (endpoint): url) | Confluence (CONFLUENCE): Name (name): text, Username (username): text, API Token (api-token): text, Domain (domain): text) | Discord (DISCORD): Name (name): text, Server ID (guild-id): text, Bot token (bot-token): text, Channel ID (channel-ids): array oftext) | Dropbox (DROPBOX): Name (name): text) | Google Drive OAuth (GOOGLE_DRIVE_OAUTH): Name (name): text) | Google Drive (Service Account) (GOOGLE_DRIVE): Name (name): text, Service Account JSON (service-account-json): textarea) | Google Drive Multi-User (Vectorize) (GOOGLE_DRIVE_OAUTH_MULTI): Name (name): text) | Google Drive Multi-User (White Label) (GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM): Name (name): text, OAuth2 Client Id (oauth2-client-id): text, OAuth2 Client Secret (oauth2-client-secret): text) | Firecrawl (FIRECRAWL): Name (name): text, API Key (api-key): text) | GCP Cloud Storage (GCS): Name (name): text, Service Account JSON (service-account-json): textarea, Bucket (bucket-name): text) | Intercom (INTERCOM): Name (name): text, Access Token (intercomAccessToken): text) | OneDrive (ONE_DRIVE): Name (name): text, Client Id (ms-client-id): text, Tenant Id (ms-tenant-id): text, Client Secret (ms-client-secret): text, Users (users): array oftext) | SharePoint (SHAREPOINT): Name (name): text, Client Id (ms-client-id): text, Tenant Id (ms-tenant-id): text, Client Secret (ms-client-secret): text) | Web Crawler (WEB_CRAWLER): Name (name): text, Seed URL(s) (seed-urls): array ofurl) | File Upload (FILE_UPLOAD): Name (name): text) - - - :param organization: (required) - 
:type organization: str - :param create_source_connector: (required) - :type create_source_connector: List[CreateSourceConnector] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._create_source_connector_serialize( - organization=organization, - create_source_connector=create_source_connector, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "CreateSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def create_source_connector_without_preload_content( - self, - organization: StrictStr, - create_source_connector: Annotated[List[CreateSourceConnector], Field(min_length=1)], - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Create a new source connector. 
Config values: Amazon S3 (AWS_S3): Name (name): text, Access Key (access-key): text, Secret Key (secret-key): text, Bucket Name (bucket-name): text, Endpoint (endpoint): url, Region (region): text, Allow as archive destination (archiver): boolean) | Azure Blob Storage (AZURE_BLOB): Name (name): text, Storage Account Name (storage-account-name): text, Storage Account Key (storage-account-key): text, Container (container): text, Endpoint (endpoint): url) | Confluence (CONFLUENCE): Name (name): text, Username (username): text, API Token (api-token): text, Domain (domain): text) | Discord (DISCORD): Name (name): text, Server ID (guild-id): text, Bot token (bot-token): text, Channel ID (channel-ids): array oftext) | Dropbox (DROPBOX): Name (name): text) | Google Drive OAuth (GOOGLE_DRIVE_OAUTH): Name (name): text) | Google Drive (Service Account) (GOOGLE_DRIVE): Name (name): text, Service Account JSON (service-account-json): textarea) | Google Drive Multi-User (Vectorize) (GOOGLE_DRIVE_OAUTH_MULTI): Name (name): text) | Google Drive Multi-User (White Label) (GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM): Name (name): text, OAuth2 Client Id (oauth2-client-id): text, OAuth2 Client Secret (oauth2-client-secret): text) | Firecrawl (FIRECRAWL): Name (name): text, API Key (api-key): text) | GCP Cloud Storage (GCS): Name (name): text, Service Account JSON (service-account-json): textarea, Bucket (bucket-name): text) | Intercom (INTERCOM): Name (name): text, Access Token (intercomAccessToken): text) | OneDrive (ONE_DRIVE): Name (name): text, Client Id (ms-client-id): text, Tenant Id (ms-tenant-id): text, Client Secret (ms-client-secret): text, Users (users): array oftext) | SharePoint (SHAREPOINT): Name (name): text, Client Id (ms-client-id): text, Tenant Id (ms-tenant-id): text, Client Secret (ms-client-secret): text) | Web Crawler (WEB_CRAWLER): Name (name): text, Seed URL(s) (seed-urls): array ofurl) | File Upload (FILE_UPLOAD): Name (name): text) - - - :param organization: (required) - 
:type organization: str - :param create_source_connector: (required) - :type create_source_connector: List[CreateSourceConnector] - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._create_source_connector_serialize( - organization=organization, - create_source_connector=create_source_connector, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "CreateSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _create_source_connector_serialize( - self, - organization, - create_source_connector, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - 'CreateSourceConnector': '', - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if create_source_connector is not None: - _body_params = create_source_connector - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) 
- ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='POST', - resource_path='/org/{organization}/connectors/sources', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def delete_ai_platform( - self, - organization: StrictStr, - aiplatform_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> DeleteAIPlatformConnectorResponse: - """Delete an AI platform connector - - - :param organization: (required) - :type organization: str - :param aiplatform_id: (required) - :type aiplatform_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. 
- :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._delete_ai_platform_serialize( - organization=organization, - aiplatform_id=aiplatform_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "DeleteAIPlatformConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def delete_ai_platform_with_http_info( - self, - organization: StrictStr, - aiplatform_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[DeleteAIPlatformConnectorResponse]: - """Delete an AI platform connector - - - :param organization: (required) - :type organization: str - :param aiplatform_id: (required) - :type aiplatform_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._delete_ai_platform_serialize( - organization=organization, - aiplatform_id=aiplatform_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "DeleteAIPlatformConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def delete_ai_platform_without_preload_content( - self, - organization: StrictStr, - aiplatform_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - 
_host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Delete an AI platform connector - - - :param organization: (required) - :type organization: str - :param aiplatform_id: (required) - :type aiplatform_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._delete_ai_platform_serialize( - organization=organization, - aiplatform_id=aiplatform_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "DeleteAIPlatformConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _delete_ai_platform_serialize( - self, - organization, - aiplatform_id, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if aiplatform_id is not None: - _path_params['aiplatformId'] = aiplatform_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='DELETE', - resource_path='/org/{organization}/connectors/aiplatforms/{aiplatformId}', - path_params=_path_params, - query_params=_query_params, - 
header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def delete_destination_connector( - self, - organization: StrictStr, - destination_connector_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> DeleteDestinationConnectorResponse: - """Delete a destination connector - - - :param organization: (required) - :type organization: str - :param destination_connector_id: (required) - :type destination_connector_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._delete_destination_connector_serialize( - organization=organization, - destination_connector_id=destination_connector_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "DeleteDestinationConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def delete_destination_connector_with_http_info( - self, - organization: StrictStr, - destination_connector_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[DeleteDestinationConnectorResponse]: - """Delete a destination connector - - - :param organization: (required) - :type organization: str - :param destination_connector_id: (required) - :type destination_connector_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. 
- :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._delete_destination_connector_serialize( - organization=organization, - destination_connector_id=destination_connector_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "DeleteDestinationConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def delete_destination_connector_without_preload_content( - self, - organization: StrictStr, - destination_connector_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Delete a destination connector - - - :param organization: (required) - :type 
organization: str - :param destination_connector_id: (required) - :type destination_connector_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._delete_destination_connector_serialize( - organization=organization, - destination_connector_id=destination_connector_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "DeleteDestinationConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _delete_destination_connector_serialize( - self, - organization, - destination_connector_id, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if destination_connector_id is not None: - _path_params['destinationConnectorId'] = destination_connector_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='DELETE', - 
resource_path='/org/{organization}/connectors/destinations/{destinationConnectorId}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def delete_source_connector( - self, - organization: StrictStr, - source_connector_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> DeleteSourceConnectorResponse: - """Delete a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. 
- :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._delete_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "DeleteSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def delete_source_connector_with_http_info( - self, - organization: StrictStr, - source_connector_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[DeleteSourceConnectorResponse]: - """Delete a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._delete_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "DeleteSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def delete_source_connector_without_preload_content( - self, - organization: StrictStr, - source_connector_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, 
Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Delete a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._delete_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "DeleteSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _delete_source_connector_serialize( - self, - organization, - source_connector_id, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if source_connector_id is not None: - _path_params['sourceConnectorId'] = source_connector_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='DELETE', - resource_path='/org/{organization}/connectors/sources/{sourceConnectorId}', - path_params=_path_params, - 
query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def delete_user_from_source_connector( - self, - organization: StrictStr, - source_connector_id: StrictStr, - remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RemoveUserFromSourceConnectorResponse: - """Delete a source connector user - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param remove_user_from_source_connector_request: (required) - :type remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. 
@validate_call
def delete_user_from_source_connector_with_http_info(
    self,
    organization: StrictStr,
    source_connector_id: StrictStr,
    remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> ApiResponse[RemoveUserFromSourceConnectorResponse]:
    """Delete a source connector user.

    Returns the full :class:`ApiResponse` (status, headers and the
    deserialized body) instead of only the body.

    :param organization: (required)
    :type organization: str
    :param source_connector_id: (required)
    :type source_connector_id: str
    :param remove_user_from_source_connector_request: (required)
    :type remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the ApiResponse wrapping the result object.
    """  # noqa: E501

    serialized = self._delete_user_from_source_connector_serialize(
        organization=organization,
        source_connector_id=source_connector_id,
        remove_user_from_source_connector_request=remove_user_from_source_connector_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # All documented error statuses share one error model.
    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "RemoveUserFromSourceConnectorResponse"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    raw.read()  # eagerly load the body so it can be deserialized
    return self.api_client.response_deserialize(
        response_data=raw,
        response_types_map=response_types,
    )


@validate_call
def delete_user_from_source_connector_without_preload_content(
    self,
    organization: StrictStr,
    source_connector_id: StrictStr,
    remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> RESTResponseType:
    """Delete a source connector user.

    Performs the call but does NOT read the response body; the caller
    receives the raw HTTP response and is responsible for consuming it.

    :param organization: (required)
    :type organization: str
    :param source_connector_id: (required)
    :type source_connector_id: str
    :param remove_user_from_source_connector_request: (required)
    :type remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the raw, unread HTTP response.
    """  # noqa: E501

    serialized = self._delete_user_from_source_connector_serialize(
        organization=organization,
        source_connector_id=source_connector_id,
        remove_user_from_source_connector_request=remove_user_from_source_connector_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Declared for parity with the other overloads; the body is not
    # deserialized here, the caller gets the raw response instead.
    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "RemoveUserFromSourceConnectorResponse"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    return raw.response


def _delete_user_from_source_connector_serialize(
    self,
    organization,
    source_connector_id,
    remove_user_from_source_connector_request,
    _request_auth,
    _content_type,
    _headers,
    _host_index,
) -> RequestSerialized:
    """Build the serialized DELETE request for removing a source connector user."""

    _host = None
    _collection_formats: Dict[str, str] = {}

    # Path parameters: include only the ones actually supplied.
    _path_params: Dict[str, str] = {
        name: value
        for name, value in (
            ('organization', organization),
            ('sourceConnectorId', source_connector_id),
        )
        if value is not None
    }
    _query_params: List[Tuple[str, str]] = []
    _header_params: Dict[str, Optional[str]] = _headers or {}
    _form_params: List[Tuple[str, str]] = []
    _files: Dict[
        str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
    ] = {}

    # The request model itself becomes the JSON body.
    _body_params: Optional[bytes] = None
    if remove_user_from_source_connector_request is not None:
        _body_params = remove_user_from_source_connector_request

    # Negotiate Accept unless the caller already set one explicitly.
    if 'Accept' not in _header_params:
        _header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']
        )

    # Honour a forced content type; otherwise fall back to the default one.
    content_type = _content_type or self.api_client.select_header_content_type(
        ['application/json']
    )
    if content_type is not None:
        _header_params['Content-Type'] = content_type

    _auth_settings: List[str] = ['bearerAuth']

    return self.api_client.param_serialize(
        method='DELETE',
        resource_path='/org/{organization}/connectors/sources/{sourceConnectorId}/users',
        path_params=_path_params,
        query_params=_query_params,
        header_params=_header_params,
        body=_body_params,
        post_params=_form_params,
        files=_files,
        auth_settings=_auth_settings,
        collection_formats=_collection_formats,
        _host=_host,
        _request_auth=_request_auth,
    )
@validate_call
def get_ai_platform_connector(
    self,
    organization: StrictStr,
    aiplatform_id: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> AIPlatform:
    """Get an AI platform connector.

    :param organization: (required)
    :type organization: str
    :param aiplatform_id: (required)
    :type aiplatform_id: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the deserialized result object.
    """  # noqa: E501

    serialized = self._get_ai_platform_connector_serialize(
        organization=organization,
        aiplatform_id=aiplatform_id,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # All documented error statuses share one error model.
    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "AIPlatform"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    raw.read()  # eagerly load the body so it can be deserialized
    return self.api_client.response_deserialize(
        response_data=raw,
        response_types_map=response_types,
    ).data


@validate_call
def get_ai_platform_connector_with_http_info(
    self,
    organization: StrictStr,
    aiplatform_id: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> ApiResponse[AIPlatform]:
    """Get an AI platform connector.

    Returns the full :class:`ApiResponse` (status, headers and the
    deserialized body) instead of only the body.

    :param organization: (required)
    :type organization: str
    :param aiplatform_id: (required)
    :type aiplatform_id: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the ApiResponse wrapping the result object.
    """  # noqa: E501

    serialized = self._get_ai_platform_connector_serialize(
        organization=organization,
        aiplatform_id=aiplatform_id,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "AIPlatform"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    raw.read()  # eagerly load the body so it can be deserialized
    return self.api_client.response_deserialize(
        response_data=raw,
        response_types_map=response_types,
    )


@validate_call
def get_ai_platform_connector_without_preload_content(
    self,
    organization: StrictStr,
    aiplatform_id: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> RESTResponseType:
    """Get an AI platform connector.

    Performs the call but does NOT read the response body; the caller
    receives the raw HTTP response and is responsible for consuming it.

    :param organization: (required)
    :type organization: str
    :param aiplatform_id: (required)
    :type aiplatform_id: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the raw, unread HTTP response.
    """  # noqa: E501

    serialized = self._get_ai_platform_connector_serialize(
        organization=organization,
        aiplatform_id=aiplatform_id,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Declared for parity with the other overloads; the body is not
    # deserialized here, the caller gets the raw response instead.
    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "AIPlatform"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    return raw.response


def _get_ai_platform_connector_serialize(
    self,
    organization,
    aiplatform_id,
    _request_auth,
    _content_type,
    _headers,
    _host_index,
) -> RequestSerialized:
    """Build the serialized GET request for a single AI platform connector."""

    _host = None
    _collection_formats: Dict[str, str] = {}

    # Path parameters: include only the ones actually supplied.
    _path_params: Dict[str, str] = {
        name: value
        for name, value in (
            ('organization', organization),
            ('aiplatformId', aiplatform_id),
        )
        if value is not None
    }
    _query_params: List[Tuple[str, str]] = []
    _header_params: Dict[str, Optional[str]] = _headers or {}
    _form_params: List[Tuple[str, str]] = []
    _files: Dict[
        str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
    ] = {}
    _body_params: Optional[bytes] = None  # GET request: no body

    # Negotiate Accept unless the caller already set one explicitly.
    if 'Accept' not in _header_params:
        _header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']
        )

    _auth_settings: List[str] = ['bearerAuth']

    return self.api_client.param_serialize(
        method='GET',
        resource_path='/org/{organization}/connectors/aiplatforms/{aiplatformId}',
        path_params=_path_params,
        query_params=_query_params,
        header_params=_header_params,
        body=_body_params,
        post_params=_form_params,
        files=_files,
        auth_settings=_auth_settings,
        collection_formats=_collection_formats,
        _host=_host,
        _request_auth=_request_auth,
    )
@validate_call
def get_ai_platform_connectors(
    self,
    organization: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> GetAIPlatformConnectors200Response:
    """Get all existing AI Platform connectors.

    :param organization: (required)
    :type organization: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the deserialized result object.
    """  # noqa: E501

    serialized = self._get_ai_platform_connectors_serialize(
        organization=organization,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # All documented error statuses share one error model.
    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "GetAIPlatformConnectors200Response"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    raw.read()  # eagerly load the body so it can be deserialized
    return self.api_client.response_deserialize(
        response_data=raw,
        response_types_map=response_types,
    ).data


@validate_call
def get_ai_platform_connectors_with_http_info(
    self,
    organization: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> ApiResponse[GetAIPlatformConnectors200Response]:
    """Get all existing AI Platform connectors.

    Returns the full :class:`ApiResponse` (status, headers and the
    deserialized body) instead of only the body.

    :param organization: (required)
    :type organization: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the ApiResponse wrapping the result object.
    """  # noqa: E501

    serialized = self._get_ai_platform_connectors_serialize(
        organization=organization,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "GetAIPlatformConnectors200Response"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    raw.read()  # eagerly load the body so it can be deserialized
    return self.api_client.response_deserialize(
        response_data=raw,
        response_types_map=response_types,
    )


@validate_call
def get_ai_platform_connectors_without_preload_content(
    self,
    organization: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> RESTResponseType:
    """Get all existing AI Platform connectors.

    Performs the call but does NOT read the response body; the caller
    receives the raw HTTP response and is responsible for consuming it.

    :param organization: (required)
    :type organization: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the raw, unread HTTP response.
    """  # noqa: E501

    serialized = self._get_ai_platform_connectors_serialize(
        organization=organization,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Declared for parity with the other overloads; the body is not
    # deserialized here, the caller gets the raw response instead.
    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "GetAIPlatformConnectors200Response"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    return raw.response


def _get_ai_platform_connectors_serialize(
    self,
    organization,
    _request_auth,
    _content_type,
    _headers,
    _host_index,
) -> RequestSerialized:
    """Build the serialized GET request listing all AI platform connectors."""

    _host = None
    _collection_formats: Dict[str, str] = {}

    # Path parameters: include only the ones actually supplied.
    _path_params: Dict[str, str] = {
        name: value
        for name, value in (('organization', organization),)
        if value is not None
    }
    _query_params: List[Tuple[str, str]] = []
    _header_params: Dict[str, Optional[str]] = _headers or {}
    _form_params: List[Tuple[str, str]] = []
    _files: Dict[
        str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
    ] = {}
    _body_params: Optional[bytes] = None  # GET request: no body

    # Negotiate Accept unless the caller already set one explicitly.
    if 'Accept' not in _header_params:
        _header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']
        )

    _auth_settings: List[str] = ['bearerAuth']

    return self.api_client.param_serialize(
        method='GET',
        resource_path='/org/{organization}/connectors/aiplatforms',
        path_params=_path_params,
        query_params=_query_params,
        header_params=_header_params,
        body=_body_params,
        post_params=_form_params,
        files=_files,
        auth_settings=_auth_settings,
        collection_formats=_collection_formats,
        _host=_host,
        _request_auth=_request_auth,
    )
@validate_call
def get_destination_connector(
    self,
    organization: StrictStr,
    destination_connector_id: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> DestinationConnector:
    """Get a destination connector.

    :param organization: (required)
    :type organization: str
    :param destination_connector_id: (required)
    :type destination_connector_id: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the deserialized result object.
    """  # noqa: E501

    serialized = self._get_destination_connector_serialize(
        organization=organization,
        destination_connector_id=destination_connector_id,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # All documented error statuses share one error model.
    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "DestinationConnector"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    raw.read()  # eagerly load the body so it can be deserialized
    return self.api_client.response_deserialize(
        response_data=raw,
        response_types_map=response_types,
    ).data


@validate_call
def get_destination_connector_with_http_info(
    self,
    organization: StrictStr,
    destination_connector_id: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> ApiResponse[DestinationConnector]:
    """Get a destination connector.

    Returns the full :class:`ApiResponse` (status, headers and the
    deserialized body) instead of only the body.

    :param organization: (required)
    :type organization: str
    :param destination_connector_id: (required)
    :type destination_connector_id: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the ApiResponse wrapping the result object.
    """  # noqa: E501

    serialized = self._get_destination_connector_serialize(
        organization=organization,
        destination_connector_id=destination_connector_id,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "DestinationConnector"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    raw.read()  # eagerly load the body so it can be deserialized
    return self.api_client.response_deserialize(
        response_data=raw,
        response_types_map=response_types,
    )


@validate_call
def get_destination_connector_without_preload_content(
    self,
    organization: StrictStr,
    destination_connector_id: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> RESTResponseType:
    """Get a destination connector.

    Performs the call but does NOT read the response body; the caller
    receives the raw HTTP response and is responsible for consuming it.

    :param organization: (required)
    :type organization: str
    :param destination_connector_id: (required)
    :type destination_connector_id: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the raw, unread HTTP response.
    """  # noqa: E501

    serialized = self._get_destination_connector_serialize(
        organization=organization,
        destination_connector_id=destination_connector_id,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Declared for parity with the other overloads; the body is not
    # deserialized here, the caller gets the raw response instead.
    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "DestinationConnector"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    return raw.response


def _get_destination_connector_serialize(
    self,
    organization,
    destination_connector_id,
    _request_auth,
    _content_type,
    _headers,
    _host_index,
) -> RequestSerialized:
    """Build the serialized GET request for a single destination connector."""

    _host = None
    _collection_formats: Dict[str, str] = {}

    # Path parameters: include only the ones actually supplied.
    _path_params: Dict[str, str] = {
        name: value
        for name, value in (
            ('organization', organization),
            ('destinationConnectorId', destination_connector_id),
        )
        if value is not None
    }
    _query_params: List[Tuple[str, str]] = []
    _header_params: Dict[str, Optional[str]] = _headers or {}
    _form_params: List[Tuple[str, str]] = []
    _files: Dict[
        str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
    ] = {}
    _body_params: Optional[bytes] = None  # GET request: no body

    # Negotiate Accept unless the caller already set one explicitly.
    if 'Accept' not in _header_params:
        _header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']
        )

    _auth_settings: List[str] = ['bearerAuth']

    return self.api_client.param_serialize(
        method='GET',
        resource_path='/org/{organization}/connectors/destinations/{destinationConnectorId}',
        path_params=_path_params,
        query_params=_query_params,
        header_params=_header_params,
        body=_body_params,
        post_params=_form_params,
        files=_files,
        auth_settings=_auth_settings,
        collection_formats=_collection_formats,
        _host=_host,
        _request_auth=_request_auth,
    )


@validate_call
def get_destination_connectors(
    self,
    organization: StrictStr,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)],
            Annotated[StrictFloat, Field(gt=0)]
        ]
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> GetDestinationConnectors200Response:
    """Get all existing destination connectors.

    :param organization: (required)
    :type organization: str
    :param _request_timeout: per-request timeout override; a single number
        for the total timeout, or a (connect, read) tuple.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: per-request override of the spec's auth settings.
    :type _request_auth: dict, optional
    :param _content_type: force the content type for this request.
    :type _content_type: str, optional
    :param _headers: per-request override of the request headers.
    :type _headers: dict, optional
    :param _host_index: per-request override of the host index.
    :type _host_index: int, optional
    :return: the deserialized result object.
    """  # noqa: E501

    serialized = self._get_destination_connectors_serialize(
        organization=organization,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # All documented error statuses share one error model.
    response_types: Dict[str, Optional[str]] = {
        code: "GetPipelines400Response"
        for code in ('400', '401', '403', '404', '500')
    }
    response_types['200'] = "GetDestinationConnectors200Response"

    raw = self.api_client.call_api(
        *serialized,
        _request_timeout=_request_timeout,
    )
    raw.read()  # eagerly load the body so it can be deserialized
    return self.api_client.response_deserialize(
        response_data=raw,
        response_types_map=response_types,
    ).data
- """ # noqa: E501 - - _param = self._get_destination_connectors_serialize( - organization=organization, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "GetDestinationConnectors200Response", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def get_destination_connectors_with_http_info( - self, - organization: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[GetDestinationConnectors200Response]: - """Get all existing destination connectors - - - :param organization: (required) - :type organization: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. 
- :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._get_destination_connectors_serialize( - organization=organization, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "GetDestinationConnectors200Response", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def get_destination_connectors_without_preload_content( - self, - organization: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Get all existing destination connectors - - - :param organization: (required) - :type organization: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. 
It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._get_destination_connectors_serialize( - organization=organization, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "GetDestinationConnectors200Response", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _get_destination_connectors_serialize( - self, - organization, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = 
None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='GET', - resource_path='/org/{organization}/connectors/destinations', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def get_source_connector( - self, - organization: StrictStr, - source_connector_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> SourceConnector: - """Get a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._get_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "SourceConnector", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def get_source_connector_with_http_info( - self, - organization: StrictStr, - source_connector_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - 
_host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[SourceConnector]: - """Get a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._get_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "SourceConnector", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def get_source_connector_without_preload_content( - self, - organization: StrictStr, - source_connector_id: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Get a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. 
- :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._get_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "SourceConnector", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _get_source_connector_serialize( - self, - organization, - source_connector_id, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if source_connector_id is not None: - _path_params['sourceConnectorId'] = source_connector_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - - - # 
set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='GET', - resource_path='/org/{organization}/connectors/sources/{sourceConnectorId}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def get_source_connectors( - self, - organization: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> GetSourceConnectors200Response: - """Get all existing source connectors - - - :param organization: (required) - :type organization: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. 
- :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._get_source_connectors_serialize( - organization=organization, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "GetSourceConnectors200Response", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def get_source_connectors_with_http_info( - self, - organization: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[GetSourceConnectors200Response]: - """Get all existing source connectors - - - :param organization: (required) - :type organization: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._get_source_connectors_serialize( - organization=organization, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "GetSourceConnectors200Response", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def get_source_connectors_without_preload_content( - self, - organization: StrictStr, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> 
RESTResponseType: - """Get all existing source connectors - - - :param organization: (required) - :type organization: str - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._get_source_connectors_serialize( - organization=organization, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "GetSourceConnectors200Response", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _get_source_connectors_serialize( - self, - organization, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='GET', - resource_path='/org/{organization}/connectors/sources', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - 
collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def update_ai_platform_connector( - self, - organization: StrictStr, - aiplatform_id: StrictStr, - update_ai_platform_connector_request: UpdateAIPlatformConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> UpdateAIPlatformConnectorResponse: - """Update an AI Platform connector - - - :param organization: (required) - :type organization: str - :param aiplatform_id: (required) - :type aiplatform_id: str - :param update_ai_platform_connector_request: (required) - :type update_ai_platform_connector_request: UpdateAIPlatformConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._update_ai_platform_connector_serialize( - organization=organization, - aiplatform_id=aiplatform_id, - update_ai_platform_connector_request=update_ai_platform_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateAIPlatformConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def update_ai_platform_connector_with_http_info( - self, - organization: StrictStr, - aiplatform_id: StrictStr, - update_ai_platform_connector_request: UpdateAIPlatformConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[UpdateAIPlatformConnectorResponse]: - """Update an AI Platform connector - - - :param organization: (required) - :type organization: str - :param aiplatform_id: (required) - :type aiplatform_id: str - :param update_ai_platform_connector_request: (required) - :type update_ai_platform_connector_request: UpdateAIPlatformConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._update_ai_platform_connector_serialize( - organization=organization, - aiplatform_id=aiplatform_id, - update_ai_platform_connector_request=update_ai_platform_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateAIPlatformConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def update_ai_platform_connector_without_preload_content( - self, - organization: StrictStr, - aiplatform_id: StrictStr, - update_ai_platform_connector_request: UpdateAIPlatformConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] 
= None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Update an AI Platform connector - - - :param organization: (required) - :type organization: str - :param aiplatform_id: (required) - :type aiplatform_id: str - :param update_ai_platform_connector_request: (required) - :type update_ai_platform_connector_request: UpdateAIPlatformConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._update_ai_platform_connector_serialize( - organization=organization, - aiplatform_id=aiplatform_id, - update_ai_platform_connector_request=update_ai_platform_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateAIPlatformConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _update_ai_platform_connector_serialize( - self, - organization, - aiplatform_id, - update_ai_platform_connector_request, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if aiplatform_id is not None: - _path_params['aiplatformId'] = aiplatform_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if update_ai_platform_connector_request is not None: - _body_params = update_ai_platform_connector_request - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if 
_content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='PATCH', - resource_path='/org/{organization}/connectors/aiplatforms/{aiplatformId}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def update_destination_connector( - self, - organization: StrictStr, - destination_connector_id: StrictStr, - update_destination_connector_request: UpdateDestinationConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> UpdateDestinationConnectorResponse: - """Update a destination connector - - - :param organization: (required) - :type organization: str - :param destination_connector_id: (required) - :type destination_connector_id: str - :param update_destination_connector_request: (required) - :type update_destination_connector_request: UpdateDestinationConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._update_destination_connector_serialize( - organization=organization, - destination_connector_id=destination_connector_id, - update_destination_connector_request=update_destination_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateDestinationConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def update_destination_connector_with_http_info( - self, - organization: StrictStr, - destination_connector_id: StrictStr, - update_destination_connector_request: UpdateDestinationConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - 
Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[UpdateDestinationConnectorResponse]: - """Update a destination connector - - - :param organization: (required) - :type organization: str - :param destination_connector_id: (required) - :type destination_connector_id: str - :param update_destination_connector_request: (required) - :type update_destination_connector_request: UpdateDestinationConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._update_destination_connector_serialize( - organization=organization, - destination_connector_id=destination_connector_id, - update_destination_connector_request=update_destination_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateDestinationConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def update_destination_connector_without_preload_content( - self, - organization: StrictStr, - destination_connector_id: StrictStr, - update_destination_connector_request: UpdateDestinationConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Update a destination connector - - - :param organization: (required) - :type organization: str - :param destination_connector_id: (required) - :type destination_connector_id: str - :param update_destination_connector_request: (required) - :type update_destination_connector_request: UpdateDestinationConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. 
It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._update_destination_connector_serialize( - organization=organization, - destination_connector_id=destination_connector_id, - update_destination_connector_request=update_destination_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateDestinationConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _update_destination_connector_serialize( - self, - organization, - destination_connector_id, - update_destination_connector_request, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, 
Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if destination_connector_id is not None: - _path_params['destinationConnectorId'] = destination_connector_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if update_destination_connector_request is not None: - _body_params = update_destination_connector_request - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='PATCH', - resource_path='/org/{organization}/connectors/destinations/{destinationConnectorId}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def update_source_connector( - self, - organization: StrictStr, - source_connector_id: StrictStr, - update_source_connector_request: UpdateSourceConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - 
Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> UpdateSourceConnectorResponse: - """Update a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param update_source_connector_request: (required) - :type update_source_connector_request: UpdateSourceConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._update_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - update_source_connector_request=update_source_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def update_source_connector_with_http_info( - self, - organization: StrictStr, - source_connector_id: StrictStr, - update_source_connector_request: UpdateSourceConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[UpdateSourceConnectorResponse]: - """Update a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param update_source_connector_request: (required) - :type update_source_connector_request: UpdateSourceConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._update_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - update_source_connector_request=update_source_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def update_source_connector_without_preload_content( - self, - organization: StrictStr, - source_connector_id: StrictStr, - update_source_connector_request: UpdateSourceConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - 
_request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Update a source connector - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param update_source_connector_request: (required) - :type update_source_connector_request: UpdateSourceConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._update_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - update_source_connector_request=update_source_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _update_source_connector_serialize( - self, - organization, - source_connector_id, - update_source_connector_request, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if source_connector_id is not None: - _path_params['sourceConnectorId'] = source_connector_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if update_source_connector_request is not None: - _body_params = update_source_connector_request - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if 
_content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='PATCH', - resource_path='/org/{organization}/connectors/sources/{sourceConnectorId}', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - - - - @validate_call - def update_user_in_source_connector( - self, - organization: StrictStr, - source_connector_id: StrictStr, - update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> UpdateUserInSourceConnectorResponse: - """Update a source connector user - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param update_user_in_source_connector_request: (required) - :type update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. 
- :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._update_user_in_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - update_user_in_source_connector_request=update_user_in_source_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateUserInSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ).data - - - @validate_call - def update_user_in_source_connector_with_http_info( - self, - organization: StrictStr, - source_connector_id: StrictStr, - update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - 
Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> ApiResponse[UpdateUserInSourceConnectorResponse]: - """Update a source connector user - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param update_user_in_source_connector_request: (required) - :type update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. 
- """ # noqa: E501 - - _param = self._update_user_in_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - update_user_in_source_connector_request=update_user_in_source_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateUserInSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - response_data.read() - return self.api_client.response_deserialize( - response_data=response_data, - response_types_map=_response_types_map, - ) - - - @validate_call - def update_user_in_source_connector_without_preload_content( - self, - organization: StrictStr, - source_connector_id: StrictStr, - update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest, - _request_timeout: Union[ - None, - Annotated[StrictFloat, Field(gt=0)], - Tuple[ - Annotated[StrictFloat, Field(gt=0)], - Annotated[StrictFloat, Field(gt=0)] - ] - ] = None, - _request_auth: Optional[Dict[StrictStr, Any]] = None, - _content_type: Optional[StrictStr] = None, - _headers: Optional[Dict[StrictStr, Any]] = None, - _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, - ) -> RESTResponseType: - """Update a source connector user - - - :param organization: (required) - :type organization: str - :param source_connector_id: (required) - :type source_connector_id: str - :param update_user_in_source_connector_request: (required) - :type update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. 
It can also be a pair (tuple) of - (connection, read) timeouts. - :type _request_timeout: int, tuple(int, int), optional - :param _request_auth: set to override the auth_settings for an a single - request; this effectively ignores the - authentication in the spec for a single request. - :type _request_auth: dict, optional - :param _content_type: force content-type for the request. - :type _content_type: str, Optional - :param _headers: set to override the headers for a single - request; this effectively ignores the headers - in the spec for a single request. - :type _headers: dict, optional - :param _host_index: set to override the host_index for a single - request; this effectively ignores the host_index - in the spec for a single request. - :type _host_index: int, optional - :return: Returns the result object. - """ # noqa: E501 - - _param = self._update_user_in_source_connector_serialize( - organization=organization, - source_connector_id=source_connector_id, - update_user_in_source_connector_request=update_user_in_source_connector_request, - _request_auth=_request_auth, - _content_type=_content_type, - _headers=_headers, - _host_index=_host_index - ) - - _response_types_map: Dict[str, Optional[str]] = { - '200': "UpdateUserInSourceConnectorResponse", - '400': "GetPipelines400Response", - '401': "GetPipelines400Response", - '403': "GetPipelines400Response", - '404': "GetPipelines400Response", - '500': "GetPipelines400Response", - } - response_data = self.api_client.call_api( - *_param, - _request_timeout=_request_timeout - ) - return response_data.response - - - def _update_user_in_source_connector_serialize( - self, - organization, - source_connector_id, - update_user_in_source_connector_request, - _request_auth, - _content_type, - _headers, - _host_index, - ) -> RequestSerialized: - - _host = None - - _collection_formats: Dict[str, str] = { - } - - _path_params: Dict[str, str] = {} - _query_params: List[Tuple[str, str]] = [] - _header_params: Dict[str, 
Optional[str]] = _headers or {} - _form_params: List[Tuple[str, str]] = [] - _files: Dict[ - str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] - ] = {} - _body_params: Optional[bytes] = None - - # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if source_connector_id is not None: - _path_params['sourceConnectorId'] = source_connector_id - # process the query parameters - # process the header parameters - # process the form parameters - # process the body parameter - if update_user_in_source_connector_request is not None: - _body_params = update_user_in_source_connector_request - - - # set the HTTP header `Accept` - if 'Accept' not in _header_params: - _header_params['Accept'] = self.api_client.select_header_accept( - [ - 'application/json' - ] - ) - - # set the HTTP header `Content-Type` - if _content_type: - _header_params['Content-Type'] = _content_type - else: - _default_content_type = ( - self.api_client.select_header_content_type( - [ - 'application/json' - ] - ) - ) - if _default_content_type is not None: - _header_params['Content-Type'] = _default_content_type - - # authentication setting - _auth_settings: List[str] = [ - 'bearerAuth' - ] - - return self.api_client.param_serialize( - method='PATCH', - resource_path='/org/{organization}/connectors/sources/{sourceConnectorId}/users', - path_params=_path_params, - query_params=_query_params, - header_params=_header_params, - body=_body_params, - post_params=_form_params, - files=_files, - auth_settings=_auth_settings, - collection_formats=_collection_formats, - _host=_host, - _request_auth=_request_auth - ) - - diff --git a/src/python/vectorize_client/models/__init__.py b/src/python/vectorize_client/models/__init__.py deleted file mode 100644 index 3a8daa9..0000000 --- a/src/python/vectorize_client/models/__init__.py +++ /dev/null @@ -1,99 +0,0 @@ -# coding: utf-8 - -# flake8: noqa -""" - Vectorize API (Beta) - - API for Vectorize 
services - - The version of the OpenAPI document: 0.0.1 - Generated by OpenAPI Generator (https://openapi-generator.tech) - - Do not edit the class manually. -""" # noqa: E501 - - -# import models into model package -from vectorize_client.models.ai_platform import AIPlatform -from vectorize_client.models.ai_platform_config_schema import AIPlatformConfigSchema -from vectorize_client.models.ai_platform_schema import AIPlatformSchema -from vectorize_client.models.ai_platform_type import AIPlatformType -from vectorize_client.models.add_user_from_source_connector_response import AddUserFromSourceConnectorResponse -from vectorize_client.models.add_user_to_source_connector_request import AddUserToSourceConnectorRequest -from vectorize_client.models.add_user_to_source_connector_request_selected_files_value import AddUserToSourceConnectorRequestSelectedFilesValue -from vectorize_client.models.create_ai_platform_connector import CreateAIPlatformConnector -from vectorize_client.models.create_ai_platform_connector_response import CreateAIPlatformConnectorResponse -from vectorize_client.models.create_destination_connector import CreateDestinationConnector -from vectorize_client.models.create_destination_connector_response import CreateDestinationConnectorResponse -from vectorize_client.models.create_pipeline_response import CreatePipelineResponse -from vectorize_client.models.create_pipeline_response_data import CreatePipelineResponseData -from vectorize_client.models.create_source_connector import CreateSourceConnector -from vectorize_client.models.create_source_connector_response import CreateSourceConnectorResponse -from vectorize_client.models.created_ai_platform_connector import CreatedAIPlatformConnector -from vectorize_client.models.created_destination_connector import CreatedDestinationConnector -from vectorize_client.models.created_source_connector import CreatedSourceConnector -from vectorize_client.models.deep_research_result import DeepResearchResult -from 
vectorize_client.models.delete_ai_platform_connector_response import DeleteAIPlatformConnectorResponse -from vectorize_client.models.delete_destination_connector_response import DeleteDestinationConnectorResponse -from vectorize_client.models.delete_file_response import DeleteFileResponse -from vectorize_client.models.delete_pipeline_response import DeletePipelineResponse -from vectorize_client.models.delete_source_connector_response import DeleteSourceConnectorResponse -from vectorize_client.models.destination_connector import DestinationConnector -from vectorize_client.models.destination_connector_schema import DestinationConnectorSchema -from vectorize_client.models.destination_connector_type import DestinationConnectorType -from vectorize_client.models.document import Document -from vectorize_client.models.extraction_chunking_strategy import ExtractionChunkingStrategy -from vectorize_client.models.extraction_result import ExtractionResult -from vectorize_client.models.extraction_result_response import ExtractionResultResponse -from vectorize_client.models.extraction_type import ExtractionType -from vectorize_client.models.get_ai_platform_connectors200_response import GetAIPlatformConnectors200Response -from vectorize_client.models.get_deep_research_response import GetDeepResearchResponse -from vectorize_client.models.get_destination_connectors200_response import GetDestinationConnectors200Response -from vectorize_client.models.get_pipeline_events_response import GetPipelineEventsResponse -from vectorize_client.models.get_pipeline_metrics_response import GetPipelineMetricsResponse -from vectorize_client.models.get_pipeline_response import GetPipelineResponse -from vectorize_client.models.get_pipelines400_response import GetPipelines400Response -from vectorize_client.models.get_pipelines_response import GetPipelinesResponse -from vectorize_client.models.get_source_connectors200_response import GetSourceConnectors200Response -from 
vectorize_client.models.get_upload_files_response import GetUploadFilesResponse -from vectorize_client.models.metadata_extraction_strategy import MetadataExtractionStrategy -from vectorize_client.models.metadata_extraction_strategy_schema import MetadataExtractionStrategySchema -from vectorize_client.models.n8_n_config import N8NConfig -from vectorize_client.models.pipeline_configuration_schema import PipelineConfigurationSchema -from vectorize_client.models.pipeline_events import PipelineEvents -from vectorize_client.models.pipeline_list_summary import PipelineListSummary -from vectorize_client.models.pipeline_metrics import PipelineMetrics -from vectorize_client.models.pipeline_summary import PipelineSummary -from vectorize_client.models.remove_user_from_source_connector_request import RemoveUserFromSourceConnectorRequest -from vectorize_client.models.remove_user_from_source_connector_response import RemoveUserFromSourceConnectorResponse -from vectorize_client.models.retrieve_context import RetrieveContext -from vectorize_client.models.retrieve_context_message import RetrieveContextMessage -from vectorize_client.models.retrieve_documents_request import RetrieveDocumentsRequest -from vectorize_client.models.retrieve_documents_response import RetrieveDocumentsResponse -from vectorize_client.models.schedule_schema import ScheduleSchema -from vectorize_client.models.schedule_schema_type import ScheduleSchemaType -from vectorize_client.models.source_connector import SourceConnector -from vectorize_client.models.source_connector_schema import SourceConnectorSchema -from vectorize_client.models.source_connector_type import SourceConnectorType -from vectorize_client.models.start_deep_research_request import StartDeepResearchRequest -from vectorize_client.models.start_deep_research_response import StartDeepResearchResponse -from vectorize_client.models.start_extraction_request import StartExtractionRequest -from vectorize_client.models.start_extraction_response import 
StartExtractionResponse -from vectorize_client.models.start_file_upload_request import StartFileUploadRequest -from vectorize_client.models.start_file_upload_response import StartFileUploadResponse -from vectorize_client.models.start_file_upload_to_connector_request import StartFileUploadToConnectorRequest -from vectorize_client.models.start_file_upload_to_connector_response import StartFileUploadToConnectorResponse -from vectorize_client.models.start_pipeline_response import StartPipelineResponse -from vectorize_client.models.stop_pipeline_response import StopPipelineResponse -from vectorize_client.models.update_ai_platform_connector_request import UpdateAIPlatformConnectorRequest -from vectorize_client.models.update_ai_platform_connector_response import UpdateAIPlatformConnectorResponse -from vectorize_client.models.update_destination_connector_request import UpdateDestinationConnectorRequest -from vectorize_client.models.update_destination_connector_response import UpdateDestinationConnectorResponse -from vectorize_client.models.update_source_connector_request import UpdateSourceConnectorRequest -from vectorize_client.models.update_source_connector_response import UpdateSourceConnectorResponse -from vectorize_client.models.update_source_connector_response_data import UpdateSourceConnectorResponseData -from vectorize_client.models.update_user_in_source_connector_request import UpdateUserInSourceConnectorRequest -from vectorize_client.models.update_user_in_source_connector_response import UpdateUserInSourceConnectorResponse -from vectorize_client.models.updated_ai_platform_connector_data import UpdatedAIPlatformConnectorData -from vectorize_client.models.updated_destination_connector_data import UpdatedDestinationConnectorData -from vectorize_client.models.upload_file import UploadFile diff --git a/src/ts/.openapi-generator-ignore b/src/ts/.openapi-generator-ignore new file mode 100644 index 0000000..7484ee5 --- /dev/null +++ b/src/ts/.openapi-generator-ignore @@ 
-0,0 +1,23 @@ +# OpenAPI Generator Ignore +# Generated by openapi-generator https://github.com/openapitools/openapi-generator + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. +# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). +# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md diff --git a/src/ts/.openapi-generator/FILES b/src/ts/.openapi-generator/FILES new file mode 100644 index 0000000..62eba8d --- /dev/null +++ b/src/ts/.openapi-generator/FILES @@ -0,0 +1,98 @@ +.gitignore +.npmignore +.openapi-generator-ignore +README.md +package.json +src/apis/ConnectorsApi.ts +src/apis/ExtractionApi.ts +src/apis/FilesApi.ts +src/apis/PipelinesApi.ts +src/apis/UploadsApi.ts +src/apis/index.ts +src/index.ts +src/models/AIPlatform.ts +src/models/AIPlatformConfigSchema.ts +src/models/AIPlatformSchema.ts +src/models/AIPlatformType.ts +src/models/AddUserFromSourceConnectorResponse.ts +src/models/AddUserToSourceConnectorRequest.ts +src/models/AddUserToSourceConnectorRequestSelectedFilesValue.ts +src/models/CreateAIPlatformConnector.ts +src/models/CreateAIPlatformConnectorResponse.ts +src/models/CreateDestinationConnector.ts +src/models/CreateDestinationConnectorResponse.ts 
+src/models/CreatePipelineResponse.ts +src/models/CreatePipelineResponseData.ts +src/models/CreateSourceConnector.ts +src/models/CreateSourceConnectorResponse.ts +src/models/CreatedAIPlatformConnector.ts +src/models/CreatedDestinationConnector.ts +src/models/CreatedSourceConnector.ts +src/models/DeepResearchResult.ts +src/models/DeleteAIPlatformConnectorResponse.ts +src/models/DeleteDestinationConnectorResponse.ts +src/models/DeleteFileResponse.ts +src/models/DeletePipelineResponse.ts +src/models/DeleteSourceConnectorResponse.ts +src/models/DestinationConnector.ts +src/models/DestinationConnectorSchema.ts +src/models/DestinationConnectorType.ts +src/models/Document.ts +src/models/ExtractionChunkingStrategy.ts +src/models/ExtractionResult.ts +src/models/ExtractionResultResponse.ts +src/models/ExtractionType.ts +src/models/GetAIPlatformConnectors200Response.ts +src/models/GetDeepResearchResponse.ts +src/models/GetDestinationConnectors200Response.ts +src/models/GetPipelineEventsResponse.ts +src/models/GetPipelineMetricsResponse.ts +src/models/GetPipelineResponse.ts +src/models/GetPipelines400Response.ts +src/models/GetPipelinesResponse.ts +src/models/GetSourceConnectors200Response.ts +src/models/GetUploadFilesResponse.ts +src/models/MetadataExtractionStrategy.ts +src/models/MetadataExtractionStrategySchema.ts +src/models/N8NConfig.ts +src/models/PipelineConfigurationSchema.ts +src/models/PipelineEvents.ts +src/models/PipelineListSummary.ts +src/models/PipelineMetrics.ts +src/models/PipelineSummary.ts +src/models/RemoveUserFromSourceConnectorRequest.ts +src/models/RemoveUserFromSourceConnectorResponse.ts +src/models/RetrieveContext.ts +src/models/RetrieveContextMessage.ts +src/models/RetrieveDocumentsRequest.ts +src/models/RetrieveDocumentsResponse.ts +src/models/ScheduleSchema.ts +src/models/ScheduleSchemaType.ts +src/models/SourceConnector.ts +src/models/SourceConnectorSchema.ts +src/models/SourceConnectorType.ts +src/models/StartDeepResearchRequest.ts 
+src/models/StartDeepResearchResponse.ts +src/models/StartExtractionRequest.ts +src/models/StartExtractionResponse.ts +src/models/StartFileUploadRequest.ts +src/models/StartFileUploadResponse.ts +src/models/StartFileUploadToConnectorRequest.ts +src/models/StartFileUploadToConnectorResponse.ts +src/models/StartPipelineResponse.ts +src/models/StopPipelineResponse.ts +src/models/UpdateAIPlatformConnectorRequest.ts +src/models/UpdateAIPlatformConnectorResponse.ts +src/models/UpdateDestinationConnectorRequest.ts +src/models/UpdateDestinationConnectorResponse.ts +src/models/UpdateSourceConnectorRequest.ts +src/models/UpdateSourceConnectorResponse.ts +src/models/UpdateSourceConnectorResponseData.ts +src/models/UpdateUserInSourceConnectorRequest.ts +src/models/UpdateUserInSourceConnectorResponse.ts +src/models/UpdatedAIPlatformConnectorData.ts +src/models/UpdatedDestinationConnectorData.ts +src/models/UploadFile.ts +src/models/index.ts +src/runtime.ts +tsconfig.json diff --git a/src/ts/.openapi-generator/VERSION b/src/ts/.openapi-generator/VERSION new file mode 100644 index 0000000..e465da4 --- /dev/null +++ b/src/ts/.openapi-generator/VERSION @@ -0,0 +1 @@ +7.14.0 diff --git a/src/ts/README.md b/src/ts/README.md index 3d1bd42..36f54eb 100644 --- a/src/ts/README.md +++ b/src/ts/README.md @@ -1,15 +1,15 @@ # Vectorize Client -Typescript Api Client for Vectorize -For more information, please visit [https://vectorize.io](https://vectorize.io) +Node Api Client for [Vectorize](https://vectorize.io). +For the full documentation, please visit [docs.vectorize.io](https://docs.vectorize.io/api/api-getting-started). 
-## Installation & Usage +## Installation ```sh npm install @vectorize-io/vectorize-client ``` ## Getting Started -Please follow the [installation procedure](#installation--usage) and then run the following: +List all your pipelines: ```typescript @@ -23,8 +23,4 @@ const pipelines = connectorsApi.getPipelines({ console.log(pipelines) ``` -## Documentation for API Endpoints - -All URIs are relative to *https://api.vectorize.io/v1* - -See the full [reference](https://vectorize.readme.io/reference) for more information. +Visit [docs.vectorize.io](https://docs.vectorize.io/api/api-getting-started) to learn more about the API. \ No newline at end of file diff --git a/src/ts/package.json b/src/ts/package.json index a6f3456..441d84a 100644 --- a/src/ts/package.json +++ b/src/ts/package.json @@ -16,7 +16,7 @@ "preinstall": "npm install typescript" }, "devDependencies": { - "typescript": "^5.8.3" + "typescript": "^4.0 || ^5.0" }, "publishConfig": { "registry": "https://registry.npmjs.org", diff --git a/src/ts/src/apis/ConnectorsApi.ts b/src/ts/src/apis/ConnectorsApi.ts index b9dedb2..43493aa 100644 --- a/src/ts/src/apis/ConnectorsApi.ts +++ b/src/ts/src/apis/ConnectorsApi.ts @@ -240,8 +240,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/sources/{sourceConnectorId}/users`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/sources/{sourceConnectorId}/users`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))), + path: urlPath, method: 'POST', 
headers: headerParameters, query: queryParameters, @@ -291,8 +296,12 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/aiplatforms`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/connectors/aiplatforms`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, @@ -342,8 +351,12 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/destinations`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/connectors/destinations`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, @@ -393,8 +406,12 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/sources`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/connectors/sources`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, @@ -442,8 +459,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = 
`/org/{organization}/connectors/aiplatforms/{aiplatformId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"aiplatformId"}}`, encodeURIComponent(String(requestParameters['aiplatformId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/aiplatforms/{aiplatformId}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"aiplatformId"}}`, encodeURIComponent(String(requestParameters['aiplatformId']))), + path: urlPath, method: 'DELETE', headers: headerParameters, query: queryParameters, @@ -490,8 +512,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/destinations/{destinationConnectorId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"destinationConnectorId"}}`, encodeURIComponent(String(requestParameters['destinationConnectorId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/destinations/{destinationConnectorId}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"destinationConnectorId"}}`, encodeURIComponent(String(requestParameters['destinationConnectorId']))), + path: urlPath, method: 'DELETE', headers: headerParameters, query: queryParameters, @@ -538,8 +565,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/sources/{sourceConnectorId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"sourceConnectorId"}}`, 
encodeURIComponent(String(requestParameters['sourceConnectorId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/sources/{sourceConnectorId}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))), + path: urlPath, method: 'DELETE', headers: headerParameters, query: queryParameters, @@ -595,8 +627,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/sources/{sourceConnectorId}/users`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/sources/{sourceConnectorId}/users`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))), + path: urlPath, method: 'DELETE', headers: headerParameters, query: queryParameters, @@ -644,8 +681,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/aiplatforms/{aiplatformId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"aiplatformId"}}`, encodeURIComponent(String(requestParameters['aiplatformId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/aiplatforms/{aiplatformId}`.replace(`{${"organization"}}`, 
encodeURIComponent(String(requestParameters['organization']))).replace(`{${"aiplatformId"}}`, encodeURIComponent(String(requestParameters['aiplatformId']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -685,8 +727,12 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/aiplatforms`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/connectors/aiplatforms`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -733,8 +779,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/destinations/{destinationConnectorId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"destinationConnectorId"}}`, encodeURIComponent(String(requestParameters['destinationConnectorId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/destinations/{destinationConnectorId}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"destinationConnectorId"}}`, encodeURIComponent(String(requestParameters['destinationConnectorId']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -774,8 +825,12 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/destinations`; + urlPath = urlPath.replace(`{${"organization"}}`, 
encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/connectors/destinations`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -822,8 +877,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/sources/{sourceConnectorId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/sources/{sourceConnectorId}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -863,8 +923,12 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/sources`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/connectors/sources`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -920,8 +984,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/aiplatforms/{aiplatformId}`; + urlPath = 
urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"aiplatformId"}}`, encodeURIComponent(String(requestParameters['aiplatformId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/aiplatforms/{aiplatformId}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"aiplatformId"}}`, encodeURIComponent(String(requestParameters['aiplatformId']))), + path: urlPath, method: 'PATCH', headers: headerParameters, query: queryParameters, @@ -978,8 +1047,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/destinations/{destinationConnectorId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"destinationConnectorId"}}`, encodeURIComponent(String(requestParameters['destinationConnectorId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/destinations/{destinationConnectorId}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"destinationConnectorId"}}`, encodeURIComponent(String(requestParameters['destinationConnectorId']))), + path: urlPath, method: 'PATCH', headers: headerParameters, query: queryParameters, @@ -1036,8 +1110,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/sources/{sourceConnectorId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))); + const response = await this.request({ - 
path: `/org/{organization}/connectors/sources/{sourceConnectorId}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))), + path: urlPath, method: 'PATCH', headers: headerParameters, query: queryParameters, @@ -1094,8 +1173,13 @@ export class ConnectorsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/connectors/sources/{sourceConnectorId}/users`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))); + const response = await this.request({ - path: `/org/{organization}/connectors/sources/{sourceConnectorId}/users`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"sourceConnectorId"}}`, encodeURIComponent(String(requestParameters['sourceConnectorId']))), + path: urlPath, method: 'PATCH', headers: headerParameters, query: queryParameters, diff --git a/src/ts/src/apis/ExtractionApi.ts b/src/ts/src/apis/ExtractionApi.ts index bb45ab4..f85a523 100644 --- a/src/ts/src/apis/ExtractionApi.ts +++ b/src/ts/src/apis/ExtractionApi.ts @@ -76,8 +76,13 @@ export class ExtractionApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/extraction/{extractionId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"extractionId"}}`, encodeURIComponent(String(requestParameters['extractionId']))); + const response = await this.request({ - path: `/org/{organization}/extraction/{extractionId}`.replace(`{${"organization"}}`, 
encodeURIComponent(String(requestParameters['organization']))).replace(`{${"extractionId"}}`, encodeURIComponent(String(requestParameters['extractionId']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -126,8 +131,12 @@ export class ExtractionApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/extraction`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/extraction`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, diff --git a/src/ts/src/apis/FilesApi.ts b/src/ts/src/apis/FilesApi.ts index 5000be5..26b4b86 100644 --- a/src/ts/src/apis/FilesApi.ts +++ b/src/ts/src/apis/FilesApi.ts @@ -70,8 +70,12 @@ export class FilesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/files`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/files`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, diff --git a/src/ts/src/apis/PipelinesApi.ts b/src/ts/src/apis/PipelinesApi.ts index ba7861a..7aa19b7 100644 --- a/src/ts/src/apis/PipelinesApi.ts +++ b/src/ts/src/apis/PipelinesApi.ts @@ -159,8 +159,12 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines`; + urlPath = urlPath.replace(`{${"organization"}}`, 
encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/pipelines`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, @@ -208,8 +212,13 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines/{pipeline}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))); + const response = await this.request({ - path: `/org/{organization}/pipelines/{pipeline}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))), + path: urlPath, method: 'DELETE', headers: headerParameters, query: queryParameters, @@ -263,8 +272,14 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines/{pipeline}/deep-research/{researchId}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))); + urlPath = urlPath.replace(`{${"researchId"}}`, encodeURIComponent(String(requestParameters['researchId']))); + const response = await this.request({ - path: `/org/{organization}/pipelines/{pipeline}/deep-research/{researchId}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))).replace(`{${"researchId"}}`, 
encodeURIComponent(String(requestParameters['researchId']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -311,8 +326,13 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines/{pipeline}`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))); + const response = await this.request({ - path: `/org/{organization}/pipelines/{pipeline}`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -363,8 +383,13 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines/{pipeline}/events`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))); + const response = await this.request({ - path: `/org/{organization}/pipelines/{pipeline}/events`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -411,8 +436,13 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines/{pipeline}/metrics`; + urlPath = urlPath.replace(`{${"organization"}}`, 
encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))); + const response = await this.request({ - path: `/org/{organization}/pipelines/{pipeline}/metrics`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -452,8 +482,12 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + const response = await this.request({ - path: `/org/{organization}/pipelines`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -509,8 +543,13 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines/{pipeline}/retrieval`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))); + const response = await this.request({ - path: `/org/{organization}/pipelines/{pipeline}/retrieval`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, @@ -567,8 +606,13 @@ export class PipelinesApi extends runtime.BaseAPI { 
headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines/{pipeline}/deep-research`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))); + const response = await this.request({ - path: `/org/{organization}/pipelines/{pipeline}/deep-research`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, @@ -616,8 +660,13 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines/{pipeline}/start`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))); + const response = await this.request({ - path: `/org/{organization}/pipelines/{pipeline}/start`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, @@ -664,8 +713,13 @@ export class PipelinesApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/pipelines/{pipeline}/stop`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))); + const response = await this.request({ - path: 
`/org/{organization}/pipelines/{pipeline}/stop`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"pipeline"}}`, encodeURIComponent(String(requestParameters['pipeline']))), + path: urlPath, method: 'POST', headers: headerParameters, query: queryParameters, diff --git a/src/ts/src/apis/UploadsApi.ts b/src/ts/src/apis/UploadsApi.ts index 4718e3e..c889182 100644 --- a/src/ts/src/apis/UploadsApi.ts +++ b/src/ts/src/apis/UploadsApi.ts @@ -85,8 +85,13 @@ export class UploadsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/uploads/{connectorId}/files`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"connectorId"}}`, encodeURIComponent(String(requestParameters['connectorId']))); + const response = await this.request({ - path: `/org/{organization}/uploads/{connectorId}/files`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"connectorId"}}`, encodeURIComponent(String(requestParameters['connectorId']))), + path: urlPath, method: 'DELETE', headers: headerParameters, query: queryParameters, @@ -133,8 +138,13 @@ export class UploadsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/uploads/{connectorId}/files`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"connectorId"}}`, encodeURIComponent(String(requestParameters['connectorId']))); + const response = await this.request({ - path: `/org/{organization}/uploads/{connectorId}/files`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"connectorId"}}`, 
encodeURIComponent(String(requestParameters['connectorId']))), + path: urlPath, method: 'GET', headers: headerParameters, query: queryParameters, @@ -190,8 +200,13 @@ export class UploadsApi extends runtime.BaseAPI { headerParameters["Authorization"] = `Bearer ${tokenString}`; } } + + let urlPath = `/org/{organization}/uploads/{connectorId}/files`; + urlPath = urlPath.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))); + urlPath = urlPath.replace(`{${"connectorId"}}`, encodeURIComponent(String(requestParameters['connectorId']))); + const response = await this.request({ - path: `/org/{organization}/uploads/{connectorId}/files`.replace(`{${"organization"}}`, encodeURIComponent(String(requestParameters['organization']))).replace(`{${"connectorId"}}`, encodeURIComponent(String(requestParameters['connectorId']))), + path: urlPath, method: 'PUT', headers: headerParameters, query: queryParameters, diff --git a/src/ts/src/runtime.ts b/src/ts/src/runtime.ts index 0d4f318..f38c132 100644 --- a/src/ts/src/runtime.ts +++ b/src/ts/src/runtime.ts @@ -343,10 +343,11 @@ export function exists(json: any, key: string) { } export function mapValues(data: any, fn: (item: any) => any) { - return Object.keys(data).reduce( - (acc, key) => ({ ...acc, [key]: fn(data[key]) }), - {} - ); + const result: { [key: string]: any } = {}; + for (const key of Object.keys(data)) { + result[key] = fn(data[key]); + } + return result; } export function canConsumeForm(consumes: Consume[]): boolean { diff --git a/test-requirements.txt b/test-requirements.txt new file mode 100644 index 0000000..e98555c --- /dev/null +++ b/test-requirements.txt @@ -0,0 +1,6 @@ +pytest >= 7.2.1 +pytest-cov >= 2.8.1 +tox >= 3.9.0 +flake8 >= 4.0.0 +types-python-dateutil >= 2.8.19.14 +mypy >= 1.5 diff --git a/src/python/vectorize_client/py.typed b/test/__init__.py similarity index 100% rename from src/python/vectorize_client/py.typed rename to test/__init__.py diff --git 
a/test/test_add_user_from_source_connector_response.py b/test/test_add_user_from_source_connector_response.py new file mode 100644 index 0000000..5b860a0 --- /dev/null +++ b/test/test_add_user_from_source_connector_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.add_user_from_source_connector_response import AddUserFromSourceConnectorResponse + +class TestAddUserFromSourceConnectorResponse(unittest.TestCase): + """AddUserFromSourceConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AddUserFromSourceConnectorResponse: + """Test AddUserFromSourceConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AddUserFromSourceConnectorResponse` + """ + model = AddUserFromSourceConnectorResponse() + if include_optional: + return AddUserFromSourceConnectorResponse( + message = '' + ) + else: + return AddUserFromSourceConnectorResponse( + message = '', + ) + """ + + def testAddUserFromSourceConnectorResponse(self): + """Test AddUserFromSourceConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_add_user_to_source_connector_request.py b/test/test_add_user_to_source_connector_request.py new file mode 100644 index 0000000..e8089b8 --- /dev/null +++ b/test/test_add_user_to_source_connector_request.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The 
version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.add_user_to_source_connector_request import AddUserToSourceConnectorRequest + +class TestAddUserToSourceConnectorRequest(unittest.TestCase): + """AddUserToSourceConnectorRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AddUserToSourceConnectorRequest: + """Test AddUserToSourceConnectorRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AddUserToSourceConnectorRequest` + """ + model = AddUserToSourceConnectorRequest() + if include_optional: + return AddUserToSourceConnectorRequest( + user_id = '', + selected_files = None, + refresh_token = '', + access_token = '' + ) + else: + return AddUserToSourceConnectorRequest( + user_id = '', + selected_files = None, + ) + """ + + def testAddUserToSourceConnectorRequest(self): + """Test AddUserToSourceConnectorRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_add_user_to_source_connector_request_selected_files.py b/test/test_add_user_to_source_connector_request_selected_files.py new file mode 100644 index 0000000..cc43d94 --- /dev/null +++ b/test/test_add_user_to_source_connector_request_selected_files.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.add_user_to_source_connector_request_selected_files import AddUserToSourceConnectorRequestSelectedFiles + +class TestAddUserToSourceConnectorRequestSelectedFiles(unittest.TestCase): + """AddUserToSourceConnectorRequestSelectedFiles unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AddUserToSourceConnectorRequestSelectedFiles: + """Test AddUserToSourceConnectorRequestSelectedFiles + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AddUserToSourceConnectorRequestSelectedFiles` + """ + model = AddUserToSourceConnectorRequestSelectedFiles() + if include_optional: + return AddUserToSourceConnectorRequestSelectedFiles( + page_ids = [ + '' + ], + database_ids = [ + '' + ] + ) + else: + return AddUserToSourceConnectorRequestSelectedFiles( + ) + """ + + def testAddUserToSourceConnectorRequestSelectedFiles(self): + """Test AddUserToSourceConnectorRequestSelectedFiles""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_add_user_to_source_connector_request_selected_files_any_of.py b/test/test_add_user_to_source_connector_request_selected_files_any_of.py new file mode 100644 index 0000000..3fd7382 --- /dev/null +++ b/test/test_add_user_to_source_connector_request_selected_files_any_of.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of import AddUserToSourceConnectorRequestSelectedFilesAnyOf + +class TestAddUserToSourceConnectorRequestSelectedFilesAnyOf(unittest.TestCase): + """AddUserToSourceConnectorRequestSelectedFilesAnyOf unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AddUserToSourceConnectorRequestSelectedFilesAnyOf: + """Test AddUserToSourceConnectorRequestSelectedFilesAnyOf + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AddUserToSourceConnectorRequestSelectedFilesAnyOf` + """ + model = AddUserToSourceConnectorRequestSelectedFilesAnyOf() + if include_optional: + return AddUserToSourceConnectorRequestSelectedFilesAnyOf( + page_ids = [ + '' + ], + database_ids = [ + '' + ] + ) + else: + return AddUserToSourceConnectorRequestSelectedFilesAnyOf( + ) + """ + + def testAddUserToSourceConnectorRequestSelectedFilesAnyOf(self): + """Test AddUserToSourceConnectorRequestSelectedFilesAnyOf""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_add_user_to_source_connector_request_selected_files_any_of_value.py b/test/test_add_user_to_source_connector_request_selected_files_any_of_value.py new file mode 100644 index 0000000..0187f1f --- /dev/null +++ b/test/test_add_user_to_source_connector_request_selected_files_any_of_value.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of_value import AddUserToSourceConnectorRequestSelectedFilesAnyOfValue + +class TestAddUserToSourceConnectorRequestSelectedFilesAnyOfValue(unittest.TestCase): + """AddUserToSourceConnectorRequestSelectedFilesAnyOfValue unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AddUserToSourceConnectorRequestSelectedFilesAnyOfValue: + """Test AddUserToSourceConnectorRequestSelectedFilesAnyOfValue + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AddUserToSourceConnectorRequestSelectedFilesAnyOfValue` + """ + model = AddUserToSourceConnectorRequestSelectedFilesAnyOfValue() + if include_optional: + return AddUserToSourceConnectorRequestSelectedFilesAnyOfValue( + name = '', + mime_type = '' + ) + else: + return AddUserToSourceConnectorRequestSelectedFilesAnyOfValue( + name = '', + mime_type = '', + ) + """ + + def testAddUserToSourceConnectorRequestSelectedFilesAnyOfValue(self): + """Test AddUserToSourceConnectorRequestSelectedFilesAnyOfValue""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_advanced_query.py b/test/test_advanced_query.py new file mode 100644 index 0000000..d91e2c6 --- /dev/null +++ b/test/test_advanced_query.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.advanced_query import AdvancedQuery + +class TestAdvancedQuery(unittest.TestCase): + """AdvancedQuery unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AdvancedQuery: + """Test AdvancedQuery + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AdvancedQuery` + """ + model = AdvancedQuery() + if include_optional: + return AdvancedQuery( + mode = 'text', + text_fields = [ + '' + ], + match_type = 'match', + text_boost = 1.337, + filters = None + ) + else: + return AdvancedQuery( + ) + """ + + def testAdvancedQuery(self): + """Test AdvancedQuery""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_ai_platform.py b/test/test_ai_platform.py new file mode 100644 index 0000000..0a722c4 --- /dev/null +++ b/test/test_ai_platform.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.ai_platform import AIPlatform + +class TestAIPlatform(unittest.TestCase): + """AIPlatform unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AIPlatform: + """Test AIPlatform + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AIPlatform` + """ + model = AIPlatform() + if include_optional: + return AIPlatform( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '' + ) + else: + return AIPlatform( + id = '', + type = '', + name = '', + ) + """ + + def testAIPlatform(self): + """Test AIPlatform""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_ai_platform_config_schema.py b/test/test_ai_platform_config_schema.py new file mode 100644 index 0000000..21bd295 --- /dev/null +++ b/test/test_ai_platform_config_schema.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.ai_platform_config_schema import AIPlatformConfigSchema + +class TestAIPlatformConfigSchema(unittest.TestCase): + """AIPlatformConfigSchema unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AIPlatformConfigSchema: + """Test AIPlatformConfigSchema + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AIPlatformConfigSchema` + """ + model = AIPlatformConfigSchema() + if include_optional: + return AIPlatformConfigSchema( + embedding_model = 'VECTORIZE_OPEN_AI_TEXT_EMBEDDING_2', + chunking_strategy = 'FIXED', + chunk_size = 1, + chunk_overlap = 0, + dimensions = 1, + extraction_strategy = 'FAST' + ) + else: + return AIPlatformConfigSchema( + ) + """ + + def testAIPlatformConfigSchema(self): + """Test AIPlatformConfigSchema""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_ai_platform_input.py b/test/test_ai_platform_input.py new file mode 100644 index 0000000..0bea98a --- /dev/null +++ b/test/test_ai_platform_input.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.ai_platform_input import AIPlatformInput + +class TestAIPlatformInput(unittest.TestCase): + """AIPlatformInput unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AIPlatformInput: + """Test AIPlatformInput + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AIPlatformInput` + """ + model = AIPlatformInput() + if include_optional: + return AIPlatformInput( + id = '', + type = 'BEDROCK', + config = None + ) + else: + return AIPlatformInput( + id = '', + type = 'BEDROCK', + config = None, + ) + """ + + def testAIPlatformInput(self): + """Test AIPlatformInput""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_ai_platform_schema.py b/test/test_ai_platform_schema.py new file mode 100644 index 0000000..fc981f2 --- /dev/null +++ b/test/test_ai_platform_schema.py @@ -0,0 +1,68 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.ai_platform_schema import AIPlatformSchema + +class TestAIPlatformSchema(unittest.TestCase): + """AIPlatformSchema unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AIPlatformSchema: + """Test AIPlatformSchema + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AIPlatformSchema` + """ + model = AIPlatformSchema() + if include_optional: + return AIPlatformSchema( + id = '', + type = 'BEDROCK', + config = vectorize_client.models.ai_platform_config_schema.AIPlatformConfigSchema( + embedding_model = 'VECTORIZE_OPEN_AI_TEXT_EMBEDDING_2', + chunking_strategy = 'FIXED', + chunk_size = 1, + chunk_overlap = 0, + dimensions = 1, + extraction_strategy = 'FAST', ) + ) + else: + return AIPlatformSchema( + id = '', + type = 'BEDROCK', + config = vectorize_client.models.ai_platform_config_schema.AIPlatformConfigSchema( + embedding_model = 'VECTORIZE_OPEN_AI_TEXT_EMBEDDING_2', + chunking_strategy = 'FIXED', + chunk_size = 1, + chunk_overlap = 0, + dimensions = 1, + extraction_strategy = 'FAST', ), + ) + """ + + def testAIPlatformSchema(self): + """Test AIPlatformSchema""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_ai_platform_type.py b/test/test_ai_platform_type.py new file mode 100644 index 0000000..3b60676 --- /dev/null +++ b/test/test_ai_platform_type.py @@ -0,0 +1,33 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.ai_platform_type import AIPlatformType + +class TestAIPlatformType(unittest.TestCase): + """AIPlatformType unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def testAIPlatformType(self): + """Test AIPlatformType""" + # inst = AIPlatformType() + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_amazon_s3.py b/test/test_amazon_s3.py new file mode 100644 index 0000000..5ff514e --- /dev/null +++ b/test/test_amazon_s3.py @@ -0,0 +1,68 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.amazon_s3 import AmazonS3 + +class TestAmazonS3(unittest.TestCase): + """AmazonS3 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AmazonS3: + """Test AmazonS3 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AmazonS3` + """ + model = AmazonS3() + if include_optional: + return AmazonS3( + name = '', + type = 'AWS_S3', + config = vectorize_client.models.aws_s3_config.AWS_S3Config( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', ) + ) + else: + return AmazonS3( + name = '', + type = 'AWS_S3', + config = vectorize_client.models.aws_s3_config.AWS_S3Config( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', ), + ) + """ + + def testAmazonS3(self): + """Test AmazonS3""" + # inst_req_only = self.make_instance(include_optional=False) + # 
inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_amazon_s31.py b/test/test_amazon_s31.py new file mode 100644 index 0000000..883b2e3 --- /dev/null +++ b/test/test_amazon_s31.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.amazon_s31 import AmazonS31 + +class TestAmazonS31(unittest.TestCase): + """AmazonS31 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AmazonS31: + """Test AmazonS31 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AmazonS31` + """ + model = AmazonS31() + if include_optional: + return AmazonS31( + config = vectorize_client.models.aws_s3_config.AWS_S3Config( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', ) + ) + else: + return AmazonS31( + ) + """ + + def testAmazonS31(self): + """Test AmazonS31""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_amazon_s32.py b/test/test_amazon_s32.py new file mode 100644 index 0000000..665836a --- /dev/null +++ b/test/test_amazon_s32.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.amazon_s32 import AmazonS32 + +class TestAmazonS32(unittest.TestCase): + """AmazonS32 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AmazonS32: + """Test AmazonS32 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AmazonS32` + """ + model = AmazonS32() + if include_optional: + return AmazonS32( + id = '', + type = 'AWS_S3' + ) + else: + return AmazonS32( + id = '', + type = 'AWS_S3', + ) + """ + + def testAmazonS32(self): + """Test AmazonS32""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_awss3_auth_config.py b/test/test_awss3_auth_config.py new file mode 100644 index 0000000..e7d82fb --- /dev/null +++ b/test/test_awss3_auth_config.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.awss3_auth_config import AWSS3AuthConfig + +class TestAWSS3AuthConfig(unittest.TestCase): + """AWSS3AuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AWSS3AuthConfig: + """Test AWSS3AuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AWSS3AuthConfig` + """ + model = AWSS3AuthConfig() + if include_optional: + return AWSS3AuthConfig( + name = '', + access_key = 'k', + secret_key = 'k', + bucket_name = '', + endpoint = '', + region = '', + archiver = True + ) + else: + return AWSS3AuthConfig( + name = '', + access_key = 'k', + secret_key = 'k', + bucket_name = '', + archiver = True, + ) + """ + + def testAWSS3AuthConfig(self): + """Test AWSS3AuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_awss3_config.py b/test/test_awss3_config.py new file mode 100644 index 0000000..9716077 --- /dev/null +++ b/test/test_awss3_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.awss3_config import AWSS3Config + +class TestAWSS3Config(unittest.TestCase): + """AWSS3Config unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AWSS3Config: + """Test AWSS3Config + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AWSS3Config` + """ + model = AWSS3Config() + if include_optional: + return AWSS3Config( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '' + ) + else: + return AWSS3Config( + file_extensions = pdf, + idle_time = 1, + ) + """ + + def testAWSS3Config(self): + """Test AWSS3Config""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azure_blob_storage.py b/test/test_azure_blob_storage.py new file mode 100644 index 0000000..ee50681 --- /dev/null +++ b/test/test_azure_blob_storage.py @@ -0,0 +1,68 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azure_blob_storage import AzureBlobStorage + +class TestAzureBlobStorage(unittest.TestCase): + """AzureBlobStorage unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AzureBlobStorage: + """Test AzureBlobStorage + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AzureBlobStorage` + """ + model = AzureBlobStorage() + if include_optional: + return AzureBlobStorage( + name = '', + type = 'AZURE_BLOB', + config = vectorize_client.models.azure_blob_config.AZURE_BLOBConfig( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', ) + ) + else: + return AzureBlobStorage( + name = '', + type = 'AZURE_BLOB', + config = vectorize_client.models.azure_blob_config.AZURE_BLOBConfig( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', ), + ) + """ + + def testAzureBlobStorage(self): + """Test AzureBlobStorage""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azure_blob_storage1.py b/test/test_azure_blob_storage1.py new file mode 100644 index 0000000..bd93273 --- /dev/null +++ b/test/test_azure_blob_storage1.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azure_blob_storage1 import AzureBlobStorage1 + +class TestAzureBlobStorage1(unittest.TestCase): + """AzureBlobStorage1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AzureBlobStorage1: + """Test AzureBlobStorage1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AzureBlobStorage1` + """ + model = AzureBlobStorage1() + if include_optional: + return AzureBlobStorage1( + config = vectorize_client.models.azure_blob_config.AZURE_BLOBConfig( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', ) + ) + else: + return AzureBlobStorage1( + ) + """ + + def testAzureBlobStorage1(self): + """Test AzureBlobStorage1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azure_blob_storage2.py b/test/test_azure_blob_storage2.py new file mode 100644 index 0000000..bcf6806 --- /dev/null +++ b/test/test_azure_blob_storage2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azure_blob_storage2 import AzureBlobStorage2 + +class TestAzureBlobStorage2(unittest.TestCase): + """AzureBlobStorage2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AzureBlobStorage2: + """Test AzureBlobStorage2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AzureBlobStorage2` + """ + model = AzureBlobStorage2() + if include_optional: + return AzureBlobStorage2( + id = '', + type = 'AZURE_BLOB' + ) + else: + return AzureBlobStorage2( + id = '', + type = 'AZURE_BLOB', + ) + """ + + def testAzureBlobStorage2(self): + """Test AzureBlobStorage2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azureaisearch.py b/test/test_azureaisearch.py new file mode 100644 index 0000000..43e718f --- /dev/null +++ b/test/test_azureaisearch.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azureaisearch import Azureaisearch + +class TestAzureaisearch(unittest.TestCase): + """Azureaisearch unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Azureaisearch: + """Test Azureaisearch + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Azureaisearch` + """ + model = Azureaisearch() + if include_optional: + return Azureaisearch( + name = '', + type = 'AZUREAISEARCH', + config = vectorize_client.models.azureaisearch_config.AZUREAISEARCHConfig( + index = 'wr1c2v7s6djuy1zmetozkhdomha1b0', ) + ) + else: + return Azureaisearch( + name = '', + type = 'AZUREAISEARCH', + config = vectorize_client.models.azureaisearch_config.AZUREAISEARCHConfig( + index = 'wr1c2v7s6djuy1zmetozkhdomha1b0', ), + ) + """ + + def testAzureaisearch(self): + """Test Azureaisearch""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azureaisearch1.py b/test/test_azureaisearch1.py new file mode 100644 index 0000000..1c47ae7 --- /dev/null +++ b/test/test_azureaisearch1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azureaisearch1 import Azureaisearch1 + +class TestAzureaisearch1(unittest.TestCase): + """Azureaisearch1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Azureaisearch1: + """Test Azureaisearch1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Azureaisearch1` + """ + model = Azureaisearch1() + if include_optional: + return Azureaisearch1( + config = vectorize_client.models.azureaisearch_config.AZUREAISEARCHConfig( + index = 'wr1c2v7s6djuy1zmetozkhdomha1b0', ) + ) + else: + return Azureaisearch1( + ) + """ + + def testAzureaisearch1(self): + """Test Azureaisearch1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azureaisearch2.py b/test/test_azureaisearch2.py new file mode 100644 index 0000000..65848bc --- /dev/null +++ b/test/test_azureaisearch2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azureaisearch2 import Azureaisearch2 + +class TestAzureaisearch2(unittest.TestCase): + """Azureaisearch2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Azureaisearch2: + """Test Azureaisearch2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Azureaisearch2` + """ + model = Azureaisearch2() + if include_optional: + return Azureaisearch2( + id = '', + type = 'AZUREAISEARCH' + ) + else: + return Azureaisearch2( + id = '', + type = 'AZUREAISEARCH', + ) + """ + + def testAzureaisearch2(self): + """Test Azureaisearch2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azureaisearch_auth_config.py b/test/test_azureaisearch_auth_config.py new file mode 100644 index 0000000..b6b43d3 --- /dev/null +++ b/test/test_azureaisearch_auth_config.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azureaisearch_auth_config import AZUREAISEARCHAuthConfig + +class TestAZUREAISEARCHAuthConfig(unittest.TestCase): + """AZUREAISEARCHAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AZUREAISEARCHAuthConfig: + """Test AZUREAISEARCHAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AZUREAISEARCHAuthConfig` + """ + model = AZUREAISEARCHAuthConfig() + if include_optional: + return AZUREAISEARCHAuthConfig( + name = '', + service_name = '', + api_key = 'k' + ) + else: + return AZUREAISEARCHAuthConfig( + name = '', + service_name = '', + api_key = 'k', + ) + """ + + def testAZUREAISEARCHAuthConfig(self): + """Test AZUREAISEARCHAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azureaisearch_config.py b/test/test_azureaisearch_config.py new file mode 100644 index 0000000..b3f6dbb --- /dev/null +++ b/test/test_azureaisearch_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azureaisearch_config import AZUREAISEARCHConfig + +class TestAZUREAISEARCHConfig(unittest.TestCase): + """AZUREAISEARCHConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AZUREAISEARCHConfig: + """Test AZUREAISEARCHConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AZUREAISEARCHConfig` + """ + model = AZUREAISEARCHConfig() + if include_optional: + return AZUREAISEARCHConfig( + index = 'wr1c2v7s6djuy1zmetozkhdomha1b0' + ) + else: + return AZUREAISEARCHConfig( + index = 'wr1c2v7s6djuy1zmetozkhdomha1b0', + ) + """ + + def testAZUREAISEARCHConfig(self): + """Test AZUREAISEARCHConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azureblob_auth_config.py b/test/test_azureblob_auth_config.py new file mode 100644 index 0000000..4c02972 --- /dev/null +++ b/test/test_azureblob_auth_config.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azureblob_auth_config import AZUREBLOBAuthConfig + +class TestAZUREBLOBAuthConfig(unittest.TestCase): + """AZUREBLOBAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AZUREBLOBAuthConfig: + """Test AZUREBLOBAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AZUREBLOBAuthConfig` + """ + model = AZUREBLOBAuthConfig() + if include_optional: + return AZUREBLOBAuthConfig( + name = '', + storage_account_name = '', + storage_account_key = '', + container = '', + endpoint = '' + ) + else: + return AZUREBLOBAuthConfig( + name = '', + storage_account_name = '', + storage_account_key = '', + container = '', + ) + """ + + def testAZUREBLOBAuthConfig(self): + """Test AZUREBLOBAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_azureblob_config.py b/test/test_azureblob_config.py new file mode 100644 index 0000000..f380aa4 --- /dev/null +++ b/test/test_azureblob_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.azureblob_config import AZUREBLOBConfig + +class TestAZUREBLOBConfig(unittest.TestCase): + """AZUREBLOBConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AZUREBLOBConfig: + """Test AZUREBLOBConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AZUREBLOBConfig` + """ + model = AZUREBLOBConfig() + if include_optional: + return AZUREBLOBConfig( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '' + ) + else: + return AZUREBLOBConfig( + file_extensions = pdf, + idle_time = 1, + ) + """ + + def testAZUREBLOBConfig(self): + """Test AZUREBLOBConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_bedrock.py b/test/test_bedrock.py new file mode 100644 index 0000000..5599577 --- /dev/null +++ b/test/test_bedrock.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.bedrock import Bedrock + +class TestBedrock(unittest.TestCase): + """Bedrock unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Bedrock: + """Test Bedrock + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Bedrock` + """ + model = Bedrock() + if include_optional: + return Bedrock( + name = '', + type = 'BEDROCK', + config = vectorize_client.models.bedrock_auth_config.BEDROCKAuthConfig( + name = '', + access_key = 'k', + key = 'k', + region = '', ) + ) + else: + return Bedrock( + name = '', + type = 'BEDROCK', + config = vectorize_client.models.bedrock_auth_config.BEDROCKAuthConfig( + name = '', + access_key = 'k', + key = 'k', + region = '', ), + ) + """ + + def testBedrock(self): + """Test Bedrock""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_bedrock1.py b/test/test_bedrock1.py new file mode 100644 index 0000000..6ec6e80 --- /dev/null +++ b/test/test_bedrock1.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.bedrock1 import Bedrock1 + +class TestBedrock1(unittest.TestCase): + """Bedrock1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Bedrock1: + """Test Bedrock1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Bedrock1` + """ + model = Bedrock1() + if include_optional: + return Bedrock1( + config = vectorize_client.models.bedrock_auth_config.BEDROCKAuthConfig( + name = '', + access_key = 'k', + key = 'k', + region = '', ) + ) + else: + return Bedrock1( + ) + """ + + def testBedrock1(self): + """Test Bedrock1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_bedrock2.py b/test/test_bedrock2.py new file mode 100644 index 0000000..0ca99b2 --- /dev/null +++ b/test/test_bedrock2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.bedrock2 import Bedrock2 + +class TestBedrock2(unittest.TestCase): + """Bedrock2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Bedrock2: + """Test Bedrock2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Bedrock2` + """ + model = Bedrock2() + if include_optional: + return Bedrock2( + id = '', + type = 'BEDROCK' + ) + else: + return Bedrock2( + id = '', + type = 'BEDROCK', + ) + """ + + def testBedrock2(self): + """Test Bedrock2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_bedrock_auth_config.py b/test/test_bedrock_auth_config.py new file mode 100644 index 0000000..74488ad --- /dev/null +++ b/test/test_bedrock_auth_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.bedrock_auth_config import BEDROCKAuthConfig + +class TestBEDROCKAuthConfig(unittest.TestCase): + """BEDROCKAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> BEDROCKAuthConfig: + """Test BEDROCKAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `BEDROCKAuthConfig` + """ + model = BEDROCKAuthConfig() + if include_optional: + return BEDROCKAuthConfig( + name = '', + access_key = 'k', + key = 'k', + region = '' + ) + else: + return BEDROCKAuthConfig( + name = '', + access_key = 'k', + key = 'k', + region = '', + ) + """ + + def testBEDROCKAuthConfig(self): + """Test BEDROCKAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_capella.py b/test/test_capella.py new file mode 100644 index 0000000..54bbb26 --- /dev/null +++ b/test/test_capella.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.capella import Capella + +class TestCapella(unittest.TestCase): + """Capella unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Capella: + """Test Capella + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Capella` + """ + model = Capella() + if include_optional: + return Capella( + name = '', + type = 'CAPELLA', + config = vectorize_client.models.capella_config.CAPELLAConfig( + bucket = '', + scope = '', + collection = '', + index = '', ) + ) + else: + return Capella( + name = '', + type = 'CAPELLA', + config = vectorize_client.models.capella_config.CAPELLAConfig( + bucket = '', + scope = '', + collection = '', + index = '', ), + ) + """ + + def testCapella(self): + """Test Capella""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_capella1.py b/test/test_capella1.py new file mode 100644 index 0000000..e11a4c1 --- /dev/null +++ b/test/test_capella1.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.capella1 import Capella1 + +class TestCapella1(unittest.TestCase): + """Capella1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Capella1: + """Test Capella1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Capella1` + """ + model = Capella1() + if include_optional: + return Capella1( + config = vectorize_client.models.capella_config.CAPELLAConfig( + bucket = '', + scope = '', + collection = '', + index = '', ) + ) + else: + return Capella1( + ) + """ + + def testCapella1(self): + """Test Capella1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_capella2.py b/test/test_capella2.py new file mode 100644 index 0000000..355ab0b --- /dev/null +++ b/test/test_capella2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.capella2 import Capella2 + +class TestCapella2(unittest.TestCase): + """Capella2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Capella2: + """Test Capella2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Capella2` + """ + model = Capella2() + if include_optional: + return Capella2( + id = '', + type = 'CAPELLA' + ) + else: + return Capella2( + id = '', + type = 'CAPELLA', + ) + """ + + def testCapella2(self): + """Test Capella2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_capella_auth_config.py b/test/test_capella_auth_config.py new file mode 100644 index 0000000..ae4746b --- /dev/null +++ b/test/test_capella_auth_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.capella_auth_config import CAPELLAAuthConfig + +class TestCAPELLAAuthConfig(unittest.TestCase): + """CAPELLAAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CAPELLAAuthConfig: + """Test CAPELLAAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CAPELLAAuthConfig` + """ + model = CAPELLAAuthConfig() + if include_optional: + return CAPELLAAuthConfig( + name = '', + username = '', + password = '', + connection_string = '' + ) + else: + return CAPELLAAuthConfig( + name = '', + username = '', + password = '', + connection_string = '', + ) + """ + + def testCAPELLAAuthConfig(self): + """Test CAPELLAAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_capella_config.py b/test/test_capella_config.py new file mode 100644 index 0000000..5015845 --- /dev/null +++ b/test/test_capella_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.capella_config import CAPELLAConfig + +class TestCAPELLAConfig(unittest.TestCase): + """CAPELLAConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CAPELLAConfig: + """Test CAPELLAConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CAPELLAConfig` + """ + model = CAPELLAConfig() + if include_optional: + return CAPELLAConfig( + bucket = '', + scope = '', + collection = '', + index = '' + ) + else: + return CAPELLAConfig( + bucket = '', + scope = '', + collection = '', + index = '', + ) + """ + + def testCAPELLAConfig(self): + """Test CAPELLAConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_confluence.py b/test/test_confluence.py new file mode 100644 index 0000000..f1aff33 --- /dev/null +++ b/test/test_confluence.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.confluence import Confluence + +class TestConfluence(unittest.TestCase): + """Confluence unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Confluence: + """Test Confluence + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Confluence` + """ + model = Confluence() + if include_optional: + return Confluence( + name = '', + type = 'CONFLUENCE', + config = vectorize_client.models.confluence_config.CONFLUENCEConfig( + spaces = '', + root_parents = '', ) + ) + else: + return Confluence( + name = '', + type = 'CONFLUENCE', + config = vectorize_client.models.confluence_config.CONFLUENCEConfig( + spaces = '', + root_parents = '', ), + ) + """ + + def testConfluence(self): + """Test Confluence""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_confluence1.py b/test/test_confluence1.py new file mode 100644 index 0000000..55ac6e9 --- /dev/null +++ b/test/test_confluence1.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.confluence1 import Confluence1 + +class TestConfluence1(unittest.TestCase): + """Confluence1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Confluence1: + """Test Confluence1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Confluence1` + """ + model = Confluence1() + if include_optional: + return Confluence1( + config = vectorize_client.models.confluence_config.CONFLUENCEConfig( + spaces = '', + root_parents = '', ) + ) + else: + return Confluence1( + ) + """ + + def testConfluence1(self): + """Test Confluence1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_confluence2.py b/test/test_confluence2.py new file mode 100644 index 0000000..56f8994 --- /dev/null +++ b/test/test_confluence2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.confluence2 import Confluence2 + +class TestConfluence2(unittest.TestCase): + """Confluence2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Confluence2: + """Test Confluence2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Confluence2` + """ + model = Confluence2() + if include_optional: + return Confluence2( + id = '', + type = 'CONFLUENCE' + ) + else: + return Confluence2( + id = '', + type = 'CONFLUENCE', + ) + """ + + def testConfluence2(self): + """Test Confluence2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_confluence_auth_config.py b/test/test_confluence_auth_config.py new file mode 100644 index 0000000..e2dcd4d --- /dev/null +++ b/test/test_confluence_auth_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.confluence_auth_config import CONFLUENCEAuthConfig + +class TestCONFLUENCEAuthConfig(unittest.TestCase): + """CONFLUENCEAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CONFLUENCEAuthConfig: + """Test CONFLUENCEAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CONFLUENCEAuthConfig` + """ + model = CONFLUENCEAuthConfig() + if include_optional: + return CONFLUENCEAuthConfig( + name = '', + username = '', + api_token = 'k', + domain = '' + ) + else: + return CONFLUENCEAuthConfig( + name = '', + username = '', + api_token = 'k', + domain = '', + ) + """ + + def testCONFLUENCEAuthConfig(self): + """Test CONFLUENCEAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_confluence_config.py b/test/test_confluence_config.py new file mode 100644 index 0000000..3318157 --- /dev/null +++ b/test/test_confluence_config.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.confluence_config import CONFLUENCEConfig + +class TestCONFLUENCEConfig(unittest.TestCase): + """CONFLUENCEConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CONFLUENCEConfig: + """Test CONFLUENCEConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CONFLUENCEConfig` + """ + model = CONFLUENCEConfig() + if include_optional: + return CONFLUENCEConfig( + spaces = '', + root_parents = '' + ) + else: + return CONFLUENCEConfig( + spaces = '', + ) + """ + + def testCONFLUENCEConfig(self): + """Test CONFLUENCEConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_connectors_ai_platforms_api.py b/test/test_connectors_ai_platforms_api.py new file mode 100644 index 0000000..d6e90a9 --- /dev/null +++ b/test/test_connectors_ai_platforms_api.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.api.connectors_ai_platforms_api import ConnectorsAIPlatformsApi + + +class TestConnectorsAIPlatformsApi(unittest.TestCase): + """ConnectorsAIPlatformsApi unit test stubs""" + + def setUp(self) -> None: + self.api = ConnectorsAIPlatformsApi() + + def tearDown(self) -> None: + pass + + def test_create_ai_platform_connector(self) -> None: + """Test case for create_ai_platform_connector + + Create a new AI platform connector + """ + pass + + def test_delete_ai_platform(self) -> None: + """Test case for delete_ai_platform + + Delete an AI platform connector + """ + pass + + def test_get_ai_platform_connector(self) -> None: + """Test case for get_ai_platform_connector + + Get an AI platform connector + """ + pass + + def test_get_ai_platform_connectors(self) -> None: + """Test case for get_ai_platform_connectors + + Get all existing AI Platform connectors + """ + pass + + def test_update_ai_platform_connector(self) -> None: + """Test case for update_ai_platform_connector + + Update an AI Platform connector + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_connectors_destination_connectors_api.py b/test/test_connectors_destination_connectors_api.py new file mode 100644 index 0000000..5676173 --- /dev/null +++ b/test/test_connectors_destination_connectors_api.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.api.connectors_destination_connectors_api import ConnectorsDestinationConnectorsApi + + +class TestConnectorsDestinationConnectorsApi(unittest.TestCase): + """ConnectorsDestinationConnectorsApi unit test stubs""" + + def setUp(self) -> None: + self.api = ConnectorsDestinationConnectorsApi() + + def tearDown(self) -> None: + pass + + def test_create_destination_connector(self) -> None: + """Test case for create_destination_connector + + Create a new destination connector + """ + pass + + def test_delete_destination_connector(self) -> None: + """Test case for delete_destination_connector + + Delete a destination connector + """ + pass + + def test_get_destination_connector(self) -> None: + """Test case for get_destination_connector + + Get a destination connector + """ + pass + + def test_get_destination_connectors(self) -> None: + """Test case for get_destination_connectors + + Get all existing destination connectors + """ + pass + + def test_update_destination_connector(self) -> None: + """Test case for update_destination_connector + + Update a destination connector + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_connectors_source_connectors_api.py b/test/test_connectors_source_connectors_api.py new file mode 100644 index 0000000..bc6bebd --- /dev/null +++ b/test/test_connectors_source_connectors_api.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.api.connectors_source_connectors_api import ConnectorsSourceConnectorsApi + + +class TestConnectorsSourceConnectorsApi(unittest.TestCase): + """ConnectorsSourceConnectorsApi unit test stubs""" + + def setUp(self) -> None: + self.api = ConnectorsSourceConnectorsApi() + + def tearDown(self) -> None: + pass + + def test_add_user_to_source_connector(self) -> None: + """Test case for add_user_to_source_connector + + Add a user to a source connector + """ + pass + + def test_create_source_connector(self) -> None: + """Test case for create_source_connector + + Create a new source connector + """ + pass + + def test_delete_source_connector(self) -> None: + """Test case for delete_source_connector + + Delete a source connector + """ + pass + + def test_delete_user_from_source_connector(self) -> None: + """Test case for delete_user_from_source_connector + + Delete a source connector user + """ + pass + + def test_get_source_connector(self) -> None: + """Test case for get_source_connector + + Get a source connector + """ + pass + + def test_get_source_connectors(self) -> None: + """Test case for get_source_connectors + + Get all existing source connectors + """ + pass + + def test_update_source_connector(self) -> None: + """Test case for update_source_connector + + Update a source connector + """ + pass + + def test_update_user_in_source_connector(self) -> None: + """Test case for update_user_in_source_connector + + Update a source connector user + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_ai_platform_connector.py b/test/test_create_ai_platform_connector.py new file mode 100644 index 0000000..9360251 --- /dev/null +++ b/test/test_create_ai_platform_connector.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) 
+ + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_ai_platform_connector import CreateAIPlatformConnector + +class TestCreateAIPlatformConnector(unittest.TestCase): + """CreateAIPlatformConnector unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateAIPlatformConnector: + """Test CreateAIPlatformConnector + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateAIPlatformConnector` + """ + model = CreateAIPlatformConnector() + if include_optional: + return CreateAIPlatformConnector( + name = '', + type = 'BEDROCK', + config = { + 'key' : null + } + ) + else: + return CreateAIPlatformConnector( + name = '', + type = 'BEDROCK', + ) + """ + + def testCreateAIPlatformConnector(self): + """Test CreateAIPlatformConnector""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_ai_platform_connector_request_inner.py b/test/test_create_ai_platform_connector_request_inner.py new file mode 100644 index 0000000..f5676e1 --- /dev/null +++ b/test/test_create_ai_platform_connector_request_inner.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_ai_platform_connector_request_inner import CreateAIPlatformConnectorRequestInner + +class TestCreateAIPlatformConnectorRequestInner(unittest.TestCase): + """CreateAIPlatformConnectorRequestInner unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateAIPlatformConnectorRequestInner: + """Test CreateAIPlatformConnectorRequestInner + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateAIPlatformConnectorRequestInner` + """ + model = CreateAIPlatformConnectorRequestInner() + if include_optional: + return CreateAIPlatformConnectorRequestInner( + name = '', + type = 'BEDROCK', + config = vectorize_client.models.voyage_auth_config.VOYAGEAuthConfig( + name = '', + key = 'k', ) + ) + else: + return CreateAIPlatformConnectorRequestInner( + name = '', + type = 'BEDROCK', + config = vectorize_client.models.voyage_auth_config.VOYAGEAuthConfig( + name = '', + key = 'k', ), + ) + """ + + def testCreateAIPlatformConnectorRequestInner(self): + """Test CreateAIPlatformConnectorRequestInner""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_ai_platform_connector_response.py b/test/test_create_ai_platform_connector_response.py new file mode 100644 index 0000000..180c965 --- /dev/null +++ b/test/test_create_ai_platform_connector_response.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_ai_platform_connector_response import CreateAIPlatformConnectorResponse + +class TestCreateAIPlatformConnectorResponse(unittest.TestCase): + """CreateAIPlatformConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateAIPlatformConnectorResponse: + """Test CreateAIPlatformConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateAIPlatformConnectorResponse` + """ + model = CreateAIPlatformConnectorResponse() + if include_optional: + return CreateAIPlatformConnectorResponse( + message = '', + connectors = [ + vectorize_client.models.created_ai_platform_connector.CreatedAIPlatformConnector( + name = '', + id = '', ) + ] + ) + else: + return CreateAIPlatformConnectorResponse( + message = '', + connectors = [ + vectorize_client.models.created_ai_platform_connector.CreatedAIPlatformConnector( + name = '', + id = '', ) + ], + ) + """ + + def testCreateAIPlatformConnectorResponse(self): + """Test CreateAIPlatformConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_destination_connector.py b/test/test_create_destination_connector.py new file mode 100644 index 0000000..2964feb --- /dev/null +++ b/test/test_create_destination_connector.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_destination_connector import CreateDestinationConnector + +class TestCreateDestinationConnector(unittest.TestCase): + """CreateDestinationConnector unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateDestinationConnector: + """Test CreateDestinationConnector + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateDestinationConnector` + """ + model = CreateDestinationConnector() + if include_optional: + return CreateDestinationConnector( + name = '', + type = 'CAPELLA', + config = { + 'key' : null + } + ) + else: + return CreateDestinationConnector( + name = '', + type = 'CAPELLA', + ) + """ + + def testCreateDestinationConnector(self): + """Test CreateDestinationConnector""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_destination_connector_request_inner.py b/test/test_create_destination_connector_request_inner.py new file mode 100644 index 0000000..81bbad6 --- /dev/null +++ b/test/test_create_destination_connector_request_inner.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_destination_connector_request_inner import CreateDestinationConnectorRequestInner + +class TestCreateDestinationConnectorRequestInner(unittest.TestCase): + """CreateDestinationConnectorRequestInner unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateDestinationConnectorRequestInner: + """Test CreateDestinationConnectorRequestInner + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateDestinationConnectorRequestInner` + """ + model = CreateDestinationConnectorRequestInner() + if include_optional: + return CreateDestinationConnectorRequestInner( + name = '', + type = 'CAPELLA', + config = vectorize_client.models.turbopuffer_config.TURBOPUFFERConfig( + namespace = '', ) + ) + else: + return CreateDestinationConnectorRequestInner( + name = '', + type = 'CAPELLA', + config = vectorize_client.models.turbopuffer_config.TURBOPUFFERConfig( + namespace = '', ), + ) + """ + + def testCreateDestinationConnectorRequestInner(self): + """Test CreateDestinationConnectorRequestInner""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_destination_connector_response.py b/test/test_create_destination_connector_response.py new file mode 100644 index 0000000..6930994 --- /dev/null +++ b/test/test_create_destination_connector_response.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_destination_connector_response import CreateDestinationConnectorResponse + +class TestCreateDestinationConnectorResponse(unittest.TestCase): + """CreateDestinationConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateDestinationConnectorResponse: + """Test CreateDestinationConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateDestinationConnectorResponse` + """ + model = CreateDestinationConnectorResponse() + if include_optional: + return CreateDestinationConnectorResponse( + message = '', + connectors = [ + vectorize_client.models.created_destination_connector.CreatedDestinationConnector( + name = '', + id = '', ) + ] + ) + else: + return CreateDestinationConnectorResponse( + message = '', + connectors = [ + vectorize_client.models.created_destination_connector.CreatedDestinationConnector( + name = '', + id = '', ) + ], + ) + """ + + def testCreateDestinationConnectorResponse(self): + """Test CreateDestinationConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_pipeline_response.py b/test/test_create_pipeline_response.py new file mode 100644 index 0000000..83bce6c --- /dev/null +++ b/test/test_create_pipeline_response.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_pipeline_response import CreatePipelineResponse + +class TestCreatePipelineResponse(unittest.TestCase): + """CreatePipelineResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreatePipelineResponse: + """Test CreatePipelineResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreatePipelineResponse` + """ + model = CreatePipelineResponse() + if include_optional: + return CreatePipelineResponse( + message = '', + data = vectorize_client.models.create_pipeline_response_data.CreatePipelineResponse_data( + id = '', ) + ) + else: + return CreatePipelineResponse( + message = '', + data = vectorize_client.models.create_pipeline_response_data.CreatePipelineResponse_data( + id = '', ), + ) + """ + + def testCreatePipelineResponse(self): + """Test CreatePipelineResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_pipeline_response_data.py b/test/test_create_pipeline_response_data.py new file mode 100644 index 0000000..02c392d --- /dev/null +++ b/test/test_create_pipeline_response_data.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_pipeline_response_data import CreatePipelineResponseData + +class TestCreatePipelineResponseData(unittest.TestCase): + """CreatePipelineResponseData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreatePipelineResponseData: + """Test CreatePipelineResponseData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreatePipelineResponseData` + """ + model = CreatePipelineResponseData() + if include_optional: + return CreatePipelineResponseData( + id = '' + ) + else: + return CreatePipelineResponseData( + id = '', + ) + """ + + def testCreatePipelineResponseData(self): + """Test CreatePipelineResponseData""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_source_connector.py b/test/test_create_source_connector.py new file mode 100644 index 0000000..b0d691f --- /dev/null +++ b/test/test_create_source_connector.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_source_connector import CreateSourceConnector + +class TestCreateSourceConnector(unittest.TestCase): + """CreateSourceConnector unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateSourceConnector: + """Test CreateSourceConnector + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateSourceConnector` + """ + model = CreateSourceConnector() + if include_optional: + return CreateSourceConnector( + name = '', + type = 'AWS_S3', + config = { + 'key' : null + } + ) + else: + return CreateSourceConnector( + name = '', + type = 'AWS_S3', + ) + """ + + def testCreateSourceConnector(self): + """Test CreateSourceConnector""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_source_connector_request_inner.py b/test/test_create_source_connector_request_inner.py new file mode 100644 index 0000000..43e63c3 --- /dev/null +++ b/test/test_create_source_connector_request_inner.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_source_connector_request_inner import CreateSourceConnectorRequestInner + +class TestCreateSourceConnectorRequestInner(unittest.TestCase): + """CreateSourceConnectorRequestInner unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateSourceConnectorRequestInner: + """Test CreateSourceConnectorRequestInner + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateSourceConnectorRequestInner` + """ + model = CreateSourceConnectorRequestInner() + if include_optional: + return CreateSourceConnectorRequestInner( + name = '', + type = 'AWS_S3', + config = vectorize_client.models.fireflies_config.FIREFLIESConfig( + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + end_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + title_filter = '', + participant_filter_type = 'AND', + participant_filter = '', + max_meetings = 1.337, ) + ) + else: + return CreateSourceConnectorRequestInner( + name = '', + type = 'AWS_S3', + config = vectorize_client.models.fireflies_config.FIREFLIESConfig( + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + end_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + title_filter = '', + participant_filter_type = 'AND', + participant_filter = '', + max_meetings = 1.337, ), + ) + """ + + def testCreateSourceConnectorRequestInner(self): + """Test CreateSourceConnectorRequestInner""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_create_source_connector_response.py 
b/test/test_create_source_connector_response.py new file mode 100644 index 0000000..ea4c592 --- /dev/null +++ b/test/test_create_source_connector_response.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.create_source_connector_response import CreateSourceConnectorResponse + +class TestCreateSourceConnectorResponse(unittest.TestCase): + """CreateSourceConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateSourceConnectorResponse: + """Test CreateSourceConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateSourceConnectorResponse` + """ + model = CreateSourceConnectorResponse() + if include_optional: + return CreateSourceConnectorResponse( + message = '', + connectors = [ + vectorize_client.models.created_source_connector.CreatedSourceConnector( + name = '', + id = '', ) + ] + ) + else: + return CreateSourceConnectorResponse( + message = '', + connectors = [ + vectorize_client.models.created_source_connector.CreatedSourceConnector( + name = '', + id = '', ) + ], + ) + """ + + def testCreateSourceConnectorResponse(self): + """Test CreateSourceConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_created_ai_platform_connector.py b/test/test_created_ai_platform_connector.py new file mode 100644 index 0000000..07318e4 --- /dev/null +++ b/test/test_created_ai_platform_connector.py @@ -0,0 
+1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.created_ai_platform_connector import CreatedAIPlatformConnector + +class TestCreatedAIPlatformConnector(unittest.TestCase): + """CreatedAIPlatformConnector unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreatedAIPlatformConnector: + """Test CreatedAIPlatformConnector + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreatedAIPlatformConnector` + """ + model = CreatedAIPlatformConnector() + if include_optional: + return CreatedAIPlatformConnector( + name = '', + id = '' + ) + else: + return CreatedAIPlatformConnector( + name = '', + id = '', + ) + """ + + def testCreatedAIPlatformConnector(self): + """Test CreatedAIPlatformConnector""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_created_destination_connector.py b/test/test_created_destination_connector.py new file mode 100644 index 0000000..c8c3d2e --- /dev/null +++ b/test/test_created_destination_connector.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.created_destination_connector import CreatedDestinationConnector + +class TestCreatedDestinationConnector(unittest.TestCase): + """CreatedDestinationConnector unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreatedDestinationConnector: + """Test CreatedDestinationConnector + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreatedDestinationConnector` + """ + model = CreatedDestinationConnector() + if include_optional: + return CreatedDestinationConnector( + name = '', + id = '' + ) + else: + return CreatedDestinationConnector( + name = '', + id = '', + ) + """ + + def testCreatedDestinationConnector(self): + """Test CreatedDestinationConnector""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_created_source_connector.py b/test/test_created_source_connector.py new file mode 100644 index 0000000..9ee69a5 --- /dev/null +++ b/test/test_created_source_connector.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.created_source_connector import CreatedSourceConnector + +class TestCreatedSourceConnector(unittest.TestCase): + """CreatedSourceConnector unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreatedSourceConnector: + """Test CreatedSourceConnector + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreatedSourceConnector` + """ + model = CreatedSourceConnector() + if include_optional: + return CreatedSourceConnector( + name = '', + id = '' + ) + else: + return CreatedSourceConnector( + name = '', + id = '', + ) + """ + + def testCreatedSourceConnector(self): + """Test CreatedSourceConnector""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_datastax.py b/test/test_datastax.py new file mode 100644 index 0000000..8abaab6 --- /dev/null +++ b/test/test_datastax.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.datastax import Datastax + +class TestDatastax(unittest.TestCase): + """Datastax unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Datastax: + """Test Datastax + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Datastax` + """ + model = Datastax() + if include_optional: + return Datastax( + name = '', + type = 'DATASTAX', + config = vectorize_client.models.datastax_config.DATASTAXConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', ) + ) + else: + return Datastax( + name = '', + type = 'DATASTAX', + config = vectorize_client.models.datastax_config.DATASTAXConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', ), + ) + """ + + def testDatastax(self): + """Test Datastax""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_datastax1.py b/test/test_datastax1.py new file mode 100644 index 0000000..b7f98c7 --- /dev/null +++ b/test/test_datastax1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.datastax1 import Datastax1 + +class TestDatastax1(unittest.TestCase): + """Datastax1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Datastax1: + """Test Datastax1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Datastax1` + """ + model = Datastax1() + if include_optional: + return Datastax1( + config = vectorize_client.models.datastax_config.DATASTAXConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', ) + ) + else: + return Datastax1( + ) + """ + + def testDatastax1(self): + """Test Datastax1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_datastax2.py b/test/test_datastax2.py new file mode 100644 index 0000000..f038d2c --- /dev/null +++ b/test/test_datastax2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.datastax2 import Datastax2 + +class TestDatastax2(unittest.TestCase): + """Datastax2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Datastax2: + """Test Datastax2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Datastax2` + """ + model = Datastax2() + if include_optional: + return Datastax2( + id = '', + type = 'DATASTAX' + ) + else: + return Datastax2( + id = '', + type = 'DATASTAX', + ) + """ + + def testDatastax2(self): + """Test Datastax2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_datastax_auth_config.py b/test/test_datastax_auth_config.py new file mode 100644 index 0000000..a4b3c58 --- /dev/null +++ b/test/test_datastax_auth_config.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.datastax_auth_config import DATASTAXAuthConfig + +class TestDATASTAXAuthConfig(unittest.TestCase): + """DATASTAXAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DATASTAXAuthConfig: + """Test DATASTAXAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DATASTAXAuthConfig` + """ + model = DATASTAXAuthConfig() + if include_optional: + return DATASTAXAuthConfig( + name = '', + endpoint_secret = '', + token = 'k' + ) + else: + return DATASTAXAuthConfig( + name = '', + endpoint_secret = '', + token = 'k', + ) + """ + + def testDATASTAXAuthConfig(self): + """Test DATASTAXAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_datastax_config.py b/test/test_datastax_config.py new file mode 100644 index 0000000..3159802 --- /dev/null +++ b/test/test_datastax_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.datastax_config import DATASTAXConfig + +class TestDATASTAXConfig(unittest.TestCase): + """DATASTAXConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DATASTAXConfig: + """Test DATASTAXConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DATASTAXConfig` + """ + model = DATASTAXConfig() + if include_optional: + return DATASTAXConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua' + ) + else: + return DATASTAXConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', + ) + """ + + def testDATASTAXConfig(self): + """Test DATASTAXConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_deep_research_result.py b/test/test_deep_research_result.py new file mode 100644 index 0000000..3ae3128 --- /dev/null +++ b/test/test_deep_research_result.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.deep_research_result import DeepResearchResult + +class TestDeepResearchResult(unittest.TestCase): + """DeepResearchResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DeepResearchResult: + """Test DeepResearchResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DeepResearchResult` + """ + model = DeepResearchResult() + if include_optional: + return DeepResearchResult( + success = True, + events = [ + '' + ], + markdown = '', + error = '' + ) + else: + return DeepResearchResult( + success = True, + ) + """ + + def testDeepResearchResult(self): + """Test DeepResearchResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_delete_ai_platform_connector_response.py b/test/test_delete_ai_platform_connector_response.py new file mode 100644 index 0000000..04ce996 --- /dev/null +++ b/test/test_delete_ai_platform_connector_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.delete_ai_platform_connector_response import DeleteAIPlatformConnectorResponse + +class TestDeleteAIPlatformConnectorResponse(unittest.TestCase): + """DeleteAIPlatformConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DeleteAIPlatformConnectorResponse: + """Test DeleteAIPlatformConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DeleteAIPlatformConnectorResponse` + """ + model = DeleteAIPlatformConnectorResponse() + if include_optional: + return DeleteAIPlatformConnectorResponse( + message = '' + ) + else: + return DeleteAIPlatformConnectorResponse( + message = '', + ) + """ + + def testDeleteAIPlatformConnectorResponse(self): + """Test DeleteAIPlatformConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_delete_destination_connector_response.py b/test/test_delete_destination_connector_response.py new file mode 100644 index 0000000..5186c27 --- /dev/null +++ b/test/test_delete_destination_connector_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.delete_destination_connector_response import DeleteDestinationConnectorResponse + +class TestDeleteDestinationConnectorResponse(unittest.TestCase): + """DeleteDestinationConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DeleteDestinationConnectorResponse: + """Test DeleteDestinationConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DeleteDestinationConnectorResponse` + """ + model = DeleteDestinationConnectorResponse() + if include_optional: + return DeleteDestinationConnectorResponse( + message = '' + ) + else: + return DeleteDestinationConnectorResponse( + message = '', + ) + """ + + def testDeleteDestinationConnectorResponse(self): + """Test DeleteDestinationConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_delete_file_response.py b/test/test_delete_file_response.py new file mode 100644 index 0000000..d37fbbd --- /dev/null +++ b/test/test_delete_file_response.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.delete_file_response import DeleteFileResponse + +class TestDeleteFileResponse(unittest.TestCase): + """DeleteFileResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DeleteFileResponse: + """Test DeleteFileResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DeleteFileResponse` + """ + model = DeleteFileResponse() + if include_optional: + return DeleteFileResponse( + message = '', + file_name = '' + ) + else: + return DeleteFileResponse( + message = '', + file_name = '', + ) + """ + + def testDeleteFileResponse(self): + """Test DeleteFileResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_delete_pipeline_response.py b/test/test_delete_pipeline_response.py new file mode 100644 index 0000000..4f2afc6 --- /dev/null +++ b/test/test_delete_pipeline_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.delete_pipeline_response import DeletePipelineResponse + +class TestDeletePipelineResponse(unittest.TestCase): + """DeletePipelineResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DeletePipelineResponse: + """Test DeletePipelineResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DeletePipelineResponse` + """ + model = DeletePipelineResponse() + if include_optional: + return DeletePipelineResponse( + message = '' + ) + else: + return DeletePipelineResponse( + message = '', + ) + """ + + def testDeletePipelineResponse(self): + """Test DeletePipelineResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_delete_source_connector_response.py b/test/test_delete_source_connector_response.py new file mode 100644 index 0000000..238230e --- /dev/null +++ b/test/test_delete_source_connector_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.delete_source_connector_response import DeleteSourceConnectorResponse + +class TestDeleteSourceConnectorResponse(unittest.TestCase): + """DeleteSourceConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DeleteSourceConnectorResponse: + """Test DeleteSourceConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DeleteSourceConnectorResponse` + """ + model = DeleteSourceConnectorResponse() + if include_optional: + return DeleteSourceConnectorResponse( + message = '' + ) + else: + return DeleteSourceConnectorResponse( + message = '', + ) + """ + + def testDeleteSourceConnectorResponse(self): + """Test DeleteSourceConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_destination_connector.py b/test/test_destination_connector.py new file mode 100644 index 0000000..c84b147 --- /dev/null +++ b/test/test_destination_connector.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.destination_connector import DestinationConnector + +class TestDestinationConnector(unittest.TestCase): + """DestinationConnector unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DestinationConnector: + """Test DestinationConnector + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DestinationConnector` + """ + model = DestinationConnector() + if include_optional: + return DestinationConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '' + ) + else: + return DestinationConnector( + id = '', + type = '', + name = '', + ) + """ + + def testDestinationConnector(self): + """Test DestinationConnector""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_destination_connector_input.py b/test/test_destination_connector_input.py new file mode 100644 index 0000000..0018451 --- /dev/null +++ b/test/test_destination_connector_input.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.destination_connector_input import DestinationConnectorInput + +class TestDestinationConnectorInput(unittest.TestCase): + """DestinationConnectorInput unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DestinationConnectorInput: + """Test DestinationConnectorInput + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DestinationConnectorInput` + """ + model = DestinationConnectorInput() + if include_optional: + return DestinationConnectorInput( + id = '', + type = 'CAPELLA', + config = None + ) + else: + return DestinationConnectorInput( + id = '', + type = 'CAPELLA', + config = None, + ) + """ + + def testDestinationConnectorInput(self): + """Test DestinationConnectorInput""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_destination_connector_input_config.py b/test/test_destination_connector_input_config.py new file mode 100644 index 0000000..ba1bf0d --- /dev/null +++ b/test/test_destination_connector_input_config.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.destination_connector_input_config import DestinationConnectorInputConfig + +class TestDestinationConnectorInputConfig(unittest.TestCase): + """DestinationConnectorInputConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DestinationConnectorInputConfig: + """Test DestinationConnectorInputConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DestinationConnectorInputConfig` + """ + model = DestinationConnectorInputConfig() + if include_optional: + return DestinationConnectorInputConfig( + bucket = '', + scope = '', + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', + index = 'wr1c2v7s6djuy1zmetozkhdomha1b0', + namespace = '', + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1' + ) + else: + return DestinationConnectorInputConfig( + bucket = '', + scope = '', + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', + index = 'wr1c2v7s6djuy1zmetozkhdomha1b0', + namespace = '', + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1', + ) + """ + + def testDestinationConnectorInputConfig(self): + """Test DestinationConnectorInputConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_destination_connector_schema.py b/test/test_destination_connector_schema.py new file mode 100644 index 0000000..d962f82 --- /dev/null +++ b/test/test_destination_connector_schema.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.destination_connector_schema import DestinationConnectorSchema + +class TestDestinationConnectorSchema(unittest.TestCase): + """DestinationConnectorSchema unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DestinationConnectorSchema: + """Test DestinationConnectorSchema + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DestinationConnectorSchema` + """ + model = DestinationConnectorSchema() + if include_optional: + return DestinationConnectorSchema( + id = '', + type = 'CAPELLA', + config = { + 'key' : null + } + ) + else: + return DestinationConnectorSchema( + id = '', + type = 'CAPELLA', + ) + """ + + def testDestinationConnectorSchema(self): + """Test DestinationConnectorSchema""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_destination_connector_type.py b/test/test_destination_connector_type.py new file mode 100644 index 0000000..eff1fc8 --- /dev/null +++ b/test/test_destination_connector_type.py @@ -0,0 +1,33 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.destination_connector_type import DestinationConnectorType + +class TestDestinationConnectorType(unittest.TestCase): + """DestinationConnectorType unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def testDestinationConnectorType(self): + """Test DestinationConnectorType""" + # inst = DestinationConnectorType() + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_discord.py b/test/test_discord.py new file mode 100644 index 0000000..83984d3 --- /dev/null +++ b/test/test_discord.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.discord import Discord + +class TestDiscord(unittest.TestCase): + """Discord unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Discord: + """Test Discord + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Discord` + """ + model = Discord() + if include_optional: + return Discord( + name = '', + type = 'DISCORD', + config = vectorize_client.models.discord_config.DISCORDConfig( + emoji = '', + author = '', + ignore_author = '', + limit = 1, + thread_message_inclusion = 'ALL', + filter_logic = 'AND', + thread_message_mode = 'CONCATENATE', ) + ) + else: + return Discord( + name = '', + type = 'DISCORD', + config = vectorize_client.models.discord_config.DISCORDConfig( + emoji = '', + author = '', + ignore_author = '', + limit = 1, + thread_message_inclusion = 'ALL', + filter_logic = 'AND', + thread_message_mode = 'CONCATENATE', ), + ) + """ + + def 
testDiscord(self): + """Test Discord""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_discord1.py b/test/test_discord1.py new file mode 100644 index 0000000..7ca0936 --- /dev/null +++ b/test/test_discord1.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.discord1 import Discord1 + +class TestDiscord1(unittest.TestCase): + """Discord1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Discord1: + """Test Discord1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Discord1` + """ + model = Discord1() + if include_optional: + return Discord1( + config = vectorize_client.models.discord_config.DISCORDConfig( + emoji = '', + author = '', + ignore_author = '', + limit = 1, + thread_message_inclusion = 'ALL', + filter_logic = 'AND', + thread_message_mode = 'CONCATENATE', ) + ) + else: + return Discord1( + ) + """ + + def testDiscord1(self): + """Test Discord1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_discord2.py b/test/test_discord2.py new file mode 100644 index 0000000..9eb1f14 --- /dev/null +++ b/test/test_discord2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI 
Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.discord2 import Discord2 + +class TestDiscord2(unittest.TestCase): + """Discord2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Discord2: + """Test Discord2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Discord2` + """ + model = Discord2() + if include_optional: + return Discord2( + id = '', + type = 'DISCORD' + ) + else: + return Discord2( + id = '', + type = 'DISCORD', + ) + """ + + def testDiscord2(self): + """Test Discord2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_discord_auth_config.py b/test/test_discord_auth_config.py new file mode 100644 index 0000000..1685e10 --- /dev/null +++ b/test/test_discord_auth_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.discord_auth_config import DISCORDAuthConfig + +class TestDISCORDAuthConfig(unittest.TestCase): + """DISCORDAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DISCORDAuthConfig: + """Test DISCORDAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DISCORDAuthConfig` + """ + model = DISCORDAuthConfig() + if include_optional: + return DISCORDAuthConfig( + name = '', + server_id = '', + bot_token = 'k', + channel_ids = '' + ) + else: + return DISCORDAuthConfig( + name = '', + server_id = '', + bot_token = 'k', + channel_ids = '', + ) + """ + + def testDISCORDAuthConfig(self): + """Test DISCORDAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_discord_config.py b/test/test_discord_config.py new file mode 100644 index 0000000..c8c2a74 --- /dev/null +++ b/test/test_discord_config.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.discord_config import DISCORDConfig + +class TestDISCORDConfig(unittest.TestCase): + """DISCORDConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DISCORDConfig: + """Test DISCORDConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DISCORDConfig` + """ + model = DISCORDConfig() + if include_optional: + return DISCORDConfig( + emoji = '', + author = '', + ignore_author = '', + limit = 1, + thread_message_inclusion = 'ALL', + filter_logic = 'AND', + thread_message_mode = 'CONCATENATE' + ) + else: + return DISCORDConfig( + ) + """ + + def testDISCORDConfig(self): + """Test DISCORDConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_document.py b/test/test_document.py new file mode 100644 index 0000000..cc3c36d --- /dev/null +++ b/test/test_document.py @@ -0,0 +1,74 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.document import Document + +class TestDocument(unittest.TestCase): + """Document unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Document: + """Test Document + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Document` + """ + model = Document() + if include_optional: + return Document( + relevancy = 1.337, + id = '', + text = '', + chunk_id = '', + total_chunks = '', + origin = '', + origin_id = '', + similarity = 1.337, + source = '', + unique_source = '', + source_display_name = '', + pipeline_id = '', + org_id = '' + ) + else: + return Document( + relevancy = 1.337, + id = '', + text = '', + chunk_id = '', + total_chunks = '', + origin = '', + origin_id = '', + similarity = 1.337, + source = '', + unique_source = '', + source_display_name = '', + ) + """ + + def testDocument(self): + """Test Document""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox.py b/test/test_dropbox.py new file mode 100644 index 0000000..81836ff --- /dev/null +++ b/test/test_dropbox.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox import Dropbox + +class TestDropbox(unittest.TestCase): + """Dropbox unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Dropbox: + """Test Dropbox + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Dropbox` + """ + model = Dropbox() + if include_optional: + return Dropbox( + name = '', + type = 'DROPBOX', + config = vectorize_client.models.dropbox_config.DROPBOXConfig( + path_prefix = '/jUR,rZ#UM/?R,Fp^l6$ARj', ) + ) + else: + return Dropbox( + name = '', + type = 'DROPBOX', + config = vectorize_client.models.dropbox_config.DROPBOXConfig( + path_prefix = '/jUR,rZ#UM/?R,Fp^l6$ARj', ), + ) + """ + + def testDropbox(self): + """Test Dropbox""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox1.py b/test/test_dropbox1.py new file mode 100644 index 0000000..6bf163b --- /dev/null +++ b/test/test_dropbox1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox1 import Dropbox1 + +class TestDropbox1(unittest.TestCase): + """Dropbox1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Dropbox1: + """Test Dropbox1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Dropbox1` + """ + model = Dropbox1() + if include_optional: + return Dropbox1( + config = vectorize_client.models.dropbox_config.DROPBOXConfig( + path_prefix = '/jUR,rZ#UM/?R,Fp^l6$ARj', ) + ) + else: + return Dropbox1( + ) + """ + + def testDropbox1(self): + """Test Dropbox1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox2.py b/test/test_dropbox2.py new file mode 100644 index 0000000..1609cd8 --- /dev/null +++ b/test/test_dropbox2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox2 import Dropbox2 + +class TestDropbox2(unittest.TestCase): + """Dropbox2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Dropbox2: + """Test Dropbox2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Dropbox2` + """ + model = Dropbox2() + if include_optional: + return Dropbox2( + id = '', + type = 'DROPBOX' + ) + else: + return Dropbox2( + id = '', + type = 'DROPBOX', + ) + """ + + def testDropbox2(self): + """Test Dropbox2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_auth_config.py b/test/test_dropbox_auth_config.py new file mode 100644 index 0000000..e7f58d0 --- /dev/null +++ b/test/test_dropbox_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_auth_config import DROPBOXAuthConfig + +class TestDROPBOXAuthConfig(unittest.TestCase): + """DROPBOXAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DROPBOXAuthConfig: + """Test DROPBOXAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DROPBOXAuthConfig` + """ + model = DROPBOXAuthConfig() + if include_optional: + return DROPBOXAuthConfig( + name = '', + refresh_token = 'k' + ) + else: + return DROPBOXAuthConfig( + name = '', + refresh_token = 'k', + ) + """ + + def testDROPBOXAuthConfig(self): + """Test DROPBOXAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_config.py b/test/test_dropbox_config.py new file mode 100644 index 0000000..2cca717 --- /dev/null +++ b/test/test_dropbox_config.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_config import DROPBOXConfig + +class TestDROPBOXConfig(unittest.TestCase): + """DROPBOXConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DROPBOXConfig: + """Test DROPBOXConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DROPBOXConfig` + """ + model = DROPBOXConfig() + if include_optional: + return DROPBOXConfig( + path_prefix = '/jUR,rZ#UM/?R,Fp^l6$ARj' + ) + else: + return DROPBOXConfig( + ) + """ + + def testDROPBOXConfig(self): + """Test DROPBOXConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_oauth.py b/test/test_dropbox_oauth.py new file mode 100644 index 0000000..0270255 --- /dev/null +++ b/test/test_dropbox_oauth.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_oauth import DropboxOauth + +class TestDropboxOauth(unittest.TestCase): + """DropboxOauth unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DropboxOauth: + """Test DropboxOauth + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DropboxOauth` + """ + model = DropboxOauth() + if include_optional: + return DropboxOauth( + name = '', + type = 'DROPBOX_OAUTH', + config = vectorize_client.models.dropbox_oauth_auth_config.DROPBOX_OAUTHAuthConfig( + name = '', + authorized_user = '', + selection_details = '', + edited_users = '', + reconnect_users = '', ) + ) + else: + return DropboxOauth( + name = '', + type = 'DROPBOX_OAUTH', + config = vectorize_client.models.dropbox_oauth_auth_config.DROPBOX_OAUTHAuthConfig( + name = '', + authorized_user = '', + selection_details = '', + edited_users = '', + reconnect_users = '', ), + ) + """ + + def testDropboxOauth(self): + """Test DropboxOauth""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_oauth1.py b/test/test_dropbox_oauth1.py new file mode 100644 index 0000000..8e2fdb0 --- /dev/null +++ b/test/test_dropbox_oauth1.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_oauth1 import DropboxOauth1 + +class TestDropboxOauth1(unittest.TestCase): + """DropboxOauth1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DropboxOauth1: + """Test DropboxOauth1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DropboxOauth1` + """ + model = DropboxOauth1() + if include_optional: + return DropboxOauth1( + config = vectorize_client.models.dropbox_oauth_auth_config.DROPBOX_OAUTHAuthConfig( + name = '', + authorized_user = '', + selection_details = '', + edited_users = '', + reconnect_users = '', ) + ) + else: + return DropboxOauth1( + ) + """ + + def testDropboxOauth1(self): + """Test DropboxOauth1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_oauth2.py b/test/test_dropbox_oauth2.py new file mode 100644 index 0000000..50a76f6 --- /dev/null +++ b/test/test_dropbox_oauth2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_oauth2 import DropboxOauth2 + +class TestDropboxOauth2(unittest.TestCase): + """DropboxOauth2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DropboxOauth2: + """Test DropboxOauth2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DropboxOauth2` + """ + model = DropboxOauth2() + if include_optional: + return DropboxOauth2( + id = '', + type = 'DROPBOX_OAUTH' + ) + else: + return DropboxOauth2( + id = '', + type = 'DROPBOX_OAUTH', + ) + """ + + def testDropboxOauth2(self): + """Test DropboxOauth2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_oauth_multi.py b/test/test_dropbox_oauth_multi.py new file mode 100644 index 0000000..cb52d75 --- /dev/null +++ b/test/test_dropbox_oauth_multi.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_oauth_multi import DropboxOauthMulti + +class TestDropboxOauthMulti(unittest.TestCase): + """DropboxOauthMulti unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DropboxOauthMulti: + """Test DropboxOauthMulti + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DropboxOauthMulti` + """ + model = DropboxOauthMulti() + if include_optional: + return DropboxOauthMulti( + name = '', + type = 'DROPBOX_OAUTH_MULTI', + config = vectorize_client.models.dropbox_oauth_multi_auth_config.DROPBOX_OAUTH_MULTIAuthConfig( + name = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ) + ) + else: + return DropboxOauthMulti( + name = '', + type = 'DROPBOX_OAUTH_MULTI', + config = vectorize_client.models.dropbox_oauth_multi_auth_config.DROPBOX_OAUTH_MULTIAuthConfig( + name = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ), + ) + """ + + def testDropboxOauthMulti(self): + """Test DropboxOauthMulti""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_oauth_multi1.py b/test/test_dropbox_oauth_multi1.py new file mode 100644 index 0000000..70482ea --- /dev/null +++ b/test/test_dropbox_oauth_multi1.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_oauth_multi1 import DropboxOauthMulti1 + +class TestDropboxOauthMulti1(unittest.TestCase): + """DropboxOauthMulti1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DropboxOauthMulti1: + """Test DropboxOauthMulti1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DropboxOauthMulti1` + """ + model = DropboxOauthMulti1() + if include_optional: + return DropboxOauthMulti1( + config = vectorize_client.models.dropbox_oauth_multi_auth_config.DROPBOX_OAUTH_MULTIAuthConfig( + name = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ) + ) + else: + return DropboxOauthMulti1( + ) + """ + + def testDropboxOauthMulti1(self): + """Test DropboxOauthMulti1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_oauth_multi2.py b/test/test_dropbox_oauth_multi2.py new file mode 100644 index 0000000..45be85f --- /dev/null +++ b/test/test_dropbox_oauth_multi2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_oauth_multi2 import DropboxOauthMulti2 + +class TestDropboxOauthMulti2(unittest.TestCase): + """DropboxOauthMulti2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DropboxOauthMulti2: + """Test DropboxOauthMulti2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DropboxOauthMulti2` + """ + model = DropboxOauthMulti2() + if include_optional: + return DropboxOauthMulti2( + id = '', + type = 'DROPBOX_OAUTH_MULTI' + ) + else: + return DropboxOauthMulti2( + id = '', + type = 'DROPBOX_OAUTH_MULTI', + ) + """ + + def testDropboxOauthMulti2(self): + """Test DropboxOauthMulti2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_oauth_multi_custom.py b/test/test_dropbox_oauth_multi_custom.py new file mode 100644 index 0000000..40a2dbd --- /dev/null +++ b/test/test_dropbox_oauth_multi_custom.py @@ -0,0 +1,68 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_oauth_multi_custom import DropboxOauthMultiCustom + +class TestDropboxOauthMultiCustom(unittest.TestCase): + """DropboxOauthMultiCustom unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DropboxOauthMultiCustom: + """Test DropboxOauthMultiCustom + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DropboxOauthMultiCustom` + """ + model = DropboxOauthMultiCustom() + if include_optional: + return DropboxOauthMultiCustom( + name = '', + type = 'DROPBOX_OAUTH_MULTI_CUSTOM', + config = vectorize_client.models.dropbox_oauth_multi_custom_auth_config.DROPBOX_OAUTH_MULTI_CUSTOMAuthConfig( + name = '', + app_key = '', + app_secret = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ) + ) + else: + return DropboxOauthMultiCustom( + name = '', + type = 'DROPBOX_OAUTH_MULTI_CUSTOM', + config = vectorize_client.models.dropbox_oauth_multi_custom_auth_config.DROPBOX_OAUTH_MULTI_CUSTOMAuthConfig( + name = '', + app_key = '', + app_secret = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ), + ) + """ + + def testDropboxOauthMultiCustom(self): + """Test DropboxOauthMultiCustom""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_oauth_multi_custom1.py b/test/test_dropbox_oauth_multi_custom1.py new file mode 100644 index 0000000..1129a33 --- /dev/null +++ b/test/test_dropbox_oauth_multi_custom1.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator 
(https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_oauth_multi_custom1 import DropboxOauthMultiCustom1 + +class TestDropboxOauthMultiCustom1(unittest.TestCase): + """DropboxOauthMultiCustom1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DropboxOauthMultiCustom1: + """Test DropboxOauthMultiCustom1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DropboxOauthMultiCustom1` + """ + model = DropboxOauthMultiCustom1() + if include_optional: + return DropboxOauthMultiCustom1( + config = vectorize_client.models.dropbox_oauth_multi_custom_auth_config.DROPBOX_OAUTH_MULTI_CUSTOMAuthConfig( + name = '', + app_key = '', + app_secret = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ) + ) + else: + return DropboxOauthMultiCustom1( + ) + """ + + def testDropboxOauthMultiCustom1(self): + """Test DropboxOauthMultiCustom1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropbox_oauth_multi_custom2.py b/test/test_dropbox_oauth_multi_custom2.py new file mode 100644 index 0000000..5aa3ef1 --- /dev/null +++ b/test/test_dropbox_oauth_multi_custom2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropbox_oauth_multi_custom2 import DropboxOauthMultiCustom2 + +class TestDropboxOauthMultiCustom2(unittest.TestCase): + """DropboxOauthMultiCustom2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DropboxOauthMultiCustom2: + """Test DropboxOauthMultiCustom2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DropboxOauthMultiCustom2` + """ + model = DropboxOauthMultiCustom2() + if include_optional: + return DropboxOauthMultiCustom2( + id = '', + type = 'DROPBOX_OAUTH_MULTI_CUSTOM' + ) + else: + return DropboxOauthMultiCustom2( + id = '', + type = 'DROPBOX_OAUTH_MULTI_CUSTOM', + ) + """ + + def testDropboxOauthMultiCustom2(self): + """Test DropboxOauthMultiCustom2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropboxoauth_auth_config.py b/test/test_dropboxoauth_auth_config.py new file mode 100644 index 0000000..ea23ebc --- /dev/null +++ b/test/test_dropboxoauth_auth_config.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropboxoauth_auth_config import DROPBOXOAUTHAuthConfig + +class TestDROPBOXOAUTHAuthConfig(unittest.TestCase): + """DROPBOXOAUTHAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DROPBOXOAUTHAuthConfig: + """Test DROPBOXOAUTHAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DROPBOXOAUTHAuthConfig` + """ + model = DROPBOXOAUTHAuthConfig() + if include_optional: + return DROPBOXOAUTHAuthConfig( + name = '', + authorized_user = '', + selection_details = '', + edited_users = '', + reconnect_users = '' + ) + else: + return DROPBOXOAUTHAuthConfig( + name = '', + selection_details = '', + ) + """ + + def testDROPBOXOAUTHAuthConfig(self): + """Test DROPBOXOAUTHAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropboxoauthmulti_auth_config.py b/test/test_dropboxoauthmulti_auth_config.py new file mode 100644 index 0000000..c6bd259 --- /dev/null +++ b/test/test_dropboxoauthmulti_auth_config.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropboxoauthmulti_auth_config import DROPBOXOAUTHMULTIAuthConfig + +class TestDROPBOXOAUTHMULTIAuthConfig(unittest.TestCase): + """DROPBOXOAUTHMULTIAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DROPBOXOAUTHMULTIAuthConfig: + """Test DROPBOXOAUTHMULTIAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DROPBOXOAUTHMULTIAuthConfig` + """ + model = DROPBOXOAUTHMULTIAuthConfig() + if include_optional: + return DROPBOXOAUTHMULTIAuthConfig( + name = '', + authorized_users = '', + edited_users = '', + deleted_users = '' + ) + else: + return DROPBOXOAUTHMULTIAuthConfig( + name = '', + ) + """ + + def testDROPBOXOAUTHMULTIAuthConfig(self): + """Test DROPBOXOAUTHMULTIAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_dropboxoauthmulticustom_auth_config.py b/test/test_dropboxoauthmulticustom_auth_config.py new file mode 100644 index 0000000..f260df3 --- /dev/null +++ b/test/test_dropboxoauthmulticustom_auth_config.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.dropboxoauthmulticustom_auth_config import DROPBOXOAUTHMULTICUSTOMAuthConfig + +class TestDROPBOXOAUTHMULTICUSTOMAuthConfig(unittest.TestCase): + """DROPBOXOAUTHMULTICUSTOMAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DROPBOXOAUTHMULTICUSTOMAuthConfig: + """Test DROPBOXOAUTHMULTICUSTOMAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DROPBOXOAUTHMULTICUSTOMAuthConfig` + """ + model = DROPBOXOAUTHMULTICUSTOMAuthConfig() + if include_optional: + return DROPBOXOAUTHMULTICUSTOMAuthConfig( + name = '', + app_key = '', + app_secret = '', + authorized_users = '', + edited_users = '', + deleted_users = '' + ) + else: + return DROPBOXOAUTHMULTICUSTOMAuthConfig( + name = '', + app_key = '', + app_secret = '', + ) + """ + + def testDROPBOXOAUTHMULTICUSTOMAuthConfig(self): + """Test DROPBOXOAUTHMULTICUSTOMAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_elastic.py b/test/test_elastic.py new file mode 100644 index 0000000..a82b2d0 --- /dev/null +++ b/test/test_elastic.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.elastic import Elastic + +class TestElastic(unittest.TestCase): + """Elastic unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Elastic: + """Test Elastic + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Elastic` + """ + model = Elastic() + if include_optional: + return Elastic( + name = '', + type = 'ELASTIC', + config = ERROR_TO_EXAMPLE_VALUE + ) + else: + return Elastic( + name = '', + type = 'ELASTIC', + config = ERROR_TO_EXAMPLE_VALUE, + ) + """ + + def testElastic(self): + """Test Elastic""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_elastic1.py b/test/test_elastic1.py new file mode 100644 index 0000000..6fe92ba --- /dev/null +++ b/test/test_elastic1.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.elastic1 import Elastic1 + +class TestElastic1(unittest.TestCase): + """Elastic1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Elastic1: + """Test Elastic1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Elastic1` + """ + model = Elastic1() + if include_optional: + return Elastic1( + config = ERROR_TO_EXAMPLE_VALUE + ) + else: + return Elastic1( + ) + """ + + def testElastic1(self): + """Test Elastic1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_elastic2.py b/test/test_elastic2.py new file mode 100644 index 0000000..1984b0a --- /dev/null +++ b/test/test_elastic2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.elastic2 import Elastic2 + +class TestElastic2(unittest.TestCase): + """Elastic2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Elastic2: + """Test Elastic2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Elastic2` + """ + model = Elastic2() + if include_optional: + return Elastic2( + id = '', + type = 'ELASTIC' + ) + else: + return Elastic2( + id = '', + type = 'ELASTIC', + ) + """ + + def testElastic2(self): + """Test Elastic2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_elastic_auth_config.py b/test/test_elastic_auth_config.py new file mode 100644 index 0000000..cd22e5e --- /dev/null +++ b/test/test_elastic_auth_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.elastic_auth_config import ELASTICAuthConfig + +class TestELASTICAuthConfig(unittest.TestCase): + """ELASTICAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ELASTICAuthConfig: + """Test ELASTICAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ELASTICAuthConfig` + """ + model = ELASTICAuthConfig() + if include_optional: + return ELASTICAuthConfig( + name = '', + host = '', + port = '', + api_key = 'k' + ) + else: + return ELASTICAuthConfig( + name = '', + host = '', + port = '', + api_key = 'k', + ) + """ + + def testELASTICAuthConfig(self): + """Test ELASTICAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_elastic_config.py b/test/test_elastic_config.py new file mode 100644 index 0000000..0f7cc97 --- /dev/null +++ b/test/test_elastic_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.elastic_config import ELASTICConfig + +class TestELASTICConfig(unittest.TestCase): + """ELASTICConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ELASTICConfig: + """Test ELASTICConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ELASTICConfig` + """ + model = ELASTICConfig() + if include_optional: + return ELASTICConfig( + index = ERROR_TO_EXAMPLE_VALUE + ) + else: + return ELASTICConfig( + index = ERROR_TO_EXAMPLE_VALUE, + ) + """ + + def testELASTICConfig(self): + """Test ELASTICConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_extraction_api.py b/test/test_extraction_api.py new file mode 100644 index 0000000..f8fa94c --- /dev/null +++ b/test/test_extraction_api.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.api.extraction_api import ExtractionApi + + +class TestExtractionApi(unittest.TestCase): + """ExtractionApi unit test stubs""" + + def setUp(self) -> None: + self.api = ExtractionApi() + + def tearDown(self) -> None: + pass + + def test_get_extraction_result(self) -> None: + """Test case for get_extraction_result + + Get extraction result + """ + pass + + def test_start_extraction(self) -> None: + """Test case for start_extraction + + Start content extraction from a file + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_extraction_chunking_strategy.py b/test/test_extraction_chunking_strategy.py new file mode 100644 index 0000000..6f5f877 --- /dev/null +++ b/test/test_extraction_chunking_strategy.py @@ -0,0 +1,33 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.extraction_chunking_strategy import ExtractionChunkingStrategy + +class TestExtractionChunkingStrategy(unittest.TestCase): + """ExtractionChunkingStrategy unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def testExtractionChunkingStrategy(self): + """Test ExtractionChunkingStrategy""" + # inst = ExtractionChunkingStrategy() + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_extraction_result.py b/test/test_extraction_result.py new file mode 100644 index 0000000..e16f67c --- /dev/null +++ b/test/test_extraction_result.py @@ -0,0 +1,65 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.extraction_result import ExtractionResult + +class TestExtractionResult(unittest.TestCase): + """ExtractionResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ExtractionResult: + """Test ExtractionResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ExtractionResult` + """ + model = ExtractionResult() + if include_optional: + return ExtractionResult( + success = True, + chunks = [ + '' + ], + text = '', + metadata = '', + metadata_schema = '', + chunks_metadata = [ + '' + ], + chunks_schema = [ + '' + ], + error = '' + ) + else: + return ExtractionResult( + success = True, + ) + """ + + def testExtractionResult(self): + """Test ExtractionResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_extraction_result_response.py b/test/test_extraction_result_response.py new file mode 100644 index 0000000..16c230a --- /dev/null +++ b/test/test_extraction_result_response.py @@ -0,0 +1,67 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.extraction_result_response import ExtractionResultResponse + +class TestExtractionResultResponse(unittest.TestCase): + """ExtractionResultResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ExtractionResultResponse: + """Test ExtractionResultResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ExtractionResultResponse` + """ + model = ExtractionResultResponse() + if include_optional: + return ExtractionResultResponse( + ready = True, + data = vectorize_client.models.extraction_result.ExtractionResult( + success = True, + chunks = [ + '' + ], + text = '', + metadata = '', + metadata_schema = '', + chunks_metadata = [ + '' + ], + chunks_schema = [ + '' + ], + error = '', ) + ) + else: + return ExtractionResultResponse( + ready = True, + ) + """ + + def testExtractionResultResponse(self): + """Test ExtractionResultResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_extraction_type.py b/test/test_extraction_type.py new file mode 100644 index 0000000..58d7197 --- /dev/null +++ b/test/test_extraction_type.py @@ -0,0 +1,33 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.extraction_type import ExtractionType + +class TestExtractionType(unittest.TestCase): + """ExtractionType unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def testExtractionType(self): + """Test ExtractionType""" + # inst = ExtractionType() + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_file_upload.py b/test/test_file_upload.py new file mode 100644 index 0000000..61eabb6 --- /dev/null +++ b/test/test_file_upload.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.file_upload import FileUpload + +class TestFileUpload(unittest.TestCase): + """FileUpload unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FileUpload: + """Test FileUpload + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FileUpload` + """ + model = FileUpload() + if include_optional: + return FileUpload( + name = '', + type = 'FILE_UPLOAD', + config = vectorize_client.models.file_upload_auth_config.FILE_UPLOADAuthConfig( + name = '', + path_prefix = '', + files = [ + '' + ], ) + ) + else: + return FileUpload( + name = '', + type = 'FILE_UPLOAD', + config = vectorize_client.models.file_upload_auth_config.FILE_UPLOADAuthConfig( + name = '', + path_prefix = '', + files = [ + '' + ], ), + ) + """ + + def testFileUpload(self): + """Test FileUpload""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': 
+ unittest.main() diff --git a/test/test_file_upload1.py b/test/test_file_upload1.py new file mode 100644 index 0000000..e85b0a1 --- /dev/null +++ b/test/test_file_upload1.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.file_upload1 import FileUpload1 + +class TestFileUpload1(unittest.TestCase): + """FileUpload1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FileUpload1: + """Test FileUpload1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FileUpload1` + """ + model = FileUpload1() + if include_optional: + return FileUpload1( + config = vectorize_client.models.file_upload_auth_config.FILE_UPLOADAuthConfig( + name = '', + path_prefix = '', + files = [ + '' + ], ) + ) + else: + return FileUpload1( + ) + """ + + def testFileUpload1(self): + """Test FileUpload1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_file_upload2.py b/test/test_file_upload2.py new file mode 100644 index 0000000..99a4e2c --- /dev/null +++ b/test/test_file_upload2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.file_upload2 import FileUpload2 + +class TestFileUpload2(unittest.TestCase): + """FileUpload2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FileUpload2: + """Test FileUpload2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FileUpload2` + """ + model = FileUpload2() + if include_optional: + return FileUpload2( + id = '', + type = 'FILE_UPLOAD' + ) + else: + return FileUpload2( + id = '', + type = 'FILE_UPLOAD', + ) + """ + + def testFileUpload2(self): + """Test FileUpload2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_files_api.py b/test/test_files_api.py new file mode 100644 index 0000000..e86bc81 --- /dev/null +++ b/test/test_files_api.py @@ -0,0 +1,38 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.api.files_api import FilesApi + + +class TestFilesApi(unittest.TestCase): + """FilesApi unit test stubs""" + + def setUp(self) -> None: + self.api = FilesApi() + + def tearDown(self) -> None: + pass + + def test_start_file_upload(self) -> None: + """Test case for start_file_upload + + Upload a generic file to the platform + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_fileupload_auth_config.py b/test/test_fileupload_auth_config.py new file mode 100644 index 0000000..baff43a --- /dev/null +++ b/test/test_fileupload_auth_config.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.fileupload_auth_config import FILEUPLOADAuthConfig + +class TestFILEUPLOADAuthConfig(unittest.TestCase): + """FILEUPLOADAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FILEUPLOADAuthConfig: + """Test FILEUPLOADAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FILEUPLOADAuthConfig` + """ + model = FILEUPLOADAuthConfig() + if include_optional: + return FILEUPLOADAuthConfig( + name = '', + path_prefix = '', + files = [ + '' + ] + ) + else: + return FILEUPLOADAuthConfig( + name = '', + ) + """ + + def testFILEUPLOADAuthConfig(self): + """Test FILEUPLOADAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_firecrawl.py 
b/test/test_firecrawl.py new file mode 100644 index 0000000..868bc08 --- /dev/null +++ b/test/test_firecrawl.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.firecrawl import Firecrawl + +class TestFirecrawl(unittest.TestCase): + """Firecrawl unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Firecrawl: + """Test Firecrawl + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Firecrawl` + """ + model = Firecrawl() + if include_optional: + return Firecrawl( + name = '', + type = 'FIRECRAWL', + config = vectorize_client.models.firecrawl_config.FIRECRAWLConfig( + endpoint = 'Crawl', + request = vectorize_client.models.request.request(), ) + ) + else: + return Firecrawl( + name = '', + type = 'FIRECRAWL', + config = vectorize_client.models.firecrawl_config.FIRECRAWLConfig( + endpoint = 'Crawl', + request = vectorize_client.models.request.request(), ), + ) + """ + + def testFirecrawl(self): + """Test Firecrawl""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_firecrawl1.py b/test/test_firecrawl1.py new file mode 100644 index 0000000..fd87c2c --- /dev/null +++ b/test/test_firecrawl1.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.firecrawl1 import Firecrawl1 + +class TestFirecrawl1(unittest.TestCase): + """Firecrawl1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Firecrawl1: + """Test Firecrawl1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Firecrawl1` + """ + model = Firecrawl1() + if include_optional: + return Firecrawl1( + config = vectorize_client.models.firecrawl_config.FIRECRAWLConfig( + endpoint = 'Crawl', + request = vectorize_client.models.request.request(), ) + ) + else: + return Firecrawl1( + ) + """ + + def testFirecrawl1(self): + """Test Firecrawl1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_firecrawl2.py b/test/test_firecrawl2.py new file mode 100644 index 0000000..2f27a24 --- /dev/null +++ b/test/test_firecrawl2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.firecrawl2 import Firecrawl2 + +class TestFirecrawl2(unittest.TestCase): + """Firecrawl2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Firecrawl2: + """Test Firecrawl2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Firecrawl2` + """ + model = Firecrawl2() + if include_optional: + return Firecrawl2( + id = '', + type = 'FIRECRAWL' + ) + else: + return Firecrawl2( + id = '', + type = 'FIRECRAWL', + ) + """ + + def testFirecrawl2(self): + """Test Firecrawl2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_firecrawl_auth_config.py b/test/test_firecrawl_auth_config.py new file mode 100644 index 0000000..c19b6d7 --- /dev/null +++ b/test/test_firecrawl_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.firecrawl_auth_config import FIRECRAWLAuthConfig + +class TestFIRECRAWLAuthConfig(unittest.TestCase): + """FIRECRAWLAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FIRECRAWLAuthConfig: + """Test FIRECRAWLAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FIRECRAWLAuthConfig` + """ + model = FIRECRAWLAuthConfig() + if include_optional: + return FIRECRAWLAuthConfig( + name = '', + api_key = '' + ) + else: + return FIRECRAWLAuthConfig( + name = '', + api_key = '', + ) + """ + + def testFIRECRAWLAuthConfig(self): + """Test FIRECRAWLAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_firecrawl_config.py b/test/test_firecrawl_config.py new file mode 100644 index 0000000..5ad333b --- /dev/null +++ b/test/test_firecrawl_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.firecrawl_config import FIRECRAWLConfig + +class TestFIRECRAWLConfig(unittest.TestCase): + """FIRECRAWLConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FIRECRAWLConfig: + """Test FIRECRAWLConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FIRECRAWLConfig` + """ + model = FIRECRAWLConfig() + if include_optional: + return FIRECRAWLConfig( + endpoint = 'Crawl', + request = None + ) + else: + return FIRECRAWLConfig( + endpoint = 'Crawl', + request = None, + ) + """ + + def testFIRECRAWLConfig(self): + """Test FIRECRAWLConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_fireflies.py b/test/test_fireflies.py new file mode 100644 index 0000000..90a7fc0 --- /dev/null +++ b/test/test_fireflies.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.fireflies import Fireflies + +class TestFireflies(unittest.TestCase): + """Fireflies unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Fireflies: + """Test Fireflies + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Fireflies` + """ + model = Fireflies() + if include_optional: + return Fireflies( + name = '', + type = 'FIREFLIES', + config = vectorize_client.models.fireflies_config.FIREFLIESConfig( + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + end_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + title_filter = '', + participant_filter_type = 'AND', + participant_filter = '', + max_meetings = 1.337, ) + ) + else: + return Fireflies( + name = '', + type = 'FIREFLIES', + config = vectorize_client.models.fireflies_config.FIREFLIESConfig( + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + end_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + title_filter = '', + participant_filter_type = 'AND', + participant_filter = '', + max_meetings = 1.337, ), + ) + """ + + def testFireflies(self): + """Test Fireflies""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_fireflies1.py b/test/test_fireflies1.py new file mode 100644 index 0000000..121cee9 --- /dev/null +++ b/test/test_fireflies1.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator 
(https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.fireflies1 import Fireflies1 + +class TestFireflies1(unittest.TestCase): + """Fireflies1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Fireflies1: + """Test Fireflies1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Fireflies1` + """ + model = Fireflies1() + if include_optional: + return Fireflies1( + config = vectorize_client.models.fireflies_config.FIREFLIESConfig( + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + end_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + title_filter = '', + participant_filter_type = 'AND', + participant_filter = '', + max_meetings = 1.337, ) + ) + else: + return Fireflies1( + ) + """ + + def testFireflies1(self): + """Test Fireflies1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_fireflies2.py b/test/test_fireflies2.py new file mode 100644 index 0000000..8e37776 --- /dev/null +++ b/test/test_fireflies2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.fireflies2 import Fireflies2 + +class TestFireflies2(unittest.TestCase): + """Fireflies2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Fireflies2: + """Test Fireflies2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Fireflies2` + """ + model = Fireflies2() + if include_optional: + return Fireflies2( + id = '', + type = 'FIREFLIES' + ) + else: + return Fireflies2( + id = '', + type = 'FIREFLIES', + ) + """ + + def testFireflies2(self): + """Test Fireflies2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_fireflies_auth_config.py b/test/test_fireflies_auth_config.py new file mode 100644 index 0000000..2467ea1 --- /dev/null +++ b/test/test_fireflies_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.fireflies_auth_config import FIREFLIESAuthConfig + +class TestFIREFLIESAuthConfig(unittest.TestCase): + """FIREFLIESAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FIREFLIESAuthConfig: + """Test FIREFLIESAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FIREFLIESAuthConfig` + """ + model = FIREFLIESAuthConfig() + if include_optional: + return FIREFLIESAuthConfig( + name = '', + api_key = 'k' + ) + else: + return FIREFLIESAuthConfig( + name = '', + api_key = 'k', + ) + """ + + def testFIREFLIESAuthConfig(self): + """Test FIREFLIESAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_fireflies_config.py b/test/test_fireflies_config.py new file mode 100644 index 0000000..c6c0380 --- /dev/null +++ b/test/test_fireflies_config.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.fireflies_config import FIREFLIESConfig + +class TestFIREFLIESConfig(unittest.TestCase): + """FIREFLIESConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FIREFLIESConfig: + """Test FIREFLIESConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FIREFLIESConfig` + """ + model = FIREFLIESConfig() + if include_optional: + return FIREFLIESConfig( + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + end_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + title_filter = '', + participant_filter_type = 'AND', + participant_filter = '', + max_meetings = 1.337 + ) + else: + return FIREFLIESConfig( + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + participant_filter_type = 'AND', + ) + """ + + def testFIREFLIESConfig(self): + """Test FIREFLIESConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_gcs_auth_config.py b/test/test_gcs_auth_config.py new file mode 100644 index 0000000..cb4998f --- /dev/null +++ b/test/test_gcs_auth_config.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.gcs_auth_config import GCSAuthConfig + +class TestGCSAuthConfig(unittest.TestCase): + """GCSAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GCSAuthConfig: + """Test GCSAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GCSAuthConfig` + """ + model = GCSAuthConfig() + if include_optional: + return GCSAuthConfig( + name = '', + service_account_json = '', + bucket_name = '' + ) + else: + return GCSAuthConfig( + name = '', + service_account_json = '', + bucket_name = '', + ) + """ + + def testGCSAuthConfig(self): + """Test GCSAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_gcs_config.py b/test/test_gcs_config.py new file mode 100644 index 0000000..511d9de --- /dev/null +++ b/test/test_gcs_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.gcs_config import GCSConfig + +class TestGCSConfig(unittest.TestCase): + """GCSConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GCSConfig: + """Test GCSConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GCSConfig` + """ + model = GCSConfig() + if include_optional: + return GCSConfig( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '' + ) + else: + return GCSConfig( + file_extensions = pdf, + idle_time = 1, + ) + """ + + def testGCSConfig(self): + """Test GCSConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_get_ai_platform_connectors200_response.py b/test/test_get_ai_platform_connectors200_response.py new file mode 100644 index 0000000..73b4468 --- /dev/null +++ b/test/test_get_ai_platform_connectors200_response.py @@ -0,0 +1,82 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_ai_platform_connectors200_response import GetAIPlatformConnectors200Response + +class TestGetAIPlatformConnectors200Response(unittest.TestCase): + """GetAIPlatformConnectors200Response unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetAIPlatformConnectors200Response: + """Test GetAIPlatformConnectors200Response + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetAIPlatformConnectors200Response` + """ + model = GetAIPlatformConnectors200Response() + if include_optional: + return GetAIPlatformConnectors200Response( + ai_platform_connectors = [ + vectorize_client.models.ai_platform.AIPlatform( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ] + ) + else: + return GetAIPlatformConnectors200Response( + ai_platform_connectors = [ + vectorize_client.models.ai_platform.AIPlatform( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + ) + """ + + def testGetAIPlatformConnectors200Response(self): + """Test GetAIPlatformConnectors200Response""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_get_deep_research_response.py b/test/test_get_deep_research_response.py new file mode 100644 index 0000000..83c813c --- /dev/null +++ 
b/test/test_get_deep_research_response.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_deep_research_response import GetDeepResearchResponse + +class TestGetDeepResearchResponse(unittest.TestCase): + """GetDeepResearchResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetDeepResearchResponse: + """Test GetDeepResearchResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetDeepResearchResponse` + """ + model = GetDeepResearchResponse() + if include_optional: + return GetDeepResearchResponse( + ready = True, + data = vectorize_client.models.deep_research_result.DeepResearchResult( + success = True, + events = [ + '' + ], + markdown = '', + error = '', ) + ) + else: + return GetDeepResearchResponse( + ready = True, + ) + """ + + def testGetDeepResearchResponse(self): + """Test GetDeepResearchResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_get_destination_connectors200_response.py b/test/test_get_destination_connectors200_response.py new file mode 100644 index 0000000..70afa19 --- /dev/null +++ b/test/test_get_destination_connectors200_response.py @@ -0,0 +1,82 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_destination_connectors200_response import GetDestinationConnectors200Response + +class TestGetDestinationConnectors200Response(unittest.TestCase): + """GetDestinationConnectors200Response unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetDestinationConnectors200Response: + """Test GetDestinationConnectors200Response + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetDestinationConnectors200Response` + """ + model = GetDestinationConnectors200Response() + if include_optional: + return GetDestinationConnectors200Response( + destination_connectors = [ + vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ] + ) + else: + return GetDestinationConnectors200Response( + destination_connectors = [ + vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + ) + """ + + def testGetDestinationConnectors200Response(self): + """Test GetDestinationConnectors200Response""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_get_pipeline_events_response.py b/test/test_get_pipeline_events_response.py new file mode 
100644 index 0000000..dcb6975 --- /dev/null +++ b/test/test_get_pipeline_events_response.py @@ -0,0 +1,77 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_pipeline_events_response import GetPipelineEventsResponse + +class TestGetPipelineEventsResponse(unittest.TestCase): + """GetPipelineEventsResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetPipelineEventsResponse: + """Test GetPipelineEventsResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetPipelineEventsResponse` + """ + model = GetPipelineEventsResponse() + if include_optional: + return GetPipelineEventsResponse( + message = '', + next_token = '', + data = [ + vectorize_client.models.pipeline_events.PipelineEvents( + id = '', + type = '', + timestamp = '', + details = { + 'key' : null + }, + summary = { + 'key' : null + }, ) + ] + ) + else: + return GetPipelineEventsResponse( + message = '', + data = [ + vectorize_client.models.pipeline_events.PipelineEvents( + id = '', + type = '', + timestamp = '', + details = { + 'key' : null + }, + summary = { + 'key' : null + }, ) + ], + ) + """ + + def testGetPipelineEventsResponse(self): + """Test GetPipelineEventsResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_get_pipeline_metrics_response.py b/test/test_get_pipeline_metrics_response.py new file mode 100644 index 0000000..dec5311 --- /dev/null +++ 
b/test/test_get_pipeline_metrics_response.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_pipeline_metrics_response import GetPipelineMetricsResponse + +class TestGetPipelineMetricsResponse(unittest.TestCase): + """GetPipelineMetricsResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetPipelineMetricsResponse: + """Test GetPipelineMetricsResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetPipelineMetricsResponse` + """ + model = GetPipelineMetricsResponse() + if include_optional: + return GetPipelineMetricsResponse( + message = '', + data = [ + vectorize_client.models.pipeline_metrics.PipelineMetrics( + timestamp = '', + new_objects = 1.337, + changed_objects = 1.337, + deleted_objects = 1.337, ) + ] + ) + else: + return GetPipelineMetricsResponse( + message = '', + data = [ + vectorize_client.models.pipeline_metrics.PipelineMetrics( + timestamp = '', + new_objects = 1.337, + changed_objects = 1.337, + deleted_objects = 1.337, ) + ], + ) + """ + + def testGetPipelineMetricsResponse(self): + """Test GetPipelineMetricsResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_get_pipeline_response.py b/test/test_get_pipeline_response.py new file mode 100644 index 0000000..c25d03c --- /dev/null +++ b/test/test_get_pipeline_response.py @@ -0,0 +1,186 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for 
Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_pipeline_response import GetPipelineResponse + +class TestGetPipelineResponse(unittest.TestCase): + """GetPipelineResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetPipelineResponse: + """Test GetPipelineResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetPipelineResponse` + """ + model = GetPipelineResponse() + if include_optional: + return GetPipelineResponse( + message = '', + data = vectorize_client.models.pipeline_summary.PipelineSummary( + id = '', + name = '', + document_count = 1.337, + source_connector_auth_ids = [ + '' + ], + destination_connector_auth_ids = [ + '' + ], + ai_platform_auth_ids = [ + '' + ], + source_connector_types = [ + '' + ], + destination_connector_types = [ + '' + ], + ai_platform_types = [ + '' + ], + created_at = '', + created_by = '', + status = '', + config_doc = { + 'key' : null + }, + source_connectors = [ + vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + destination_connectors = [ + vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + ai_platforms = [ + vectorize_client.models.ai_platform.AIPlatform( + 
id = '', + type = '', + name = '', + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], ) + ) + else: + return GetPipelineResponse( + message = '', + data = vectorize_client.models.pipeline_summary.PipelineSummary( + id = '', + name = '', + document_count = 1.337, + source_connector_auth_ids = [ + '' + ], + destination_connector_auth_ids = [ + '' + ], + ai_platform_auth_ids = [ + '' + ], + source_connector_types = [ + '' + ], + destination_connector_types = [ + '' + ], + ai_platform_types = [ + '' + ], + created_at = '', + created_by = '', + status = '', + config_doc = { + 'key' : null + }, + source_connectors = [ + vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + destination_connectors = [ + vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + ai_platforms = [ + vectorize_client.models.ai_platform.AIPlatform( + id = '', + type = '', + name = '', + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], ), + ) + """ + + def testGetPipelineResponse(self): + """Test GetPipelineResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_get_pipelines400_response.py b/test/test_get_pipelines400_response.py new file 
mode 100644 index 0000000..8da3612 --- /dev/null +++ b/test/test_get_pipelines400_response.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_pipelines400_response import GetPipelines400Response + +class TestGetPipelines400Response(unittest.TestCase): + """GetPipelines400Response unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetPipelines400Response: + """Test GetPipelines400Response + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetPipelines400Response` + """ + model = GetPipelines400Response() + if include_optional: + return GetPipelines400Response( + error = '', + details = '', + failed_updates = [ + '' + ], + successful_updates = [ + '' + ] + ) + else: + return GetPipelines400Response( + error = '', + ) + """ + + def testGetPipelines400Response(self): + """Test GetPipelines400Response""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_get_pipelines_response.py b/test/test_get_pipelines_response.py new file mode 100644 index 0000000..37e25c7 --- /dev/null +++ b/test/test_get_pipelines_response.py @@ -0,0 +1,112 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_pipelines_response import GetPipelinesResponse + +class TestGetPipelinesResponse(unittest.TestCase): + """GetPipelinesResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetPipelinesResponse: + """Test GetPipelinesResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetPipelinesResponse` + """ + model = GetPipelinesResponse() + if include_optional: + return GetPipelinesResponse( + message = '', + data = [ + vectorize_client.models.pipeline_list_summary.PipelineListSummary( + id = '', + name = '', + document_count = 1.337, + source_connector_auth_ids = [ + '' + ], + destination_connector_auth_ids = [ + '' + ], + ai_platform_auth_ids = [ + '' + ], + source_connector_types = [ + '' + ], + destination_connector_types = [ + '' + ], + ai_platform_types = [ + '' + ], + created_at = '', + created_by = '', + status = '', + config_doc = { + 'key' : null + }, ) + ] + ) + else: + return GetPipelinesResponse( + message = '', + data = [ + vectorize_client.models.pipeline_list_summary.PipelineListSummary( + id = '', + name = '', + document_count = 1.337, + source_connector_auth_ids = [ + '' + ], + destination_connector_auth_ids = [ + '' + ], + ai_platform_auth_ids = [ + '' + ], + source_connector_types = [ + '' + ], + destination_connector_types = [ + '' + ], + ai_platform_types = [ + '' + ], + created_at = '', + created_by = '', + status = '', + config_doc = { + 'key' : null + }, ) + ], + ) + """ + + def testGetPipelinesResponse(self): + """Test GetPipelinesResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git 
a/test/test_get_source_connectors200_response.py b/test/test_get_source_connectors200_response.py new file mode 100644 index 0000000..93e0b90 --- /dev/null +++ b/test/test_get_source_connectors200_response.py @@ -0,0 +1,82 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_source_connectors200_response import GetSourceConnectors200Response + +class TestGetSourceConnectors200Response(unittest.TestCase): + """GetSourceConnectors200Response unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetSourceConnectors200Response: + """Test GetSourceConnectors200Response + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetSourceConnectors200Response` + """ + model = GetSourceConnectors200Response() + if include_optional: + return GetSourceConnectors200Response( + source_connectors = [ + vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ] + ) + else: + return GetSourceConnectors200Response( + source_connectors = [ + vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + ) + """ + + def 
testGetSourceConnectors200Response(self): + """Test GetSourceConnectors200Response""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_get_upload_files_response.py b/test/test_get_upload_files_response.py new file mode 100644 index 0000000..8501f98 --- /dev/null +++ b/test/test_get_upload_files_response.py @@ -0,0 +1,74 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.get_upload_files_response import GetUploadFilesResponse + +class TestGetUploadFilesResponse(unittest.TestCase): + """GetUploadFilesResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GetUploadFilesResponse: + """Test GetUploadFilesResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GetUploadFilesResponse` + """ + model = GetUploadFilesResponse() + if include_optional: + return GetUploadFilesResponse( + message = '', + files = [ + vectorize_client.models.upload_file.UploadFile( + key = '', + name = '', + size = 1.337, + extension = '', + last_modified = '', + metadata = { + 'key' : '' + }, ) + ] + ) + else: + return GetUploadFilesResponse( + message = '', + files = [ + vectorize_client.models.upload_file.UploadFile( + key = '', + name = '', + size = 1.337, + extension = '', + last_modified = '', + metadata = { + 'key' : '' + }, ) + ], + ) + """ + + def testGetUploadFilesResponse(self): + """Test GetUploadFilesResponse""" + # inst_req_only = 
self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_github.py b/test/test_github.py new file mode 100644 index 0000000..2ec9f0b --- /dev/null +++ b/test/test_github.py @@ -0,0 +1,74 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.github import Github + +class TestGithub(unittest.TestCase): + """Github unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Github: + """Test Github + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Github` + """ + model = Github() + if include_optional: + return Github( + name = '', + type = 'GITHUB', + config = vectorize_client.models.github_config.GITHUBConfig( + repositories = 'G/WzyBAw2ZuufUOHOEhA8IcFQXnuaZcdyyvKX7HzK', + include_pull_requests = True, + pull_request_status = 'all', + pull_request_labels = '', + include_issues = True, + issue_status = 'all', + issue_labels = '', + max_items = 1.337, + created_after = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), ) + ) + else: + return Github( + name = '', + type = 'GITHUB', + config = vectorize_client.models.github_config.GITHUBConfig( + repositories = 'G/WzyBAw2ZuufUOHOEhA8IcFQXnuaZcdyyvKX7HzK', + include_pull_requests = True, + pull_request_status = 'all', + pull_request_labels = '', + include_issues = True, + issue_status = 'all', + issue_labels = '', + max_items = 1.337, + created_after = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), ), + ) + """ + + def 
testGithub(self): + """Test Github""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_github1.py b/test/test_github1.py new file mode 100644 index 0000000..f8b4ac5 --- /dev/null +++ b/test/test_github1.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.github1 import Github1 + +class TestGithub1(unittest.TestCase): + """Github1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Github1: + """Test Github1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Github1` + """ + model = Github1() + if include_optional: + return Github1( + config = vectorize_client.models.github_config.GITHUBConfig( + repositories = 'G/WzyBAw2ZuufUOHOEhA8IcFQXnuaZcdyyvKX7HzK', + include_pull_requests = True, + pull_request_status = 'all', + pull_request_labels = '', + include_issues = True, + issue_status = 'all', + issue_labels = '', + max_items = 1.337, + created_after = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), ) + ) + else: + return Github1( + ) + """ + + def testGithub1(self): + """Test Github1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_github2.py b/test/test_github2.py new file mode 100644 index 0000000..21433fb --- /dev/null +++ b/test/test_github2.py @@ -0,0 +1,54 @@ +# coding: 
utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.github2 import Github2 + +class TestGithub2(unittest.TestCase): + """Github2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Github2: + """Test Github2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Github2` + """ + model = Github2() + if include_optional: + return Github2( + id = '', + type = 'GITHUB' + ) + else: + return Github2( + id = '', + type = 'GITHUB', + ) + """ + + def testGithub2(self): + """Test Github2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_github_auth_config.py b/test/test_github_auth_config.py new file mode 100644 index 0000000..561d32d --- /dev/null +++ b/test/test_github_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.github_auth_config import GITHUBAuthConfig + +class TestGITHUBAuthConfig(unittest.TestCase): + """GITHUBAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GITHUBAuthConfig: + """Test GITHUBAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GITHUBAuthConfig` + """ + model = GITHUBAuthConfig() + if include_optional: + return GITHUBAuthConfig( + name = '', + oauth_token = 'k' + ) + else: + return GITHUBAuthConfig( + name = '', + oauth_token = 'k', + ) + """ + + def testGITHUBAuthConfig(self): + """Test GITHUBAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_github_config.py b/test/test_github_config.py new file mode 100644 index 0000000..2b3dc07 --- /dev/null +++ b/test/test_github_config.py @@ -0,0 +1,65 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.github_config import GITHUBConfig + +class TestGITHUBConfig(unittest.TestCase): + """GITHUBConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GITHUBConfig: + """Test GITHUBConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GITHUBConfig` + """ + model = GITHUBConfig() + if include_optional: + return GITHUBConfig( + repositories = 'G/WzyBAw2ZuufUOHOEhA8IcFQXnuaZcdyyvKX7HzK', + include_pull_requests = True, + pull_request_status = 'all', + pull_request_labels = '', + include_issues = True, + issue_status = 'all', + issue_labels = '', + max_items = 1.337, + created_after = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date() + ) + else: + return GITHUBConfig( + repositories = 'G/WzyBAw2ZuufUOHOEhA8IcFQXnuaZcdyyvKX7HzK', + include_pull_requests = True, + pull_request_status = 'all', + include_issues = True, + issue_status = 'all', + max_items = 1.337, + ) + """ + + def testGITHUBConfig(self): + """Test GITHUBConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_cloud_storage.py b/test/test_google_cloud_storage.py new file mode 100644 index 0000000..f3494ae --- /dev/null +++ b/test/test_google_cloud_storage.py @@ -0,0 +1,68 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_cloud_storage import GoogleCloudStorage + +class TestGoogleCloudStorage(unittest.TestCase): + """GoogleCloudStorage unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleCloudStorage: + """Test GoogleCloudStorage + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleCloudStorage` + """ + model = GoogleCloudStorage() + if include_optional: + return GoogleCloudStorage( + name = '', + type = 'GCS', + config = vectorize_client.models.gcs_config.GCSConfig( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', ) + ) + else: + return GoogleCloudStorage( + name = '', + type = 'GCS', + config = vectorize_client.models.gcs_config.GCSConfig( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', ), + ) + """ + + def testGoogleCloudStorage(self): + """Test GoogleCloudStorage""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_cloud_storage1.py b/test/test_google_cloud_storage1.py new file mode 100644 index 0000000..2e2ad8a --- /dev/null +++ b/test/test_google_cloud_storage1.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_cloud_storage1 import GoogleCloudStorage1 + +class TestGoogleCloudStorage1(unittest.TestCase): + """GoogleCloudStorage1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleCloudStorage1: + """Test GoogleCloudStorage1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleCloudStorage1` + """ + model = GoogleCloudStorage1() + if include_optional: + return GoogleCloudStorage1( + config = vectorize_client.models.gcs_config.GCSConfig( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', ) + ) + else: + return GoogleCloudStorage1( + ) + """ + + def testGoogleCloudStorage1(self): + """Test GoogleCloudStorage1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_cloud_storage2.py b/test/test_google_cloud_storage2.py new file mode 100644 index 0000000..ae5ab18 --- /dev/null +++ b/test/test_google_cloud_storage2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_cloud_storage2 import GoogleCloudStorage2 + +class TestGoogleCloudStorage2(unittest.TestCase): + """GoogleCloudStorage2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleCloudStorage2: + """Test GoogleCloudStorage2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleCloudStorage2` + """ + model = GoogleCloudStorage2() + if include_optional: + return GoogleCloudStorage2( + id = '', + type = 'GCS' + ) + else: + return GoogleCloudStorage2( + id = '', + type = 'GCS', + ) + """ + + def testGoogleCloudStorage2(self): + """Test GoogleCloudStorage2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive.py b/test/test_google_drive.py new file mode 100644 index 0000000..36c5022 --- /dev/null +++ b/test/test_google_drive.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive import GoogleDrive + +class TestGoogleDrive(unittest.TestCase): + """GoogleDrive unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDrive: + """Test GoogleDrive + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDrive` + """ + model = GoogleDrive() + if include_optional: + return GoogleDrive( + name = '', + type = 'GOOGLE_DRIVE', + config = vectorize_client.models.google_drive_config.GOOGLE_DRIVEConfig( + file_extensions = pdf, + root_parents = 'https://drive.google.com/drive/u/80728/folders/v2D2ylmgd10Z3UB6UkJSISSB512iz', + idle_time = 1.337, ) + ) + else: + return GoogleDrive( + name = '', + type = 'GOOGLE_DRIVE', + config = vectorize_client.models.google_drive_config.GOOGLE_DRIVEConfig( + file_extensions = pdf, + root_parents = 'https://drive.google.com/drive/u/80728/folders/v2D2ylmgd10Z3UB6UkJSISSB512iz', + idle_time = 1.337, ), + ) + """ + + def testGoogleDrive(self): + """Test GoogleDrive""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive1.py b/test/test_google_drive1.py new file mode 100644 index 0000000..a197466 --- /dev/null +++ b/test/test_google_drive1.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive1 import GoogleDrive1 + +class TestGoogleDrive1(unittest.TestCase): + """GoogleDrive1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDrive1: + """Test GoogleDrive1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDrive1` + """ + model = GoogleDrive1() + if include_optional: + return GoogleDrive1( + config = vectorize_client.models.google_drive_config.GOOGLE_DRIVEConfig( + file_extensions = pdf, + root_parents = 'https://drive.google.com/drive/u/80728/folders/v2D2ylmgd10Z3UB6UkJSISSB512iz', + idle_time = 1.337, ) + ) + else: + return GoogleDrive1( + ) + """ + + def testGoogleDrive1(self): + """Test GoogleDrive1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive2.py b/test/test_google_drive2.py new file mode 100644 index 0000000..3060751 --- /dev/null +++ b/test/test_google_drive2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive2 import GoogleDrive2 + +class TestGoogleDrive2(unittest.TestCase): + """GoogleDrive2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDrive2: + """Test GoogleDrive2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDrive2` + """ + model = GoogleDrive2() + if include_optional: + return GoogleDrive2( + id = '', + type = 'GOOGLE_DRIVE' + ) + else: + return GoogleDrive2( + id = '', + type = 'GOOGLE_DRIVE', + ) + """ + + def testGoogleDrive2(self): + """Test GoogleDrive2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive_o_auth.py b/test/test_google_drive_o_auth.py new file mode 100644 index 0000000..c510945 --- /dev/null +++ b/test/test_google_drive_o_auth.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive_o_auth import GoogleDriveOAuth + +class TestGoogleDriveOAuth(unittest.TestCase): + """GoogleDriveOAuth unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDriveOAuth: + """Test GoogleDriveOAuth + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDriveOAuth` + """ + model = GoogleDriveOAuth() + if include_optional: + return GoogleDriveOAuth( + name = '', + type = 'GOOGLE_DRIVE_OAUTH', + config = vectorize_client.models.google_drive_oauth_config.GOOGLE_DRIVE_OAUTHConfig( + file_extensions = pdf, + idle_time = 1.337, ) + ) + else: + return GoogleDriveOAuth( + name = '', + type = 'GOOGLE_DRIVE_OAUTH', + config = vectorize_client.models.google_drive_oauth_config.GOOGLE_DRIVE_OAUTHConfig( + file_extensions = pdf, + idle_time = 1.337, ), + ) + """ + + def testGoogleDriveOAuth(self): + """Test GoogleDriveOAuth""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive_o_auth1.py b/test/test_google_drive_o_auth1.py new file mode 100644 index 0000000..3465b54 --- /dev/null +++ b/test/test_google_drive_o_auth1.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive_o_auth1 import GoogleDriveOAuth1 + +class TestGoogleDriveOAuth1(unittest.TestCase): + """GoogleDriveOAuth1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDriveOAuth1: + """Test GoogleDriveOAuth1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDriveOAuth1` + """ + model = GoogleDriveOAuth1() + if include_optional: + return GoogleDriveOAuth1( + config = vectorize_client.models.google_drive_oauth_config.GOOGLE_DRIVE_OAUTHConfig( + file_extensions = pdf, + idle_time = 1.337, ) + ) + else: + return GoogleDriveOAuth1( + ) + """ + + def testGoogleDriveOAuth1(self): + """Test GoogleDriveOAuth1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive_o_auth2.py b/test/test_google_drive_o_auth2.py new file mode 100644 index 0000000..8beff4a --- /dev/null +++ b/test/test_google_drive_o_auth2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive_o_auth2 import GoogleDriveOAuth2 + +class TestGoogleDriveOAuth2(unittest.TestCase): + """GoogleDriveOAuth2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDriveOAuth2: + """Test GoogleDriveOAuth2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDriveOAuth2` + """ + model = GoogleDriveOAuth2() + if include_optional: + return GoogleDriveOAuth2( + id = '', + type = 'GOOGLE_DRIVE_OAUTH' + ) + else: + return GoogleDriveOAuth2( + id = '', + type = 'GOOGLE_DRIVE_OAUTH', + ) + """ + + def testGoogleDriveOAuth2(self): + """Test GoogleDriveOAuth2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive_oauth_multi.py b/test/test_google_drive_oauth_multi.py new file mode 100644 index 0000000..2c1d346 --- /dev/null +++ b/test/test_google_drive_oauth_multi.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive_oauth_multi import GoogleDriveOauthMulti + +class TestGoogleDriveOauthMulti(unittest.TestCase): + """GoogleDriveOauthMulti unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDriveOauthMulti: + """Test GoogleDriveOauthMulti + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDriveOauthMulti` + """ + model = GoogleDriveOauthMulti() + if include_optional: + return GoogleDriveOauthMulti( + name = '', + type = 'GOOGLE_DRIVE_OAUTH_MULTI', + config = vectorize_client.models.google_drive_oauth_multi_config.GOOGLE_DRIVE_OAUTH_MULTIConfig( + file_extensions = pdf, + idle_time = 1.337, ) + ) + else: + return GoogleDriveOauthMulti( + name = '', + type = 'GOOGLE_DRIVE_OAUTH_MULTI', + config = vectorize_client.models.google_drive_oauth_multi_config.GOOGLE_DRIVE_OAUTH_MULTIConfig( + file_extensions = pdf, + idle_time = 1.337, ), + ) + """ + + def testGoogleDriveOauthMulti(self): + """Test GoogleDriveOauthMulti""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive_oauth_multi1.py b/test/test_google_drive_oauth_multi1.py new file mode 100644 index 0000000..d9b2f4d --- /dev/null +++ b/test/test_google_drive_oauth_multi1.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive_oauth_multi1 import GoogleDriveOauthMulti1 + +class TestGoogleDriveOauthMulti1(unittest.TestCase): + """GoogleDriveOauthMulti1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDriveOauthMulti1: + """Test GoogleDriveOauthMulti1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDriveOauthMulti1` + """ + model = GoogleDriveOauthMulti1() + if include_optional: + return GoogleDriveOauthMulti1( + config = vectorize_client.models.google_drive_oauth_multi_config.GOOGLE_DRIVE_OAUTH_MULTIConfig( + file_extensions = pdf, + idle_time = 1.337, ) + ) + else: + return GoogleDriveOauthMulti1( + ) + """ + + def testGoogleDriveOauthMulti1(self): + """Test GoogleDriveOauthMulti1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive_oauth_multi2.py b/test/test_google_drive_oauth_multi2.py new file mode 100644 index 0000000..572a497 --- /dev/null +++ b/test/test_google_drive_oauth_multi2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive_oauth_multi2 import GoogleDriveOauthMulti2 + +class TestGoogleDriveOauthMulti2(unittest.TestCase): + """GoogleDriveOauthMulti2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDriveOauthMulti2: + """Test GoogleDriveOauthMulti2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDriveOauthMulti2` + """ + model = GoogleDriveOauthMulti2() + if include_optional: + return GoogleDriveOauthMulti2( + id = '', + type = 'GOOGLE_DRIVE_OAUTH_MULTI' + ) + else: + return GoogleDriveOauthMulti2( + id = '', + type = 'GOOGLE_DRIVE_OAUTH_MULTI', + ) + """ + + def testGoogleDriveOauthMulti2(self): + """Test GoogleDriveOauthMulti2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive_oauth_multi_custom.py b/test/test_google_drive_oauth_multi_custom.py new file mode 100644 index 0000000..29f59ce --- /dev/null +++ b/test/test_google_drive_oauth_multi_custom.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive_oauth_multi_custom import GoogleDriveOauthMultiCustom + +class TestGoogleDriveOauthMultiCustom(unittest.TestCase): + """GoogleDriveOauthMultiCustom unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDriveOauthMultiCustom: + """Test GoogleDriveOauthMultiCustom + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDriveOauthMultiCustom` + """ + model = GoogleDriveOauthMultiCustom() + if include_optional: + return GoogleDriveOauthMultiCustom( + name = '', + type = 'GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM', + config = vectorize_client.models.google_drive_oauth_multi_custom_config.GOOGLE_DRIVE_OAUTH_MULTI_CUSTOMConfig( + file_extensions = pdf, + idle_time = 1.337, ) + ) + else: + return GoogleDriveOauthMultiCustom( + name = '', + type = 'GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM', + config = vectorize_client.models.google_drive_oauth_multi_custom_config.GOOGLE_DRIVE_OAUTH_MULTI_CUSTOMConfig( + file_extensions = pdf, + idle_time = 1.337, ), + ) + """ + + def testGoogleDriveOauthMultiCustom(self): + """Test GoogleDriveOauthMultiCustom""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive_oauth_multi_custom1.py b/test/test_google_drive_oauth_multi_custom1.py new file mode 100644 index 0000000..d14cd57 --- /dev/null +++ b/test/test_google_drive_oauth_multi_custom1.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive_oauth_multi_custom1 import GoogleDriveOauthMultiCustom1 + +class TestGoogleDriveOauthMultiCustom1(unittest.TestCase): + """GoogleDriveOauthMultiCustom1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDriveOauthMultiCustom1: + """Test GoogleDriveOauthMultiCustom1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDriveOauthMultiCustom1` + """ + model = GoogleDriveOauthMultiCustom1() + if include_optional: + return GoogleDriveOauthMultiCustom1( + config = vectorize_client.models.google_drive_oauth_multi_custom_config.GOOGLE_DRIVE_OAUTH_MULTI_CUSTOMConfig( + file_extensions = pdf, + idle_time = 1.337, ) + ) + else: + return GoogleDriveOauthMultiCustom1( + ) + """ + + def testGoogleDriveOauthMultiCustom1(self): + """Test GoogleDriveOauthMultiCustom1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_google_drive_oauth_multi_custom2.py b/test/test_google_drive_oauth_multi_custom2.py new file mode 100644 index 0000000..305f7f3 --- /dev/null +++ b/test/test_google_drive_oauth_multi_custom2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.google_drive_oauth_multi_custom2 import GoogleDriveOauthMultiCustom2 + +class TestGoogleDriveOauthMultiCustom2(unittest.TestCase): + """GoogleDriveOauthMultiCustom2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GoogleDriveOauthMultiCustom2: + """Test GoogleDriveOauthMultiCustom2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GoogleDriveOauthMultiCustom2` + """ + model = GoogleDriveOauthMultiCustom2() + if include_optional: + return GoogleDriveOauthMultiCustom2( + id = '', + type = 'GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM' + ) + else: + return GoogleDriveOauthMultiCustom2( + id = '', + type = 'GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM', + ) + """ + + def testGoogleDriveOauthMultiCustom2(self): + """Test GoogleDriveOauthMultiCustom2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_googledrive_auth_config.py b/test/test_googledrive_auth_config.py new file mode 100644 index 0000000..b2f93a1 --- /dev/null +++ b/test/test_googledrive_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.googledrive_auth_config import GOOGLEDRIVEAuthConfig + +class TestGOOGLEDRIVEAuthConfig(unittest.TestCase): + """GOOGLEDRIVEAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GOOGLEDRIVEAuthConfig: + """Test GOOGLEDRIVEAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GOOGLEDRIVEAuthConfig` + """ + model = GOOGLEDRIVEAuthConfig() + if include_optional: + return GOOGLEDRIVEAuthConfig( + name = '', + service_account_json = '' + ) + else: + return GOOGLEDRIVEAuthConfig( + name = '', + service_account_json = '', + ) + """ + + def testGOOGLEDRIVEAuthConfig(self): + """Test GOOGLEDRIVEAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_googledrive_config.py b/test/test_googledrive_config.py new file mode 100644 index 0000000..296a7f6 --- /dev/null +++ b/test/test_googledrive_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.googledrive_config import GOOGLEDRIVEConfig + +class TestGOOGLEDRIVEConfig(unittest.TestCase): + """GOOGLEDRIVEConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GOOGLEDRIVEConfig: + """Test GOOGLEDRIVEConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GOOGLEDRIVEConfig` + """ + model = GOOGLEDRIVEConfig() + if include_optional: + return GOOGLEDRIVEConfig( + file_extensions = pdf, + root_parents = 'https://drive.google.com/drive/u/80728/folders/v2D2ylmgd10Z3UB6UkJSISSB512iz', + idle_time = 1.337 + ) + else: + return GOOGLEDRIVEConfig( + file_extensions = pdf, + ) + """ + + def testGOOGLEDRIVEConfig(self): + """Test GOOGLEDRIVEConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_googledriveoauth_auth_config.py b/test/test_googledriveoauth_auth_config.py new file mode 100644 index 0000000..6798928 --- /dev/null +++ b/test/test_googledriveoauth_auth_config.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.googledriveoauth_auth_config import GOOGLEDRIVEOAUTHAuthConfig + +class TestGOOGLEDRIVEOAUTHAuthConfig(unittest.TestCase): + """GOOGLEDRIVEOAUTHAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GOOGLEDRIVEOAUTHAuthConfig: + """Test GOOGLEDRIVEOAUTHAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GOOGLEDRIVEOAUTHAuthConfig` + """ + model = GOOGLEDRIVEOAUTHAuthConfig() + if include_optional: + return GOOGLEDRIVEOAUTHAuthConfig( + name = '', + authorized_user = '', + selection_details = '', + edited_users = '', + reconnect_users = '' + ) + else: + return GOOGLEDRIVEOAUTHAuthConfig( + name = '', + selection_details = '', + ) + """ + + def testGOOGLEDRIVEOAUTHAuthConfig(self): + """Test GOOGLEDRIVEOAUTHAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_googledriveoauth_config.py b/test/test_googledriveoauth_config.py new file mode 100644 index 0000000..7b265c7 --- /dev/null +++ b/test/test_googledriveoauth_config.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.googledriveoauth_config import GOOGLEDRIVEOAUTHConfig + +class TestGOOGLEDRIVEOAUTHConfig(unittest.TestCase): + """GOOGLEDRIVEOAUTHConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GOOGLEDRIVEOAUTHConfig: + """Test GOOGLEDRIVEOAUTHConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GOOGLEDRIVEOAUTHConfig` + """ + model = GOOGLEDRIVEOAUTHConfig() + if include_optional: + return GOOGLEDRIVEOAUTHConfig( + file_extensions = pdf, + idle_time = 1.337 + ) + else: + return GOOGLEDRIVEOAUTHConfig( + file_extensions = pdf, + ) + """ + + def testGOOGLEDRIVEOAUTHConfig(self): + """Test GOOGLEDRIVEOAUTHConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_googledriveoauthmulti_auth_config.py b/test/test_googledriveoauthmulti_auth_config.py new file mode 100644 index 0000000..71734e2 --- /dev/null +++ b/test/test_googledriveoauthmulti_auth_config.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.googledriveoauthmulti_auth_config import GOOGLEDRIVEOAUTHMULTIAuthConfig + +class TestGOOGLEDRIVEOAUTHMULTIAuthConfig(unittest.TestCase): + """GOOGLEDRIVEOAUTHMULTIAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GOOGLEDRIVEOAUTHMULTIAuthConfig: + """Test GOOGLEDRIVEOAUTHMULTIAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GOOGLEDRIVEOAUTHMULTIAuthConfig` + """ + model = GOOGLEDRIVEOAUTHMULTIAuthConfig() + if include_optional: + return GOOGLEDRIVEOAUTHMULTIAuthConfig( + name = '', + authorized_users = '', + edited_users = '', + deleted_users = '' + ) + else: + return GOOGLEDRIVEOAUTHMULTIAuthConfig( + name = '', + ) + """ + + def testGOOGLEDRIVEOAUTHMULTIAuthConfig(self): + """Test GOOGLEDRIVEOAUTHMULTIAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_googledriveoauthmulti_config.py b/test/test_googledriveoauthmulti_config.py new file mode 100644 index 0000000..2f1c859 --- /dev/null +++ b/test/test_googledriveoauthmulti_config.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.googledriveoauthmulti_config import GOOGLEDRIVEOAUTHMULTIConfig + +class TestGOOGLEDRIVEOAUTHMULTIConfig(unittest.TestCase): + """GOOGLEDRIVEOAUTHMULTIConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GOOGLEDRIVEOAUTHMULTIConfig: + """Test GOOGLEDRIVEOAUTHMULTIConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GOOGLEDRIVEOAUTHMULTIConfig` + """ + model = GOOGLEDRIVEOAUTHMULTIConfig() + if include_optional: + return GOOGLEDRIVEOAUTHMULTIConfig( + file_extensions = pdf, + idle_time = 1.337 + ) + else: + return GOOGLEDRIVEOAUTHMULTIConfig( + file_extensions = pdf, + ) + """ + + def testGOOGLEDRIVEOAUTHMULTIConfig(self): + """Test GOOGLEDRIVEOAUTHMULTIConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_googledriveoauthmulticustom_auth_config.py b/test/test_googledriveoauthmulticustom_auth_config.py new file mode 100644 index 0000000..7a8f7ba --- /dev/null +++ b/test/test_googledriveoauthmulticustom_auth_config.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.googledriveoauthmulticustom_auth_config import GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig + +class TestGOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig(unittest.TestCase): + """GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig: + """Test GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig` + """ + model = GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig() + if include_optional: + return GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig( + name = '', + oauth2_client_id = '', + oauth2_client_secret = '', + authorized_users = '', + edited_users = '', + deleted_users = '' + ) + else: + return GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig( + name = '', + oauth2_client_id = '', + oauth2_client_secret = '', + ) + """ + + def testGOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig(self): + """Test GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_googledriveoauthmulticustom_config.py b/test/test_googledriveoauthmulticustom_config.py new file mode 100644 index 0000000..f52057d --- /dev/null +++ b/test/test_googledriveoauthmulticustom_config.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.googledriveoauthmulticustom_config import GOOGLEDRIVEOAUTHMULTICUSTOMConfig + +class TestGOOGLEDRIVEOAUTHMULTICUSTOMConfig(unittest.TestCase): + """GOOGLEDRIVEOAUTHMULTICUSTOMConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GOOGLEDRIVEOAUTHMULTICUSTOMConfig: + """Test GOOGLEDRIVEOAUTHMULTICUSTOMConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GOOGLEDRIVEOAUTHMULTICUSTOMConfig` + """ + model = GOOGLEDRIVEOAUTHMULTICUSTOMConfig() + if include_optional: + return GOOGLEDRIVEOAUTHMULTICUSTOMConfig( + file_extensions = pdf, + idle_time = 1.337 + ) + else: + return GOOGLEDRIVEOAUTHMULTICUSTOMConfig( + file_extensions = pdf, + ) + """ + + def testGOOGLEDRIVEOAUTHMULTICUSTOMConfig(self): + """Test GOOGLEDRIVEOAUTHMULTICUSTOMConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_intercom.py b/test/test_intercom.py new file mode 100644 index 0000000..15d55ba --- /dev/null +++ b/test/test_intercom.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.intercom import Intercom + +class TestIntercom(unittest.TestCase): + """Intercom unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Intercom: + """Test Intercom + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Intercom` + """ + model = Intercom() + if include_optional: + return Intercom( + name = '', + type = 'INTERCOM', + config = vectorize_client.models.intercom_config.INTERCOMConfig( + created_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + updated_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + state = open, ) + ) + else: + return Intercom( + name = '', + type = 'INTERCOM', + config = vectorize_client.models.intercom_config.INTERCOMConfig( + created_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + updated_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + state = open, ), + ) + """ + + def testIntercom(self): + """Test Intercom""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_intercom1.py b/test/test_intercom1.py new file mode 100644 index 0000000..084d1b8 --- /dev/null +++ b/test/test_intercom1.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.intercom1 import Intercom1 + +class TestIntercom1(unittest.TestCase): + """Intercom1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Intercom1: + """Test Intercom1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Intercom1` + """ + model = Intercom1() + if include_optional: + return Intercom1( + config = vectorize_client.models.intercom_config.INTERCOMConfig( + created_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + updated_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + state = open, ) + ) + else: + return Intercom1( + ) + """ + + def testIntercom1(self): + """Test Intercom1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_intercom2.py b/test/test_intercom2.py new file mode 100644 index 0000000..94ed189 --- /dev/null +++ b/test/test_intercom2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.intercom2 import Intercom2 + +class TestIntercom2(unittest.TestCase): + """Intercom2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Intercom2: + """Test Intercom2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Intercom2` + """ + model = Intercom2() + if include_optional: + return Intercom2( + id = '', + type = 'INTERCOM' + ) + else: + return Intercom2( + id = '', + type = 'INTERCOM', + ) + """ + + def testIntercom2(self): + """Test Intercom2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_intercom_auth_config.py b/test/test_intercom_auth_config.py new file mode 100644 index 0000000..b0d1d0f --- /dev/null +++ b/test/test_intercom_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.intercom_auth_config import INTERCOMAuthConfig + +class TestINTERCOMAuthConfig(unittest.TestCase): + """INTERCOMAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> INTERCOMAuthConfig: + """Test INTERCOMAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `INTERCOMAuthConfig` + """ + model = INTERCOMAuthConfig() + if include_optional: + return INTERCOMAuthConfig( + name = '', + token = '' + ) + else: + return INTERCOMAuthConfig( + name = '', + token = '', + ) + """ + + def testINTERCOMAuthConfig(self): + """Test INTERCOMAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_intercom_config.py b/test/test_intercom_config.py new file mode 100644 index 0000000..a369c52 --- /dev/null +++ b/test/test_intercom_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.intercom_config import INTERCOMConfig + +class TestINTERCOMConfig(unittest.TestCase): + """INTERCOMConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> INTERCOMConfig: + """Test INTERCOMConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `INTERCOMConfig` + """ + model = INTERCOMConfig() + if include_optional: + return INTERCOMConfig( + created_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + updated_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + state = open + ) + else: + return INTERCOMConfig( + created_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + ) + """ + + def testINTERCOMConfig(self): + """Test INTERCOMConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_metadata_extraction_strategy.py b/test/test_metadata_extraction_strategy.py new file mode 100644 index 0000000..41f9cff --- /dev/null +++ b/test/test_metadata_extraction_strategy.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.metadata_extraction_strategy import MetadataExtractionStrategy + +class TestMetadataExtractionStrategy(unittest.TestCase): + """MetadataExtractionStrategy unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MetadataExtractionStrategy: + """Test MetadataExtractionStrategy + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MetadataExtractionStrategy` + """ + model = MetadataExtractionStrategy() + if include_optional: + return MetadataExtractionStrategy( + schemas = [ + vectorize_client.models.metadata_extraction_strategy_schema.MetadataExtractionStrategySchema( + id = '', + schema = '', ) + ], + infer_schema = True + ) + else: + return MetadataExtractionStrategy( + ) + """ + + def testMetadataExtractionStrategy(self): + """Test MetadataExtractionStrategy""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_metadata_extraction_strategy_schema.py b/test/test_metadata_extraction_strategy_schema.py new file mode 100644 index 0000000..be310e1 --- /dev/null +++ b/test/test_metadata_extraction_strategy_schema.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.metadata_extraction_strategy_schema import MetadataExtractionStrategySchema + +class TestMetadataExtractionStrategySchema(unittest.TestCase): + """MetadataExtractionStrategySchema unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MetadataExtractionStrategySchema: + """Test MetadataExtractionStrategySchema + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MetadataExtractionStrategySchema` + """ + model = MetadataExtractionStrategySchema() + if include_optional: + return MetadataExtractionStrategySchema( + id = '', + var_schema = '' + ) + else: + return MetadataExtractionStrategySchema( + id = '', + var_schema = '', + ) + """ + + def testMetadataExtractionStrategySchema(self): + """Test MetadataExtractionStrategySchema""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_milvus.py b/test/test_milvus.py new file mode 100644 index 0000000..23b6421 --- /dev/null +++ b/test/test_milvus.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.milvus import Milvus + +class TestMilvus(unittest.TestCase): + """Milvus unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Milvus: + """Test Milvus + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Milvus` + """ + model = Milvus() + if include_optional: + return Milvus( + name = '', + type = 'MILVUS', + config = vectorize_client.models.milvus_config.MILVUSConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', ) + ) + else: + return Milvus( + name = '', + type = 'MILVUS', + config = vectorize_client.models.milvus_config.MILVUSConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', ), + ) + """ + + def testMilvus(self): + """Test Milvus""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_milvus1.py b/test/test_milvus1.py new file mode 100644 index 0000000..56ff48b --- /dev/null +++ b/test/test_milvus1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.milvus1 import Milvus1 + +class TestMilvus1(unittest.TestCase): + """Milvus1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Milvus1: + """Test Milvus1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Milvus1` + """ + model = Milvus1() + if include_optional: + return Milvus1( + config = vectorize_client.models.milvus_config.MILVUSConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', ) + ) + else: + return Milvus1( + ) + """ + + def testMilvus1(self): + """Test Milvus1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_milvus2.py b/test/test_milvus2.py new file mode 100644 index 0000000..ab5ae37 --- /dev/null +++ b/test/test_milvus2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.milvus2 import Milvus2 + +class TestMilvus2(unittest.TestCase): + """Milvus2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Milvus2: + """Test Milvus2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Milvus2` + """ + model = Milvus2() + if include_optional: + return Milvus2( + id = '', + type = 'MILVUS' + ) + else: + return Milvus2( + id = '', + type = 'MILVUS', + ) + """ + + def testMilvus2(self): + """Test Milvus2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_milvus_auth_config.py b/test/test_milvus_auth_config.py new file mode 100644 index 0000000..74078c2 --- /dev/null +++ b/test/test_milvus_auth_config.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.milvus_auth_config import MILVUSAuthConfig + +class TestMILVUSAuthConfig(unittest.TestCase): + """MILVUSAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MILVUSAuthConfig: + """Test MILVUSAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MILVUSAuthConfig` + """ + model = MILVUSAuthConfig() + if include_optional: + return MILVUSAuthConfig( + name = '', + url = '', + token = '', + username = '', + password = '' + ) + else: + return MILVUSAuthConfig( + name = '', + url = '', + ) + """ + + def testMILVUSAuthConfig(self): + """Test MILVUSAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_milvus_config.py b/test/test_milvus_config.py new file mode 100644 index 0000000..a057670 --- /dev/null +++ b/test/test_milvus_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.milvus_config import MILVUSConfig + +class TestMILVUSConfig(unittest.TestCase): + """MILVUSConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MILVUSConfig: + """Test MILVUSConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MILVUSConfig` + """ + model = MILVUSConfig() + if include_optional: + return MILVUSConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua' + ) + else: + return MILVUSConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', + ) + """ + + def testMILVUSConfig(self): + """Test MILVUSConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_n8_n_config.py b/test/test_n8_n_config.py new file mode 100644 index 0000000..7d4f027 --- /dev/null +++ b/test/test_n8_n_config.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.n8_n_config import N8NConfig + +class TestN8NConfig(unittest.TestCase): + """N8NConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> N8NConfig: + """Test N8NConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `N8NConfig` + """ + model = N8NConfig() + if include_optional: + return N8NConfig( + account = '', + webhook_path = '', + headers = { + 'key' : '' + } + ) + else: + return N8NConfig( + account = '', + webhook_path = '', + ) + """ + + def testN8NConfig(self): + """Test N8NConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion.py b/test/test_notion.py new file mode 100644 index 0000000..ebe1919 --- /dev/null +++ b/test/test_notion.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion import Notion + +class TestNotion(unittest.TestCase): + """Notion unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Notion: + """Test Notion + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Notion` + """ + model = Notion() + if include_optional: + return Notion( + name = '', + type = 'NOTION', + config = vectorize_client.models.notion_config.NOTIONConfig( + select_resources = '', + database_ids = '', + database_names = '', + page_ids = '', + page_names = '', ) + ) + else: + return Notion( + name = '', + type = 'NOTION', + config = vectorize_client.models.notion_config.NOTIONConfig( + select_resources = '', + database_ids = '', + database_names = '', + page_ids = '', + page_names = '', ), + ) + """ + + def testNotion(self): + """Test Notion""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion1.py b/test/test_notion1.py new file mode 100644 index 0000000..924af9c --- /dev/null +++ b/test/test_notion1.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion1 import Notion1 + +class TestNotion1(unittest.TestCase): + """Notion1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Notion1: + """Test Notion1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Notion1` + """ + model = Notion1() + if include_optional: + return Notion1( + config = vectorize_client.models.notion_config.NOTIONConfig( + select_resources = '', + database_ids = '', + database_names = '', + page_ids = '', + page_names = '', ) + ) + else: + return Notion1( + ) + """ + + def testNotion1(self): + """Test Notion1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion2.py b/test/test_notion2.py new file mode 100644 index 0000000..151dc87 --- /dev/null +++ b/test/test_notion2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion2 import Notion2 + +class TestNotion2(unittest.TestCase): + """Notion2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Notion2: + """Test Notion2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Notion2` + """ + model = Notion2() + if include_optional: + return Notion2( + id = '', + type = 'NOTION' + ) + else: + return Notion2( + id = '', + type = 'NOTION', + ) + """ + + def testNotion2(self): + """Test Notion2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion_auth_config.py b/test/test_notion_auth_config.py new file mode 100644 index 0000000..c1c3fd8 --- /dev/null +++ b/test/test_notion_auth_config.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion_auth_config import NOTIONAuthConfig + +class TestNOTIONAuthConfig(unittest.TestCase): + """NOTIONAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NOTIONAuthConfig: + """Test NOTIONAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NOTIONAuthConfig` + """ + model = NOTIONAuthConfig() + if include_optional: + return NOTIONAuthConfig( + name = '', + access_token = '', + s3id = '', + edited_token = '' + ) + else: + return NOTIONAuthConfig( + name = '', + access_token = '', + ) + """ + + def testNOTIONAuthConfig(self): + """Test NOTIONAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion_config.py b/test/test_notion_config.py new file mode 100644 index 0000000..8be6f55 --- /dev/null +++ b/test/test_notion_config.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion_config import NOTIONConfig + +class TestNOTIONConfig(unittest.TestCase): + """NOTIONConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NOTIONConfig: + """Test NOTIONConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NOTIONConfig` + """ + model = NOTIONConfig() + if include_optional: + return NOTIONConfig( + select_resources = '', + database_ids = '', + database_names = '', + page_ids = '', + page_names = '' + ) + else: + return NOTIONConfig( + select_resources = '', + database_ids = '', + database_names = '', + page_ids = '', + page_names = '', + ) + """ + + def testNOTIONConfig(self): + """Test NOTIONConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion_oauth_multi.py b/test/test_notion_oauth_multi.py new file mode 100644 index 0000000..c9aab1f --- /dev/null +++ b/test/test_notion_oauth_multi.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion_oauth_multi import NotionOauthMulti + +class TestNotionOauthMulti(unittest.TestCase): + """NotionOauthMulti unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotionOauthMulti: + """Test NotionOauthMulti + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotionOauthMulti` + """ + model = NotionOauthMulti() + if include_optional: + return NotionOauthMulti( + name = '', + type = 'NOTION_OAUTH_MULTI', + config = vectorize_client.models.notion_oauth_multi_auth_config.NOTION_OAUTH_MULTIAuthConfig( + name = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ) + ) + else: + return NotionOauthMulti( + name = '', + type = 'NOTION_OAUTH_MULTI', + config = vectorize_client.models.notion_oauth_multi_auth_config.NOTION_OAUTH_MULTIAuthConfig( + name = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ), + ) + """ + + def testNotionOauthMulti(self): + """Test NotionOauthMulti""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion_oauth_multi1.py b/test/test_notion_oauth_multi1.py new file mode 100644 index 0000000..d6ec8e8 --- /dev/null +++ b/test/test_notion_oauth_multi1.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion_oauth_multi1 import NotionOauthMulti1 + +class TestNotionOauthMulti1(unittest.TestCase): + """NotionOauthMulti1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotionOauthMulti1: + """Test NotionOauthMulti1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotionOauthMulti1` + """ + model = NotionOauthMulti1() + if include_optional: + return NotionOauthMulti1( + config = vectorize_client.models.notion_oauth_multi_auth_config.NOTION_OAUTH_MULTIAuthConfig( + name = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ) + ) + else: + return NotionOauthMulti1( + ) + """ + + def testNotionOauthMulti1(self): + """Test NotionOauthMulti1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion_oauth_multi2.py b/test/test_notion_oauth_multi2.py new file mode 100644 index 0000000..d64e433 --- /dev/null +++ b/test/test_notion_oauth_multi2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion_oauth_multi2 import NotionOauthMulti2 + +class TestNotionOauthMulti2(unittest.TestCase): + """NotionOauthMulti2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotionOauthMulti2: + """Test NotionOauthMulti2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotionOauthMulti2` + """ + model = NotionOauthMulti2() + if include_optional: + return NotionOauthMulti2( + id = '', + type = 'NOTION_OAUTH_MULTI' + ) + else: + return NotionOauthMulti2( + id = '', + type = 'NOTION_OAUTH_MULTI', + ) + """ + + def testNotionOauthMulti2(self): + """Test NotionOauthMulti2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion_oauth_multi_custom.py b/test/test_notion_oauth_multi_custom.py new file mode 100644 index 0000000..ee4c4fa --- /dev/null +++ b/test/test_notion_oauth_multi_custom.py @@ -0,0 +1,68 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion_oauth_multi_custom import NotionOauthMultiCustom + +class TestNotionOauthMultiCustom(unittest.TestCase): + """NotionOauthMultiCustom unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotionOauthMultiCustom: + """Test NotionOauthMultiCustom + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotionOauthMultiCustom` + """ + model = NotionOauthMultiCustom() + if include_optional: + return NotionOauthMultiCustom( + name = '', + type = 'NOTION_OAUTH_MULTI_CUSTOM', + config = vectorize_client.models.notion_oauth_multi_custom_auth_config.NOTION_OAUTH_MULTI_CUSTOMAuthConfig( + name = '', + client_id = '', + client_secret = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ) + ) + else: + return NotionOauthMultiCustom( + name = '', + type = 'NOTION_OAUTH_MULTI_CUSTOM', + config = vectorize_client.models.notion_oauth_multi_custom_auth_config.NOTION_OAUTH_MULTI_CUSTOMAuthConfig( + name = '', + client_id = '', + client_secret = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ), + ) + """ + + def testNotionOauthMultiCustom(self): + """Test NotionOauthMultiCustom""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion_oauth_multi_custom1.py b/test/test_notion_oauth_multi_custom1.py new file mode 100644 index 0000000..42f5a56 --- /dev/null +++ b/test/test_notion_oauth_multi_custom1.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + 
Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion_oauth_multi_custom1 import NotionOauthMultiCustom1 + +class TestNotionOauthMultiCustom1(unittest.TestCase): + """NotionOauthMultiCustom1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotionOauthMultiCustom1: + """Test NotionOauthMultiCustom1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotionOauthMultiCustom1` + """ + model = NotionOauthMultiCustom1() + if include_optional: + return NotionOauthMultiCustom1( + config = vectorize_client.models.notion_oauth_multi_custom_auth_config.NOTION_OAUTH_MULTI_CUSTOMAuthConfig( + name = '', + client_id = '', + client_secret = '', + authorized_users = '', + edited_users = '', + deleted_users = '', ) + ) + else: + return NotionOauthMultiCustom1( + ) + """ + + def testNotionOauthMultiCustom1(self): + """Test NotionOauthMultiCustom1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notion_oauth_multi_custom2.py b/test/test_notion_oauth_multi_custom2.py new file mode 100644 index 0000000..1567e65 --- /dev/null +++ b/test/test_notion_oauth_multi_custom2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notion_oauth_multi_custom2 import NotionOauthMultiCustom2 + +class TestNotionOauthMultiCustom2(unittest.TestCase): + """NotionOauthMultiCustom2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotionOauthMultiCustom2: + """Test NotionOauthMultiCustom2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotionOauthMultiCustom2` + """ + model = NotionOauthMultiCustom2() + if include_optional: + return NotionOauthMultiCustom2( + id = '', + type = 'NOTION_OAUTH_MULTI_CUSTOM' + ) + else: + return NotionOauthMultiCustom2( + id = '', + type = 'NOTION_OAUTH_MULTI_CUSTOM', + ) + """ + + def testNotionOauthMultiCustom2(self): + """Test NotionOauthMultiCustom2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notionoauthmulti_auth_config.py b/test/test_notionoauthmulti_auth_config.py new file mode 100644 index 0000000..344c6ee --- /dev/null +++ b/test/test_notionoauthmulti_auth_config.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notionoauthmulti_auth_config import NOTIONOAUTHMULTIAuthConfig + +class TestNOTIONOAUTHMULTIAuthConfig(unittest.TestCase): + """NOTIONOAUTHMULTIAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NOTIONOAUTHMULTIAuthConfig: + """Test NOTIONOAUTHMULTIAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NOTIONOAUTHMULTIAuthConfig` + """ + model = NOTIONOAUTHMULTIAuthConfig() + if include_optional: + return NOTIONOAUTHMULTIAuthConfig( + name = '', + authorized_users = '', + edited_users = '', + deleted_users = '' + ) + else: + return NOTIONOAUTHMULTIAuthConfig( + name = '', + ) + """ + + def testNOTIONOAUTHMULTIAuthConfig(self): + """Test NOTIONOAUTHMULTIAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_notionoauthmulticustom_auth_config.py b/test/test_notionoauthmulticustom_auth_config.py new file mode 100644 index 0000000..b84d533 --- /dev/null +++ b/test/test_notionoauthmulticustom_auth_config.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.notionoauthmulticustom_auth_config import NOTIONOAUTHMULTICUSTOMAuthConfig + +class TestNOTIONOAUTHMULTICUSTOMAuthConfig(unittest.TestCase): + """NOTIONOAUTHMULTICUSTOMAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NOTIONOAUTHMULTICUSTOMAuthConfig: + """Test NOTIONOAUTHMULTICUSTOMAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NOTIONOAUTHMULTICUSTOMAuthConfig` + """ + model = NOTIONOAUTHMULTICUSTOMAuthConfig() + if include_optional: + return NOTIONOAUTHMULTICUSTOMAuthConfig( + name = '', + client_id = '', + client_secret = '', + authorized_users = '', + edited_users = '', + deleted_users = '' + ) + else: + return NOTIONOAUTHMULTICUSTOMAuthConfig( + name = '', + client_id = '', + client_secret = '', + ) + """ + + def testNOTIONOAUTHMULTICUSTOMAuthConfig(self): + """Test NOTIONOAUTHMULTICUSTOMAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_one_drive.py b/test/test_one_drive.py new file mode 100644 index 0000000..77671a6 --- /dev/null +++ b/test/test_one_drive.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.one_drive import OneDrive + +class TestOneDrive(unittest.TestCase): + """OneDrive unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> OneDrive: + """Test OneDrive + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `OneDrive` + """ + model = OneDrive() + if include_optional: + return OneDrive( + name = '', + type = 'ONE_DRIVE', + config = vectorize_client.models.one_drive_config.ONE_DRIVEConfig( + file_extensions = pdf, + path_prefix = '', ) + ) + else: + return OneDrive( + name = '', + type = 'ONE_DRIVE', + config = vectorize_client.models.one_drive_config.ONE_DRIVEConfig( + file_extensions = pdf, + path_prefix = '', ), + ) + """ + + def testOneDrive(self): + """Test OneDrive""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_one_drive1.py b/test/test_one_drive1.py new file mode 100644 index 0000000..a42dc38 --- /dev/null +++ b/test/test_one_drive1.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.one_drive1 import OneDrive1 + +class TestOneDrive1(unittest.TestCase): + """OneDrive1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> OneDrive1: + """Test OneDrive1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `OneDrive1` + """ + model = OneDrive1() + if include_optional: + return OneDrive1( + config = vectorize_client.models.one_drive_config.ONE_DRIVEConfig( + file_extensions = pdf, + path_prefix = '', ) + ) + else: + return OneDrive1( + ) + """ + + def testOneDrive1(self): + """Test OneDrive1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_one_drive2.py b/test/test_one_drive2.py new file mode 100644 index 0000000..eb029d3 --- /dev/null +++ b/test/test_one_drive2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.one_drive2 import OneDrive2 + +class TestOneDrive2(unittest.TestCase): + """OneDrive2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> OneDrive2: + """Test OneDrive2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `OneDrive2` + """ + model = OneDrive2() + if include_optional: + return OneDrive2( + id = '', + type = 'ONE_DRIVE' + ) + else: + return OneDrive2( + id = '', + type = 'ONE_DRIVE', + ) + """ + + def testOneDrive2(self): + """Test OneDrive2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_onedrive_auth_config.py b/test/test_onedrive_auth_config.py new file mode 100644 index 0000000..b52dc8b --- /dev/null +++ b/test/test_onedrive_auth_config.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.onedrive_auth_config import ONEDRIVEAuthConfig + +class TestONEDRIVEAuthConfig(unittest.TestCase): + """ONEDRIVEAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ONEDRIVEAuthConfig: + """Test ONEDRIVEAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ONEDRIVEAuthConfig` + """ + model = ONEDRIVEAuthConfig() + if include_optional: + return ONEDRIVEAuthConfig( + name = '', + ms_client_id = '', + ms_tenant_id = '', + ms_client_secret = '', + users = '' + ) + else: + return ONEDRIVEAuthConfig( + name = '', + ms_client_id = '', + ms_tenant_id = '', + ms_client_secret = '', + users = '', + ) + """ + + def testONEDRIVEAuthConfig(self): + """Test ONEDRIVEAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_onedrive_config.py b/test/test_onedrive_config.py new file mode 100644 index 0000000..8f949ea --- /dev/null +++ b/test/test_onedrive_config.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.onedrive_config import ONEDRIVEConfig + +class TestONEDRIVEConfig(unittest.TestCase): + """ONEDRIVEConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ONEDRIVEConfig: + """Test ONEDRIVEConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ONEDRIVEConfig` + """ + model = ONEDRIVEConfig() + if include_optional: + return ONEDRIVEConfig( + file_extensions = pdf, + path_prefix = '' + ) + else: + return ONEDRIVEConfig( + file_extensions = pdf, + ) + """ + + def testONEDRIVEConfig(self): + """Test ONEDRIVEConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_openai.py b/test/test_openai.py new file mode 100644 index 0000000..66ad1e5 --- /dev/null +++ b/test/test_openai.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.openai import Openai + +class TestOpenai(unittest.TestCase): + """Openai unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Openai: + """Test Openai + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Openai` + """ + model = Openai() + if include_optional: + return Openai( + name = '', + type = 'OPENAI', + config = vectorize_client.models.openai_auth_config.OPENAIAuthConfig( + name = '', + key = 'k', ) + ) + else: + return Openai( + name = '', + type = 'OPENAI', + config = vectorize_client.models.openai_auth_config.OPENAIAuthConfig( + name = '', + key = 'k', ), + ) + """ + + def testOpenai(self): + """Test Openai""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_openai1.py b/test/test_openai1.py new file mode 100644 index 0000000..d502fa1 --- /dev/null +++ b/test/test_openai1.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.openai1 import Openai1 + +class TestOpenai1(unittest.TestCase): + """Openai1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Openai1: + """Test Openai1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Openai1` + """ + model = Openai1() + if include_optional: + return Openai1( + config = vectorize_client.models.openai_auth_config.OPENAIAuthConfig( + name = '', + key = 'k', ) + ) + else: + return Openai1( + ) + """ + + def testOpenai1(self): + """Test Openai1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_openai2.py b/test/test_openai2.py new file mode 100644 index 0000000..be73c62 --- /dev/null +++ b/test/test_openai2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.openai2 import Openai2 + +class TestOpenai2(unittest.TestCase): + """Openai2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Openai2: + """Test Openai2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Openai2` + """ + model = Openai2() + if include_optional: + return Openai2( + id = '', + type = 'OPENAI' + ) + else: + return Openai2( + id = '', + type = 'OPENAI', + ) + """ + + def testOpenai2(self): + """Test Openai2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_openai_auth_config.py b/test/test_openai_auth_config.py new file mode 100644 index 0000000..5a3cab3 --- /dev/null +++ b/test/test_openai_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.openai_auth_config import OPENAIAuthConfig + +class TestOPENAIAuthConfig(unittest.TestCase): + """OPENAIAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> OPENAIAuthConfig: + """Test OPENAIAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `OPENAIAuthConfig` + """ + model = OPENAIAuthConfig() + if include_optional: + return OPENAIAuthConfig( + name = '', + key = 'k' + ) + else: + return OPENAIAuthConfig( + name = '', + key = 'k', + ) + """ + + def testOPENAIAuthConfig(self): + """Test OPENAIAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pinecone.py b/test/test_pinecone.py new file mode 100644 index 0000000..f0a4604 --- /dev/null +++ b/test/test_pinecone.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pinecone import Pinecone + +class TestPinecone(unittest.TestCase): + """Pinecone unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Pinecone: + """Test Pinecone + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Pinecone` + """ + model = Pinecone() + if include_optional: + return Pinecone( + name = '', + type = 'PINECONE', + config = ERROR_TO_EXAMPLE_VALUE + ) + else: + return Pinecone( + name = '', + type = 'PINECONE', + config = ERROR_TO_EXAMPLE_VALUE, + ) + """ + + def testPinecone(self): + """Test Pinecone""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pinecone1.py b/test/test_pinecone1.py new file mode 100644 index 0000000..5944dd3 --- /dev/null +++ b/test/test_pinecone1.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pinecone1 import Pinecone1 + +class TestPinecone1(unittest.TestCase): + """Pinecone1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Pinecone1: + """Test Pinecone1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Pinecone1` + """ + model = Pinecone1() + if include_optional: + return Pinecone1( + config = ERROR_TO_EXAMPLE_VALUE + ) + else: + return Pinecone1( + ) + """ + + def testPinecone1(self): + """Test Pinecone1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pinecone2.py b/test/test_pinecone2.py new file mode 100644 index 0000000..528fcdb --- /dev/null +++ b/test/test_pinecone2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pinecone2 import Pinecone2 + +class TestPinecone2(unittest.TestCase): + """Pinecone2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Pinecone2: + """Test Pinecone2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Pinecone2` + """ + model = Pinecone2() + if include_optional: + return Pinecone2( + id = '', + type = 'PINECONE' + ) + else: + return Pinecone2( + id = '', + type = 'PINECONE', + ) + """ + + def testPinecone2(self): + """Test Pinecone2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pinecone_auth_config.py b/test/test_pinecone_auth_config.py new file mode 100644 index 0000000..09673d6 --- /dev/null +++ b/test/test_pinecone_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pinecone_auth_config import PINECONEAuthConfig + +class TestPINECONEAuthConfig(unittest.TestCase): + """PINECONEAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PINECONEAuthConfig: + """Test PINECONEAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PINECONEAuthConfig` + """ + model = PINECONEAuthConfig() + if include_optional: + return PINECONEAuthConfig( + name = '', + api_key = 'k' + ) + else: + return PINECONEAuthConfig( + name = '', + api_key = 'k', + ) + """ + + def testPINECONEAuthConfig(self): + """Test PINECONEAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pinecone_config.py b/test/test_pinecone_config.py new file mode 100644 index 0000000..b5f2a7d --- /dev/null +++ b/test/test_pinecone_config.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pinecone_config import PINECONEConfig + +class TestPINECONEConfig(unittest.TestCase): + """PINECONEConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PINECONEConfig: + """Test PINECONEConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PINECONEConfig` + """ + model = PINECONEConfig() + if include_optional: + return PINECONEConfig( + index = ERROR_TO_EXAMPLE_VALUE, + namespace = ERROR_TO_EXAMPLE_VALUE + ) + else: + return PINECONEConfig( + index = ERROR_TO_EXAMPLE_VALUE, + ) + """ + + def testPINECONEConfig(self): + """Test PINECONEConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pipeline_ai_platform_request_inner.py b/test/test_pipeline_ai_platform_request_inner.py new file mode 100644 index 0000000..0109900 --- /dev/null +++ b/test/test_pipeline_ai_platform_request_inner.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pipeline_ai_platform_request_inner import PipelineAIPlatformRequestInner + +class TestPipelineAIPlatformRequestInner(unittest.TestCase): + """PipelineAIPlatformRequestInner unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PipelineAIPlatformRequestInner: + """Test PipelineAIPlatformRequestInner + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PipelineAIPlatformRequestInner` + """ + model = PipelineAIPlatformRequestInner() + if include_optional: + return PipelineAIPlatformRequestInner( + id = '', + type = 'BEDROCK' + ) + else: + return PipelineAIPlatformRequestInner( + id = '', + type = 'BEDROCK', + ) + """ + + def testPipelineAIPlatformRequestInner(self): + """Test PipelineAIPlatformRequestInner""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pipeline_configuration_schema.py b/test/test_pipeline_configuration_schema.py new file mode 100644 index 0000000..1d6c5a1 --- /dev/null +++ b/test/test_pipeline_configuration_schema.py @@ -0,0 +1,74 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pipeline_configuration_schema import PipelineConfigurationSchema + +class TestPipelineConfigurationSchema(unittest.TestCase): + """PipelineConfigurationSchema unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PipelineConfigurationSchema: + """Test PipelineConfigurationSchema + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PipelineConfigurationSchema` + """ + model = PipelineConfigurationSchema() + if include_optional: + return PipelineConfigurationSchema( + source_connectors = [ + null + ], + destination_connector = [ + null + ], + ai_platform = [ + null + ], + pipeline_name = '0', + schedule = vectorize_client.models.schedule_schema.ScheduleSchema( + type = 'manual', ) + ) + else: + return PipelineConfigurationSchema( + source_connectors = [ + null + ], + destination_connector = [ + null + ], + ai_platform = [ + null + ], + pipeline_name = '0', + schedule = vectorize_client.models.schedule_schema.ScheduleSchema( + type = 'manual', ), + ) + """ + + def testPipelineConfigurationSchema(self): + """Test PipelineConfigurationSchema""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pipeline_destination_connector_request_inner.py b/test/test_pipeline_destination_connector_request_inner.py new file mode 100644 index 0000000..9698263 --- /dev/null +++ b/test/test_pipeline_destination_connector_request_inner.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the 
class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pipeline_destination_connector_request_inner import PipelineDestinationConnectorRequestInner + +class TestPipelineDestinationConnectorRequestInner(unittest.TestCase): + """PipelineDestinationConnectorRequestInner unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PipelineDestinationConnectorRequestInner: + """Test PipelineDestinationConnectorRequestInner + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PipelineDestinationConnectorRequestInner` + """ + model = PipelineDestinationConnectorRequestInner() + if include_optional: + return PipelineDestinationConnectorRequestInner( + id = '', + type = 'CAPELLA' + ) + else: + return PipelineDestinationConnectorRequestInner( + id = '', + type = 'CAPELLA', + ) + """ + + def testPipelineDestinationConnectorRequestInner(self): + """Test PipelineDestinationConnectorRequestInner""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pipeline_events.py b/test/test_pipeline_events.py new file mode 100644 index 0000000..05e5ee9 --- /dev/null +++ b/test/test_pipeline_events.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pipeline_events import PipelineEvents + +class TestPipelineEvents(unittest.TestCase): + """PipelineEvents unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PipelineEvents: + """Test PipelineEvents + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PipelineEvents` + """ + model = PipelineEvents() + if include_optional: + return PipelineEvents( + id = '', + type = '', + timestamp = '', + details = { + 'key' : null + }, + summary = { + 'key' : null + } + ) + else: + return PipelineEvents( + id = '', + type = '', + timestamp = '', + ) + """ + + def testPipelineEvents(self): + """Test PipelineEvents""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pipeline_list_summary.py b/test/test_pipeline_list_summary.py new file mode 100644 index 0000000..e2b5b44 --- /dev/null +++ b/test/test_pipeline_list_summary.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pipeline_list_summary import PipelineListSummary + +class TestPipelineListSummary(unittest.TestCase): + """PipelineListSummary unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PipelineListSummary: + """Test PipelineListSummary + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PipelineListSummary` + """ + model = PipelineListSummary() + if include_optional: + return PipelineListSummary( + id = '', + name = '', + document_count = 1.337, + source_connector_auth_ids = [ + '' + ], + destination_connector_auth_ids = [ + '' + ], + ai_platform_auth_ids = [ + '' + ], + source_connector_types = [ + '' + ], + destination_connector_types = [ + '' + ], + ai_platform_types = [ + '' + ], + created_at = '', + created_by = '', + status = '', + config_doc = { + 'key' : null + } + ) + else: + return PipelineListSummary( + id = '', + name = '', + document_count = 1.337, + source_connector_auth_ids = [ + '' + ], + destination_connector_auth_ids = [ + '' + ], + ai_platform_auth_ids = [ + '' + ], + source_connector_types = [ + '' + ], + destination_connector_types = [ + '' + ], + ai_platform_types = [ + '' + ], + created_at = '', + created_by = '', + ) + """ + + def testPipelineListSummary(self): + """Test PipelineListSummary""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pipeline_metrics.py b/test/test_pipeline_metrics.py new file mode 100644 index 0000000..43c82d0 --- /dev/null +++ b/test/test_pipeline_metrics.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI 
document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pipeline_metrics import PipelineMetrics + +class TestPipelineMetrics(unittest.TestCase): + """PipelineMetrics unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PipelineMetrics: + """Test PipelineMetrics + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PipelineMetrics` + """ + model = PipelineMetrics() + if include_optional: + return PipelineMetrics( + timestamp = '', + new_objects = 1.337, + changed_objects = 1.337, + deleted_objects = 1.337 + ) + else: + return PipelineMetrics( + timestamp = '', + new_objects = 1.337, + changed_objects = 1.337, + deleted_objects = 1.337, + ) + """ + + def testPipelineMetrics(self): + """Test PipelineMetrics""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pipeline_source_connector_request_inner.py b/test/test_pipeline_source_connector_request_inner.py new file mode 100644 index 0000000..4985746 --- /dev/null +++ b/test/test_pipeline_source_connector_request_inner.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pipeline_source_connector_request_inner import PipelineSourceConnectorRequestInner + +class TestPipelineSourceConnectorRequestInner(unittest.TestCase): + """PipelineSourceConnectorRequestInner unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PipelineSourceConnectorRequestInner: + """Test PipelineSourceConnectorRequestInner + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PipelineSourceConnectorRequestInner` + """ + model = PipelineSourceConnectorRequestInner() + if include_optional: + return PipelineSourceConnectorRequestInner( + id = '', + type = 'AWS_S3' + ) + else: + return PipelineSourceConnectorRequestInner( + id = '', + type = 'AWS_S3', + ) + """ + + def testPipelineSourceConnectorRequestInner(self): + """Test PipelineSourceConnectorRequestInner""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pipeline_summary.py b/test/test_pipeline_summary.py new file mode 100644 index 0000000..4ffa1f8 --- /dev/null +++ b/test/test_pipeline_summary.py @@ -0,0 +1,196 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.pipeline_summary import PipelineSummary + +class TestPipelineSummary(unittest.TestCase): + """PipelineSummary unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PipelineSummary: + """Test PipelineSummary + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PipelineSummary` + """ + model = PipelineSummary() + if include_optional: + return PipelineSummary( + id = '', + name = '', + document_count = 1.337, + source_connector_auth_ids = [ + '' + ], + destination_connector_auth_ids = [ + '' + ], + ai_platform_auth_ids = [ + '' + ], + source_connector_types = [ + '' + ], + destination_connector_types = [ + '' + ], + ai_platform_types = [ + '' + ], + created_at = '', + created_by = '', + status = '', + config_doc = { + 'key' : null + }, + source_connectors = [ + vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + destination_connectors = [ + vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + ai_platforms = [ + vectorize_client.models.ai_platform.AIPlatform( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = 
'', + verification_status = '', ) + ] + ) + else: + return PipelineSummary( + id = '', + name = '', + document_count = 1.337, + source_connector_auth_ids = [ + '' + ], + destination_connector_auth_ids = [ + '' + ], + ai_platform_auth_ids = [ + '' + ], + source_connector_types = [ + '' + ], + destination_connector_types = [ + '' + ], + ai_platform_types = [ + '' + ], + created_at = '', + created_by = '', + source_connectors = [ + vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + destination_connectors = [ + vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + ai_platforms = [ + vectorize_client.models.ai_platform.AIPlatform( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ) + ], + ) + """ + + def testPipelineSummary(self): + """Test PipelineSummary""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_pipelines_api.py b/test/test_pipelines_api.py new file mode 100644 index 0000000..527a454 --- /dev/null +++ b/test/test_pipelines_api.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by 
OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.api.pipelines_api import PipelinesApi + + +class TestPipelinesApi(unittest.TestCase): + """PipelinesApi unit test stubs""" + + def setUp(self) -> None: + self.api = PipelinesApi() + + def tearDown(self) -> None: + pass + + def test_create_pipeline(self) -> None: + """Test case for create_pipeline + + Create a new pipeline + """ + pass + + def test_delete_pipeline(self) -> None: + """Test case for delete_pipeline + + Delete a pipeline + """ + pass + + def test_get_deep_research_result(self) -> None: + """Test case for get_deep_research_result + + Get deep research result + """ + pass + + def test_get_pipeline(self) -> None: + """Test case for get_pipeline + + Get a pipeline + """ + pass + + def test_get_pipeline_events(self) -> None: + """Test case for get_pipeline_events + + Get pipeline events + """ + pass + + def test_get_pipeline_metrics(self) -> None: + """Test case for get_pipeline_metrics + + Get pipeline metrics + """ + pass + + def test_get_pipelines(self) -> None: + """Test case for get_pipelines + + Get all pipelines + """ + pass + + def test_retrieve_documents(self) -> None: + """Test case for retrieve_documents + + Retrieve documents from a pipeline + """ + pass + + def test_start_deep_research(self) -> None: + """Test case for start_deep_research + + Start a deep research + """ + pass + + def test_start_pipeline(self) -> None: + """Test case for start_pipeline + + Start a pipeline + """ + pass + + def test_stop_pipeline(self) -> None: + """Test case for stop_pipeline + + Stop a pipeline + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_postgresql.py b/test/test_postgresql.py new file mode 100644 index 0000000..215569f --- /dev/null +++ b/test/test_postgresql.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The 
version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.postgresql import Postgresql + +class TestPostgresql(unittest.TestCase): + """Postgresql unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Postgresql: + """Test Postgresql + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Postgresql` + """ + model = Postgresql() + if include_optional: + return Postgresql( + name = '', + type = 'POSTGRESQL', + config = vectorize_client.models.postgresql_config.POSTGRESQLConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1', ) + ) + else: + return Postgresql( + name = '', + type = 'POSTGRESQL', + config = vectorize_client.models.postgresql_config.POSTGRESQLConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1', ), + ) + """ + + def testPostgresql(self): + """Test Postgresql""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_postgresql1.py b/test/test_postgresql1.py new file mode 100644 index 0000000..5b2223f --- /dev/null +++ b/test/test_postgresql1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.postgresql1 import Postgresql1 + +class TestPostgresql1(unittest.TestCase): + """Postgresql1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Postgresql1: + """Test Postgresql1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Postgresql1` + """ + model = Postgresql1() + if include_optional: + return Postgresql1( + config = vectorize_client.models.postgresql_config.POSTGRESQLConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1', ) + ) + else: + return Postgresql1( + ) + """ + + def testPostgresql1(self): + """Test Postgresql1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_postgresql2.py b/test/test_postgresql2.py new file mode 100644 index 0000000..4f25ce9 --- /dev/null +++ b/test/test_postgresql2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.postgresql2 import Postgresql2 + +class TestPostgresql2(unittest.TestCase): + """Postgresql2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Postgresql2: + """Test Postgresql2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Postgresql2` + """ + model = Postgresql2() + if include_optional: + return Postgresql2( + id = '', + type = 'POSTGRESQL' + ) + else: + return Postgresql2( + id = '', + type = 'POSTGRESQL', + ) + """ + + def testPostgresql2(self): + """Test Postgresql2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_postgresql_auth_config.py b/test/test_postgresql_auth_config.py new file mode 100644 index 0000000..9fee382 --- /dev/null +++ b/test/test_postgresql_auth_config.py @@ -0,0 +1,61 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.postgresql_auth_config import POSTGRESQLAuthConfig + +class TestPOSTGRESQLAuthConfig(unittest.TestCase): + """POSTGRESQLAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> POSTGRESQLAuthConfig: + """Test POSTGRESQLAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `POSTGRESQLAuthConfig` + """ + model = POSTGRESQLAuthConfig() + if include_optional: + return POSTGRESQLAuthConfig( + name = '', + host = '', + port = 1.337, + database = '', + username = '', + password = '' + ) + else: + return POSTGRESQLAuthConfig( + name = '', + host = '', + database = '', + username = '', + password = '', + ) + """ + + def testPOSTGRESQLAuthConfig(self): + """Test POSTGRESQLAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_postgresql_config.py b/test/test_postgresql_config.py new file mode 100644 index 0000000..36c7ef9 --- /dev/null +++ b/test/test_postgresql_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.postgresql_config import POSTGRESQLConfig + +class TestPOSTGRESQLConfig(unittest.TestCase): + """POSTGRESQLConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> POSTGRESQLConfig: + """Test POSTGRESQLConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `POSTGRESQLConfig` + """ + model = POSTGRESQLConfig() + if include_optional: + return POSTGRESQLConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1' + ) + else: + return POSTGRESQLConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1', + ) + """ + + def testPOSTGRESQLConfig(self): + """Test POSTGRESQLConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_qdrant.py b/test/test_qdrant.py new file mode 100644 index 0000000..570ac8b --- /dev/null +++ b/test/test_qdrant.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.qdrant import Qdrant + +class TestQdrant(unittest.TestCase): + """Qdrant unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Qdrant: + """Test Qdrant + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Qdrant` + """ + model = Qdrant() + if include_optional: + return Qdrant( + name = '', + type = 'QDRANT', + config = vectorize_client.models.qdrant_config.QDRANTConfig( + collection = 'zBAMDTMv2D2ylmgd10Z3UB', ) + ) + else: + return Qdrant( + name = '', + type = 'QDRANT', + config = vectorize_client.models.qdrant_config.QDRANTConfig( + collection = 'zBAMDTMv2D2ylmgd10Z3UB', ), + ) + """ + + def testQdrant(self): + """Test Qdrant""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_qdrant1.py b/test/test_qdrant1.py new file mode 100644 index 0000000..990996f --- /dev/null +++ b/test/test_qdrant1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.qdrant1 import Qdrant1 + +class TestQdrant1(unittest.TestCase): + """Qdrant1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Qdrant1: + """Test Qdrant1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Qdrant1` + """ + model = Qdrant1() + if include_optional: + return Qdrant1( + config = vectorize_client.models.qdrant_config.QDRANTConfig( + collection = 'zBAMDTMv2D2ylmgd10Z3UB', ) + ) + else: + return Qdrant1( + ) + """ + + def testQdrant1(self): + """Test Qdrant1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_qdrant2.py b/test/test_qdrant2.py new file mode 100644 index 0000000..b3c0fa9 --- /dev/null +++ b/test/test_qdrant2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.qdrant2 import Qdrant2 + +class TestQdrant2(unittest.TestCase): + """Qdrant2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Qdrant2: + """Test Qdrant2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Qdrant2` + """ + model = Qdrant2() + if include_optional: + return Qdrant2( + id = '', + type = 'QDRANT' + ) + else: + return Qdrant2( + id = '', + type = 'QDRANT', + ) + """ + + def testQdrant2(self): + """Test Qdrant2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_qdrant_auth_config.py b/test/test_qdrant_auth_config.py new file mode 100644 index 0000000..80494e6 --- /dev/null +++ b/test/test_qdrant_auth_config.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.qdrant_auth_config import QDRANTAuthConfig + +class TestQDRANTAuthConfig(unittest.TestCase): + """QDRANTAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> QDRANTAuthConfig: + """Test QDRANTAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `QDRANTAuthConfig` + """ + model = QDRANTAuthConfig() + if include_optional: + return QDRANTAuthConfig( + name = '', + host = '', + api_key = 'k' + ) + else: + return QDRANTAuthConfig( + name = '', + host = '', + api_key = 'k', + ) + """ + + def testQDRANTAuthConfig(self): + """Test QDRANTAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_qdrant_config.py b/test/test_qdrant_config.py new file mode 100644 index 0000000..860644a --- /dev/null +++ b/test/test_qdrant_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.qdrant_config import QDRANTConfig + +class TestQDRANTConfig(unittest.TestCase): + """QDRANTConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> QDRANTConfig: + """Test QDRANTConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `QDRANTConfig` + """ + model = QDRANTConfig() + if include_optional: + return QDRANTConfig( + collection = 'zBAMDTMv2D2ylmgd10Z3UB' + ) + else: + return QDRANTConfig( + collection = 'zBAMDTMv2D2ylmgd10Z3UB', + ) + """ + + def testQDRANTConfig(self): + """Test QDRANTConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_remove_user_from_source_connector_request.py b/test/test_remove_user_from_source_connector_request.py new file mode 100644 index 0000000..54d12be --- /dev/null +++ b/test/test_remove_user_from_source_connector_request.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.remove_user_from_source_connector_request import RemoveUserFromSourceConnectorRequest + +class TestRemoveUserFromSourceConnectorRequest(unittest.TestCase): + """RemoveUserFromSourceConnectorRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RemoveUserFromSourceConnectorRequest: + """Test RemoveUserFromSourceConnectorRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RemoveUserFromSourceConnectorRequest` + """ + model = RemoveUserFromSourceConnectorRequest() + if include_optional: + return RemoveUserFromSourceConnectorRequest( + user_id = '' + ) + else: + return RemoveUserFromSourceConnectorRequest( + user_id = '', + ) + """ + + def testRemoveUserFromSourceConnectorRequest(self): + """Test RemoveUserFromSourceConnectorRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_remove_user_from_source_connector_response.py b/test/test_remove_user_from_source_connector_response.py new file mode 100644 index 0000000..6135534 --- /dev/null +++ b/test/test_remove_user_from_source_connector_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.remove_user_from_source_connector_response import RemoveUserFromSourceConnectorResponse + +class TestRemoveUserFromSourceConnectorResponse(unittest.TestCase): + """RemoveUserFromSourceConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RemoveUserFromSourceConnectorResponse: + """Test RemoveUserFromSourceConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RemoveUserFromSourceConnectorResponse` + """ + model = RemoveUserFromSourceConnectorResponse() + if include_optional: + return RemoveUserFromSourceConnectorResponse( + message = '' + ) + else: + return RemoveUserFromSourceConnectorResponse( + message = '', + ) + """ + + def testRemoveUserFromSourceConnectorResponse(self): + """Test RemoveUserFromSourceConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_retrieve_context.py b/test/test_retrieve_context.py new file mode 100644 index 0000000..dc7aca6 --- /dev/null +++ b/test/test_retrieve_context.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.retrieve_context import RetrieveContext + +class TestRetrieveContext(unittest.TestCase): + """RetrieveContext unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RetrieveContext: + """Test RetrieveContext + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RetrieveContext` + """ + model = RetrieveContext() + if include_optional: + return RetrieveContext( + messages = [ + vectorize_client.models.retrieve_context_message.RetrieveContextMessage( + role = '', + content = '', ) + ] + ) + else: + return RetrieveContext( + messages = [ + vectorize_client.models.retrieve_context_message.RetrieveContextMessage( + role = '', + content = '', ) + ], + ) + """ + + def testRetrieveContext(self): + """Test RetrieveContext""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_retrieve_context_message.py b/test/test_retrieve_context_message.py new file mode 100644 index 0000000..65ad2d6 --- /dev/null +++ b/test/test_retrieve_context_message.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.retrieve_context_message import RetrieveContextMessage + +class TestRetrieveContextMessage(unittest.TestCase): + """RetrieveContextMessage unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RetrieveContextMessage: + """Test RetrieveContextMessage + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RetrieveContextMessage` + """ + model = RetrieveContextMessage() + if include_optional: + return RetrieveContextMessage( + role = '', + content = '' + ) + else: + return RetrieveContextMessage( + role = '', + content = '', + ) + """ + + def testRetrieveContextMessage(self): + """Test RetrieveContextMessage""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_retrieve_documents_request.py b/test/test_retrieve_documents_request.py new file mode 100644 index 0000000..3f87296 --- /dev/null +++ b/test/test_retrieve_documents_request.py @@ -0,0 +1,74 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.retrieve_documents_request import RetrieveDocumentsRequest + +class TestRetrieveDocumentsRequest(unittest.TestCase): + """RetrieveDocumentsRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RetrieveDocumentsRequest: + """Test RetrieveDocumentsRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RetrieveDocumentsRequest` + """ + model = RetrieveDocumentsRequest() + if include_optional: + return RetrieveDocumentsRequest( + question = '', + num_results = 1, + rerank = True, + metadata_filters = [ + { + 'key' : null + } + ], + context = vectorize_client.models.retrieve_context.RetrieveContext( + messages = [ + vectorize_client.models.retrieve_context_message.RetrieveContextMessage( + role = '', + content = '', ) + ], ), + advanced_query = vectorize_client.models.advanced_query.AdvancedQuery( + mode = 'text', + text_fields = [ + '' + ], + match_type = 'match', + text_boost = 1.337, + filters = vectorize_client.models.filters.filters(), ) + ) + else: + return RetrieveDocumentsRequest( + question = '', + num_results = 1, + ) + """ + + def testRetrieveDocumentsRequest(self): + """Test RetrieveDocumentsRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_retrieve_documents_response.py b/test/test_retrieve_documents_response.py new file mode 100644 index 0000000..62d8f80 --- /dev/null +++ b/test/test_retrieve_documents_response.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator 
(https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.retrieve_documents_response import RetrieveDocumentsResponse + +class TestRetrieveDocumentsResponse(unittest.TestCase): + """RetrieveDocumentsResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RetrieveDocumentsResponse: + """Test RetrieveDocumentsResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RetrieveDocumentsResponse` + """ + model = RetrieveDocumentsResponse() + if include_optional: + return RetrieveDocumentsResponse( + question = '', + documents = [ + { } + ], + average_relevancy = 1.337, + ndcg = 1.337 + ) + else: + return RetrieveDocumentsResponse( + question = '', + documents = [ + { } + ], + average_relevancy = 1.337, + ndcg = 1.337, + ) + """ + + def testRetrieveDocumentsResponse(self): + """Test RetrieveDocumentsResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_schedule_schema.py b/test/test_schedule_schema.py new file mode 100644 index 0000000..f2acfd5 --- /dev/null +++ b/test/test_schedule_schema.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.schedule_schema import ScheduleSchema + +class TestScheduleSchema(unittest.TestCase): + """ScheduleSchema unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ScheduleSchema: + """Test ScheduleSchema + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ScheduleSchema` + """ + model = ScheduleSchema() + if include_optional: + return ScheduleSchema( + type = 'manual' + ) + else: + return ScheduleSchema( + type = 'manual', + ) + """ + + def testScheduleSchema(self): + """Test ScheduleSchema""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_schedule_schema_type.py b/test/test_schedule_schema_type.py new file mode 100644 index 0000000..4da55a4 --- /dev/null +++ b/test/test_schedule_schema_type.py @@ -0,0 +1,33 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.schedule_schema_type import ScheduleSchemaType + +class TestScheduleSchemaType(unittest.TestCase): + """ScheduleSchemaType unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def testScheduleSchemaType(self): + """Test ScheduleSchemaType""" + # inst = ScheduleSchemaType() + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_sharepoint.py b/test/test_sharepoint.py new file mode 100644 index 0000000..d345c05 --- /dev/null +++ b/test/test_sharepoint.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.sharepoint import Sharepoint + +class TestSharepoint(unittest.TestCase): + """Sharepoint unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Sharepoint: + """Test Sharepoint + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Sharepoint` + """ + model = Sharepoint() + if include_optional: + return Sharepoint( + name = '', + type = 'SHAREPOINT', + config = vectorize_client.models.sharepoint_config.SHAREPOINTConfig( + file_extensions = pdf, + sites = 'UR,r8NBY08qY UboV 2Ow7qrz 0IL3 fR', ) + ) + else: + return Sharepoint( + name = '', + type = 'SHAREPOINT', + config = vectorize_client.models.sharepoint_config.SHAREPOINTConfig( + file_extensions = pdf, + sites = 'UR,r8NBY08qY UboV 2Ow7qrz 0IL3 fR', ), + ) + """ + + def testSharepoint(self): + """Test Sharepoint""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = 
self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_sharepoint1.py b/test/test_sharepoint1.py new file mode 100644 index 0000000..df0076f --- /dev/null +++ b/test/test_sharepoint1.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.sharepoint1 import Sharepoint1 + +class TestSharepoint1(unittest.TestCase): + """Sharepoint1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Sharepoint1: + """Test Sharepoint1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Sharepoint1` + """ + model = Sharepoint1() + if include_optional: + return Sharepoint1( + config = vectorize_client.models.sharepoint_config.SHAREPOINTConfig( + file_extensions = pdf, + sites = 'UR,r8NBY08qY UboV 2Ow7qrz 0IL3 fR', ) + ) + else: + return Sharepoint1( + ) + """ + + def testSharepoint1(self): + """Test Sharepoint1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_sharepoint2.py b/test/test_sharepoint2.py new file mode 100644 index 0000000..7bd70d3 --- /dev/null +++ b/test/test_sharepoint2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.sharepoint2 import Sharepoint2 + +class TestSharepoint2(unittest.TestCase): + """Sharepoint2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Sharepoint2: + """Test Sharepoint2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Sharepoint2` + """ + model = Sharepoint2() + if include_optional: + return Sharepoint2( + id = '', + type = 'SHAREPOINT' + ) + else: + return Sharepoint2( + id = '', + type = 'SHAREPOINT', + ) + """ + + def testSharepoint2(self): + """Test Sharepoint2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_sharepoint_auth_config.py b/test/test_sharepoint_auth_config.py new file mode 100644 index 0000000..cc3eab7 --- /dev/null +++ b/test/test_sharepoint_auth_config.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.sharepoint_auth_config import SHAREPOINTAuthConfig + +class TestSHAREPOINTAuthConfig(unittest.TestCase): + """SHAREPOINTAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SHAREPOINTAuthConfig: + """Test SHAREPOINTAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SHAREPOINTAuthConfig` + """ + model = SHAREPOINTAuthConfig() + if include_optional: + return SHAREPOINTAuthConfig( + name = '', + ms_client_id = '', + ms_tenant_id = '', + ms_client_secret = '' + ) + else: + return SHAREPOINTAuthConfig( + name = '', + ms_client_id = '', + ms_tenant_id = '', + ms_client_secret = '', + ) + """ + + def testSHAREPOINTAuthConfig(self): + """Test SHAREPOINTAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_sharepoint_config.py b/test/test_sharepoint_config.py new file mode 100644 index 0000000..8514093 --- /dev/null +++ b/test/test_sharepoint_config.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.sharepoint_config import SHAREPOINTConfig + +class TestSHAREPOINTConfig(unittest.TestCase): + """SHAREPOINTConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SHAREPOINTConfig: + """Test SHAREPOINTConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SHAREPOINTConfig` + """ + model = SHAREPOINTConfig() + if include_optional: + return SHAREPOINTConfig( + file_extensions = pdf, + sites = 'UR,r8NBY08qY UboV 2Ow7qrz 0IL3 fR' + ) + else: + return SHAREPOINTConfig( + file_extensions = pdf, + ) + """ + + def testSHAREPOINTConfig(self): + """Test SHAREPOINTConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_singlestore.py b/test/test_singlestore.py new file mode 100644 index 0000000..486180a --- /dev/null +++ b/test/test_singlestore.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.singlestore import Singlestore + +class TestSinglestore(unittest.TestCase): + """Singlestore unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Singlestore: + """Test Singlestore + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Singlestore` + """ + model = Singlestore() + if include_optional: + return Singlestore( + name = '', + type = 'SINGLESTORE', + config = vectorize_client.models.singlestore_config.SINGLESTOREConfig( + table = 'jUR,Z#}euy2zmetozkhdomha2bae4', ) + ) + else: + return Singlestore( + name = '', + type = 'SINGLESTORE', + config = vectorize_client.models.singlestore_config.SINGLESTOREConfig( + table = 'jUR,Z#}euy2zmetozkhdomha2bae4', ), + ) + """ + + def testSinglestore(self): + """Test Singlestore""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_singlestore1.py b/test/test_singlestore1.py new file mode 100644 index 0000000..f69a391 --- /dev/null +++ b/test/test_singlestore1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.singlestore1 import Singlestore1 + +class TestSinglestore1(unittest.TestCase): + """Singlestore1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Singlestore1: + """Test Singlestore1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Singlestore1` + """ + model = Singlestore1() + if include_optional: + return Singlestore1( + config = vectorize_client.models.singlestore_config.SINGLESTOREConfig( + table = 'jUR,Z#}euy2zmetozkhdomha2bae4', ) + ) + else: + return Singlestore1( + ) + """ + + def testSinglestore1(self): + """Test Singlestore1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_singlestore2.py b/test/test_singlestore2.py new file mode 100644 index 0000000..0a7219d --- /dev/null +++ b/test/test_singlestore2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.singlestore2 import Singlestore2 + +class TestSinglestore2(unittest.TestCase): + """Singlestore2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Singlestore2: + """Test Singlestore2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Singlestore2` + """ + model = Singlestore2() + if include_optional: + return Singlestore2( + id = '', + type = 'SINGLESTORE' + ) + else: + return Singlestore2( + id = '', + type = 'SINGLESTORE', + ) + """ + + def testSinglestore2(self): + """Test Singlestore2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_singlestore_auth_config.py b/test/test_singlestore_auth_config.py new file mode 100644 index 0000000..656ef55 --- /dev/null +++ b/test/test_singlestore_auth_config.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.singlestore_auth_config import SINGLESTOREAuthConfig + +class TestSINGLESTOREAuthConfig(unittest.TestCase): + """SINGLESTOREAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SINGLESTOREAuthConfig: + """Test SINGLESTOREAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SINGLESTOREAuthConfig` + """ + model = SINGLESTOREAuthConfig() + if include_optional: + return SINGLESTOREAuthConfig( + name = '', + host = '', + port = 1.337, + database = '', + username = '', + password = '' + ) + else: + return SINGLESTOREAuthConfig( + name = '', + host = '', + port = 1.337, + database = '', + username = '', + password = '', + ) + """ + + def testSINGLESTOREAuthConfig(self): + """Test SINGLESTOREAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_singlestore_config.py b/test/test_singlestore_config.py new file mode 100644 index 0000000..cb8ca94 --- /dev/null +++ b/test/test_singlestore_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.singlestore_config import SINGLESTOREConfig + +class TestSINGLESTOREConfig(unittest.TestCase): + """SINGLESTOREConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SINGLESTOREConfig: + """Test SINGLESTOREConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SINGLESTOREConfig` + """ + model = SINGLESTOREConfig() + if include_optional: + return SINGLESTOREConfig( + table = 'jUR,Z#}euy2zmetozkhdomha2bae4' + ) + else: + return SINGLESTOREConfig( + table = 'jUR,Z#}euy2zmetozkhdomha2bae4', + ) + """ + + def testSINGLESTOREConfig(self): + """Test SINGLESTOREConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_source_connector.py b/test/test_source_connector.py new file mode 100644 index 0000000..b69671c --- /dev/null +++ b/test/test_source_connector.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.source_connector import SourceConnector + +class TestSourceConnector(unittest.TestCase): + """SourceConnector unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SourceConnector: + """Test SourceConnector + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SourceConnector` + """ + model = SourceConnector() + if include_optional: + return SourceConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '' + ) + else: + return SourceConnector( + id = '', + type = '', + name = '', + ) + """ + + def testSourceConnector(self): + """Test SourceConnector""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_source_connector_input.py b/test/test_source_connector_input.py new file mode 100644 index 0000000..584e97e --- /dev/null +++ b/test/test_source_connector_input.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.source_connector_input import SourceConnectorInput + +class TestSourceConnectorInput(unittest.TestCase): + """SourceConnectorInput unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SourceConnectorInput: + """Test SourceConnectorInput + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SourceConnectorInput` + """ + model = SourceConnectorInput() + if include_optional: + return SourceConnectorInput( + id = '', + type = 'AWS_S3', + config = None + ) + else: + return SourceConnectorInput( + id = '', + type = 'AWS_S3', + config = None, + ) + """ + + def testSourceConnectorInput(self): + """Test SourceConnectorInput""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_source_connector_input_config.py b/test/test_source_connector_input_config.py new file mode 100644 index 0000000..f0fee6b --- /dev/null +++ b/test/test_source_connector_input_config.py @@ -0,0 +1,119 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.source_connector_input_config import SourceConnectorInputConfig + +class TestSourceConnectorInputConfig(unittest.TestCase): + """SourceConnectorInputConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SourceConnectorInputConfig: + """Test SourceConnectorInputConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SourceConnectorInputConfig` + """ + model = SourceConnectorInputConfig() + if include_optional: + return SourceConnectorInputConfig( + file_extensions = pdf, + idle_time = 1, + recursive = True, + path_prefix = '', + path_metadata_regex = '', + path_regex_group_names = '', + spaces = '', + root_parents = 'https://drive.google.com/drive/u/80728/folders/v2D2ylmgd10Z3UB6UkJSISSB512iz', + emoji = '', + author = '', + ignore_author = '', + limit = 1, + thread_message_inclusion = 'ALL', + filter_logic = 'AND', + thread_message_mode = 'CONCATENATE', + endpoint = 'Crawl', + request = vectorize_client.models.request.request(), + created_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + updated_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + state = open, + select_resources = '', + database_ids = '', + database_names = '', + page_ids = '', + page_names = '', + sites = 'UR,r8NBY08qY UboV 2Ow7qrz 0IL3 fR', + allowed_domains_opt = '', + forbidden_paths = '/BAMDT', + min_time_between_requests = 1.337, + max_error_count = 1.337, + max_urls = 1.337, + max_depth = 1.337, + reindex_interval_seconds = 1.337, + repositories = 'G/WzyBAw2ZuufUOHOEhA8IcFQXnuaZcdyyvKX7HzK', + include_pull_requests = True, + pull_request_status = 'all', + pull_request_labels = '', + include_issues = True, + issue_status = 'all', + issue_labels = '', + max_items = 1.337, + 
created_after = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + end_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + title_filter = '', + participant_filter_type = 'AND', + participant_filter = '', + max_meetings = 1.337 + ) + else: + return SourceConnectorInputConfig( + file_extensions = pdf, + idle_time = 1, + spaces = '', + endpoint = 'Crawl', + request = vectorize_client.models.request.request(), + created_at = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + select_resources = '', + database_ids = '', + database_names = '', + page_ids = '', + page_names = '', + repositories = 'G/WzyBAw2ZuufUOHOEhA8IcFQXnuaZcdyyvKX7HzK', + include_pull_requests = True, + pull_request_status = 'all', + include_issues = True, + issue_status = 'all', + max_items = 1.337, + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + participant_filter_type = 'AND', + ) + """ + + def testSourceConnectorInputConfig(self): + """Test SourceConnectorInputConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_source_connector_schema.py b/test/test_source_connector_schema.py new file mode 100644 index 0000000..3b0dae8 --- /dev/null +++ b/test/test_source_connector_schema.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.source_connector_schema import SourceConnectorSchema + +class TestSourceConnectorSchema(unittest.TestCase): + """SourceConnectorSchema unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SourceConnectorSchema: + """Test SourceConnectorSchema + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SourceConnectorSchema` + """ + model = SourceConnectorSchema() + if include_optional: + return SourceConnectorSchema( + id = '', + type = 'AWS_S3', + config = { + 'key' : null + } + ) + else: + return SourceConnectorSchema( + id = '', + type = 'AWS_S3', + config = { + 'key' : null + }, + ) + """ + + def testSourceConnectorSchema(self): + """Test SourceConnectorSchema""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_source_connector_type.py b/test/test_source_connector_type.py new file mode 100644 index 0000000..6763ae3 --- /dev/null +++ b/test/test_source_connector_type.py @@ -0,0 +1,33 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.source_connector_type import SourceConnectorType + +class TestSourceConnectorType(unittest.TestCase): + """SourceConnectorType unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def testSourceConnectorType(self): + """Test SourceConnectorType""" + # inst = SourceConnectorType() + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_start_deep_research_request.py b/test/test_start_deep_research_request.py new file mode 100644 index 0000000..ef09d32 --- /dev/null +++ b/test/test_start_deep_research_request.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.start_deep_research_request import StartDeepResearchRequest + +class TestStartDeepResearchRequest(unittest.TestCase): + """StartDeepResearchRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StartDeepResearchRequest: + """Test StartDeepResearchRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StartDeepResearchRequest` + """ + model = StartDeepResearchRequest() + if include_optional: + return StartDeepResearchRequest( + query = '', + web_search = True, + var_schema = '', + n8n = vectorize_client.models.n8_n_config.N8NConfig( + account = '', + webhook_path = '', + headers = { + 'key' : '' + }, ) + ) + else: + return StartDeepResearchRequest( + query = '', + ) + """ + + def testStartDeepResearchRequest(self): + """Test StartDeepResearchRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # 
inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_start_deep_research_response.py b/test/test_start_deep_research_response.py new file mode 100644 index 0000000..3040805 --- /dev/null +++ b/test/test_start_deep_research_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.start_deep_research_response import StartDeepResearchResponse + +class TestStartDeepResearchResponse(unittest.TestCase): + """StartDeepResearchResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StartDeepResearchResponse: + """Test StartDeepResearchResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StartDeepResearchResponse` + """ + model = StartDeepResearchResponse() + if include_optional: + return StartDeepResearchResponse( + research_id = '' + ) + else: + return StartDeepResearchResponse( + research_id = '', + ) + """ + + def testStartDeepResearchResponse(self): + """Test StartDeepResearchResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_start_extraction_request.py b/test/test_start_extraction_request.py new file mode 100644 index 0000000..3065a94 --- /dev/null +++ b/test/test_start_extraction_request.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + 
Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.start_extraction_request import StartExtractionRequest + +class TestStartExtractionRequest(unittest.TestCase): + """StartExtractionRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StartExtractionRequest: + """Test StartExtractionRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StartExtractionRequest` + """ + model = StartExtractionRequest() + if include_optional: + return StartExtractionRequest( + file_id = '', + type = 'iris', + chunking_strategy = 'markdown', + chunk_size = 1.337, + metadata = vectorize_client.models.metadata_extraction_strategy.MetadataExtractionStrategy( + schemas = [ + vectorize_client.models.metadata_extraction_strategy_schema.MetadataExtractionStrategySchema( + id = '', + schema = '', ) + ], + infer_schema = True, ) + ) + else: + return StartExtractionRequest( + file_id = '', + ) + """ + + def testStartExtractionRequest(self): + """Test StartExtractionRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_start_extraction_response.py b/test/test_start_extraction_response.py new file mode 100644 index 0000000..3ccde59 --- /dev/null +++ b/test/test_start_extraction_response.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.start_extraction_response import StartExtractionResponse + +class TestStartExtractionResponse(unittest.TestCase): + """StartExtractionResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StartExtractionResponse: + """Test StartExtractionResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StartExtractionResponse` + """ + model = StartExtractionResponse() + if include_optional: + return StartExtractionResponse( + message = '', + extraction_id = '' + ) + else: + return StartExtractionResponse( + message = '', + extraction_id = '', + ) + """ + + def testStartExtractionResponse(self): + """Test StartExtractionResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_start_file_upload_request.py b/test/test_start_file_upload_request.py new file mode 100644 index 0000000..8fbe096 --- /dev/null +++ b/test/test_start_file_upload_request.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.start_file_upload_request import StartFileUploadRequest + +class TestStartFileUploadRequest(unittest.TestCase): + """StartFileUploadRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StartFileUploadRequest: + """Test StartFileUploadRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StartFileUploadRequest` + """ + model = StartFileUploadRequest() + if include_optional: + return StartFileUploadRequest( + name = '', + content_type = '' + ) + else: + return StartFileUploadRequest( + name = '', + content_type = '', + ) + """ + + def testStartFileUploadRequest(self): + """Test StartFileUploadRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_start_file_upload_response.py b/test/test_start_file_upload_response.py new file mode 100644 index 0000000..f515309 --- /dev/null +++ b/test/test_start_file_upload_response.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.start_file_upload_response import StartFileUploadResponse + +class TestStartFileUploadResponse(unittest.TestCase): + """StartFileUploadResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StartFileUploadResponse: + """Test StartFileUploadResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StartFileUploadResponse` + """ + model = StartFileUploadResponse() + if include_optional: + return StartFileUploadResponse( + file_id = '', + upload_url = '' + ) + else: + return StartFileUploadResponse( + file_id = '', + upload_url = '', + ) + """ + + def testStartFileUploadResponse(self): + """Test StartFileUploadResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_start_file_upload_to_connector_request.py b/test/test_start_file_upload_to_connector_request.py new file mode 100644 index 0000000..d049fe6 --- /dev/null +++ b/test/test_start_file_upload_to_connector_request.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.start_file_upload_to_connector_request import StartFileUploadToConnectorRequest + +class TestStartFileUploadToConnectorRequest(unittest.TestCase): + """StartFileUploadToConnectorRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StartFileUploadToConnectorRequest: + """Test StartFileUploadToConnectorRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StartFileUploadToConnectorRequest` + """ + model = StartFileUploadToConnectorRequest() + if include_optional: + return StartFileUploadToConnectorRequest( + name = '', + content_type = '', + metadata = '' + ) + else: + return StartFileUploadToConnectorRequest( + name = '', + content_type = '', + ) + """ + + def testStartFileUploadToConnectorRequest(self): + """Test StartFileUploadToConnectorRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_start_file_upload_to_connector_response.py b/test/test_start_file_upload_to_connector_response.py new file mode 100644 index 0000000..cc24548 --- /dev/null +++ b/test/test_start_file_upload_to_connector_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.start_file_upload_to_connector_response import StartFileUploadToConnectorResponse + +class TestStartFileUploadToConnectorResponse(unittest.TestCase): + """StartFileUploadToConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StartFileUploadToConnectorResponse: + """Test StartFileUploadToConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StartFileUploadToConnectorResponse` + """ + model = StartFileUploadToConnectorResponse() + if include_optional: + return StartFileUploadToConnectorResponse( + upload_url = '' + ) + else: + return StartFileUploadToConnectorResponse( + upload_url = '', + ) + """ + + def testStartFileUploadToConnectorResponse(self): + """Test StartFileUploadToConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_start_pipeline_response.py b/test/test_start_pipeline_response.py new file mode 100644 index 0000000..6ef4675 --- /dev/null +++ b/test/test_start_pipeline_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.start_pipeline_response import StartPipelineResponse + +class TestStartPipelineResponse(unittest.TestCase): + """StartPipelineResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StartPipelineResponse: + """Test StartPipelineResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StartPipelineResponse` + """ + model = StartPipelineResponse() + if include_optional: + return StartPipelineResponse( + message = '' + ) + else: + return StartPipelineResponse( + message = '', + ) + """ + + def testStartPipelineResponse(self): + """Test StartPipelineResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_stop_pipeline_response.py b/test/test_stop_pipeline_response.py new file mode 100644 index 0000000..50afb4d --- /dev/null +++ b/test/test_stop_pipeline_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.stop_pipeline_response import StopPipelineResponse + +class TestStopPipelineResponse(unittest.TestCase): + """StopPipelineResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StopPipelineResponse: + """Test StopPipelineResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StopPipelineResponse` + """ + model = StopPipelineResponse() + if include_optional: + return StopPipelineResponse( + message = '' + ) + else: + return StopPipelineResponse( + message = '', + ) + """ + + def testStopPipelineResponse(self): + """Test StopPipelineResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_supabase.py b/test/test_supabase.py new file mode 100644 index 0000000..fe6861e --- /dev/null +++ b/test/test_supabase.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.supabase import Supabase + +class TestSupabase(unittest.TestCase): + """Supabase unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Supabase: + """Test Supabase + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Supabase` + """ + model = Supabase() + if include_optional: + return Supabase( + name = '', + type = 'SUPABASE', + config = vectorize_client.models.supabase_config.SUPABASEConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1', ) + ) + else: + return Supabase( + name = '', + type = 'SUPABASE', + config = vectorize_client.models.supabase_config.SUPABASEConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1', ), + ) + """ + + def testSupabase(self): + """Test Supabase""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_supabase1.py b/test/test_supabase1.py new file mode 100644 index 0000000..de3803a --- /dev/null +++ b/test/test_supabase1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.supabase1 import Supabase1 + +class TestSupabase1(unittest.TestCase): + """Supabase1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Supabase1: + """Test Supabase1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Supabase1` + """ + model = Supabase1() + if include_optional: + return Supabase1( + config = vectorize_client.models.supabase_config.SUPABASEConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1', ) + ) + else: + return Supabase1( + ) + """ + + def testSupabase1(self): + """Test Supabase1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_supabase2.py b/test/test_supabase2.py new file mode 100644 index 0000000..1fe7580 --- /dev/null +++ b/test/test_supabase2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.supabase2 import Supabase2 + +class TestSupabase2(unittest.TestCase): + """Supabase2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Supabase2: + """Test Supabase2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Supabase2` + """ + model = Supabase2() + if include_optional: + return Supabase2( + id = '', + type = 'SUPABASE' + ) + else: + return Supabase2( + id = '', + type = 'SUPABASE', + ) + """ + + def testSupabase2(self): + """Test Supabase2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_supabase_auth_config.py b/test/test_supabase_auth_config.py new file mode 100644 index 0000000..dd5a892 --- /dev/null +++ b/test/test_supabase_auth_config.py @@ -0,0 +1,61 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.supabase_auth_config import SUPABASEAuthConfig + +class TestSUPABASEAuthConfig(unittest.TestCase): + """SUPABASEAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SUPABASEAuthConfig: + """Test SUPABASEAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SUPABASEAuthConfig` + """ + model = SUPABASEAuthConfig() + if include_optional: + return SUPABASEAuthConfig( + name = '', + host = 'aws-0-us-east-1.pooler.supabase.com', + port = 1.337, + database = '', + username = '', + password = '' + ) + else: + return SUPABASEAuthConfig( + name = '', + host = 'aws-0-us-east-1.pooler.supabase.com', + database = '', + username = '', + password = '', + ) + """ + + def testSUPABASEAuthConfig(self): + """Test SUPABASEAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_supabase_config.py b/test/test_supabase_config.py new file mode 100644 index 0000000..56e371b --- /dev/null +++ b/test/test_supabase_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.supabase_config import SUPABASEConfig + +class TestSUPABASEConfig(unittest.TestCase): + """SUPABASEConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SUPABASEConfig: + """Test SUPABASEConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SUPABASEConfig` + """ + model = SUPABASEConfig() + if include_optional: + return SUPABASEConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1' + ) + else: + return SUPABASEConfig( + table = 'jUR,Z#}eta.3mh2lcafqw3zheseh1', + ) + """ + + def testSUPABASEConfig(self): + """Test SUPABASEConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_turbopuffer.py b/test/test_turbopuffer.py new file mode 100644 index 0000000..54499f4 --- /dev/null +++ b/test/test_turbopuffer.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.turbopuffer import Turbopuffer + +class TestTurbopuffer(unittest.TestCase): + """Turbopuffer unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Turbopuffer: + """Test Turbopuffer + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Turbopuffer` + """ + model = Turbopuffer() + if include_optional: + return Turbopuffer( + name = '', + type = 'TURBOPUFFER', + config = vectorize_client.models.turbopuffer_config.TURBOPUFFERConfig( + namespace = '', ) + ) + else: + return Turbopuffer( + name = '', + type = 'TURBOPUFFER', + config = vectorize_client.models.turbopuffer_config.TURBOPUFFERConfig( + namespace = '', ), + ) + """ + + def testTurbopuffer(self): + """Test Turbopuffer""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_turbopuffer1.py b/test/test_turbopuffer1.py new file mode 100644 index 0000000..12ab85b --- /dev/null +++ b/test/test_turbopuffer1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.turbopuffer1 import Turbopuffer1 + +class TestTurbopuffer1(unittest.TestCase): + """Turbopuffer1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Turbopuffer1: + """Test Turbopuffer1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Turbopuffer1` + """ + model = Turbopuffer1() + if include_optional: + return Turbopuffer1( + config = vectorize_client.models.turbopuffer_config.TURBOPUFFERConfig( + namespace = '', ) + ) + else: + return Turbopuffer1( + ) + """ + + def testTurbopuffer1(self): + """Test Turbopuffer1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_turbopuffer2.py b/test/test_turbopuffer2.py new file mode 100644 index 0000000..c64bf49 --- /dev/null +++ b/test/test_turbopuffer2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.turbopuffer2 import Turbopuffer2 + +class TestTurbopuffer2(unittest.TestCase): + """Turbopuffer2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Turbopuffer2: + """Test Turbopuffer2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Turbopuffer2` + """ + model = Turbopuffer2() + if include_optional: + return Turbopuffer2( + id = '', + type = 'TURBOPUFFER' + ) + else: + return Turbopuffer2( + id = '', + type = 'TURBOPUFFER', + ) + """ + + def testTurbopuffer2(self): + """Test Turbopuffer2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_turbopuffer_auth_config.py b/test/test_turbopuffer_auth_config.py new file mode 100644 index 0000000..9be27cd --- /dev/null +++ b/test/test_turbopuffer_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.turbopuffer_auth_config import TURBOPUFFERAuthConfig + +class TestTURBOPUFFERAuthConfig(unittest.TestCase): + """TURBOPUFFERAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> TURBOPUFFERAuthConfig: + """Test TURBOPUFFERAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `TURBOPUFFERAuthConfig` + """ + model = TURBOPUFFERAuthConfig() + if include_optional: + return TURBOPUFFERAuthConfig( + name = '', + api_key = 'k' + ) + else: + return TURBOPUFFERAuthConfig( + name = '', + api_key = 'k', + ) + """ + + def testTURBOPUFFERAuthConfig(self): + """Test TURBOPUFFERAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_turbopuffer_config.py b/test/test_turbopuffer_config.py new file mode 100644 index 0000000..97b153b --- /dev/null +++ b/test/test_turbopuffer_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.turbopuffer_config import TURBOPUFFERConfig + +class TestTURBOPUFFERConfig(unittest.TestCase): + """TURBOPUFFERConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> TURBOPUFFERConfig: + """Test TURBOPUFFERConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `TURBOPUFFERConfig` + """ + model = TURBOPUFFERConfig() + if include_optional: + return TURBOPUFFERConfig( + namespace = '' + ) + else: + return TURBOPUFFERConfig( + namespace = '', + ) + """ + + def testTURBOPUFFERConfig(self): + """Test TURBOPUFFERConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_ai_platform_connector_request.py b/test/test_update_ai_platform_connector_request.py new file mode 100644 index 0000000..8784330 --- /dev/null +++ b/test/test_update_ai_platform_connector_request.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_ai_platform_connector_request import UpdateAIPlatformConnectorRequest + +class TestUpdateAIPlatformConnectorRequest(unittest.TestCase): + """UpdateAIPlatformConnectorRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateAIPlatformConnectorRequest: + """Test UpdateAIPlatformConnectorRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateAIPlatformConnectorRequest` + """ + model = UpdateAIPlatformConnectorRequest() + if include_optional: + return UpdateAIPlatformConnectorRequest( + config = { + 'key' : null + } + ) + else: + return UpdateAIPlatformConnectorRequest( + config = { + 'key' : null + }, + ) + """ + + def testUpdateAIPlatformConnectorRequest(self): + """Test UpdateAIPlatformConnectorRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_ai_platform_connector_response.py b/test/test_update_ai_platform_connector_response.py new file mode 100644 index 0000000..179c1f7 --- /dev/null +++ b/test/test_update_ai_platform_connector_response.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_ai_platform_connector_response import UpdateAIPlatformConnectorResponse + +class TestUpdateAIPlatformConnectorResponse(unittest.TestCase): + """UpdateAIPlatformConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateAIPlatformConnectorResponse: + """Test UpdateAIPlatformConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateAIPlatformConnectorResponse` + """ + model = UpdateAIPlatformConnectorResponse() + if include_optional: + return UpdateAIPlatformConnectorResponse( + message = '', + data = vectorize_client.models.updated_ai_platform_connector_data.UpdatedAIPlatformConnectorData( + updated_connector = vectorize_client.models.ai_platform.AIPlatform( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + pipeline_ids = [ + '' + ], ) + ) + else: + return UpdateAIPlatformConnectorResponse( + message = '', + data = vectorize_client.models.updated_ai_platform_connector_data.UpdatedAIPlatformConnectorData( + updated_connector = vectorize_client.models.ai_platform.AIPlatform( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + pipeline_ids = [ + '' + ], ), + ) + """ + + def testUpdateAIPlatformConnectorResponse(self): + """Test UpdateAIPlatformConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = 
self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_aiplatform_connector_request.py b/test/test_update_aiplatform_connector_request.py new file mode 100644 index 0000000..b3ab7c6 --- /dev/null +++ b/test/test_update_aiplatform_connector_request.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_aiplatform_connector_request import UpdateAiplatformConnectorRequest + +class TestUpdateAiplatformConnectorRequest(unittest.TestCase): + """UpdateAiplatformConnectorRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateAiplatformConnectorRequest: + """Test UpdateAiplatformConnectorRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateAiplatformConnectorRequest` + """ + model = UpdateAiplatformConnectorRequest() + if include_optional: + return UpdateAiplatformConnectorRequest( + config = vectorize_client.models.voyage_auth_config.VOYAGEAuthConfig( + name = '', + key = 'k', ) + ) + else: + return UpdateAiplatformConnectorRequest( + ) + """ + + def testUpdateAiplatformConnectorRequest(self): + """Test UpdateAiplatformConnectorRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_destination_connector_request.py b/test/test_update_destination_connector_request.py new file mode 100644 index 0000000..3a23be8 --- /dev/null +++ 
b/test/test_update_destination_connector_request.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_destination_connector_request import UpdateDestinationConnectorRequest + +class TestUpdateDestinationConnectorRequest(unittest.TestCase): + """UpdateDestinationConnectorRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateDestinationConnectorRequest: + """Test UpdateDestinationConnectorRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateDestinationConnectorRequest` + """ + model = UpdateDestinationConnectorRequest() + if include_optional: + return UpdateDestinationConnectorRequest( + config = vectorize_client.models.turbopuffer_config.TURBOPUFFERConfig( + namespace = '', ) + ) + else: + return UpdateDestinationConnectorRequest( + ) + """ + + def testUpdateDestinationConnectorRequest(self): + """Test UpdateDestinationConnectorRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_destination_connector_response.py b/test/test_update_destination_connector_response.py new file mode 100644 index 0000000..765a722 --- /dev/null +++ b/test/test_update_destination_connector_response.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit 
the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_destination_connector_response import UpdateDestinationConnectorResponse + +class TestUpdateDestinationConnectorResponse(unittest.TestCase): + """UpdateDestinationConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateDestinationConnectorResponse: + """Test UpdateDestinationConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateDestinationConnectorResponse` + """ + model = UpdateDestinationConnectorResponse() + if include_optional: + return UpdateDestinationConnectorResponse( + message = '', + data = vectorize_client.models.updated_destination_connector_data.UpdatedDestinationConnectorData( + updated_connector = vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + pipeline_ids = [ + '' + ], ) + ) + else: + return UpdateDestinationConnectorResponse( + message = '', + data = vectorize_client.models.updated_destination_connector_data.UpdatedDestinationConnectorData( + updated_connector = vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + pipeline_ids = [ + '' + ], ), + ) + """ + + def testUpdateDestinationConnectorResponse(self): + """Test UpdateDestinationConnectorResponse""" + # inst_req_only = 
self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_source_connector_request.py b/test/test_update_source_connector_request.py new file mode 100644 index 0000000..d1ed31b --- /dev/null +++ b/test/test_update_source_connector_request.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_source_connector_request import UpdateSourceConnectorRequest + +class TestUpdateSourceConnectorRequest(unittest.TestCase): + """UpdateSourceConnectorRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateSourceConnectorRequest: + """Test UpdateSourceConnectorRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateSourceConnectorRequest` + """ + model = UpdateSourceConnectorRequest() + if include_optional: + return UpdateSourceConnectorRequest( + config = vectorize_client.models.fireflies_config.FIREFLIESConfig( + start_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + end_date = datetime.datetime.strptime('1975-12-30', '%Y-%m-%d').date(), + title_filter_type = 'AND', + title_filter = '', + participant_filter_type = 'AND', + participant_filter = '', + max_meetings = 1.337, ) + ) + else: + return UpdateSourceConnectorRequest( + ) + """ + + def testUpdateSourceConnectorRequest(self): + """Test UpdateSourceConnectorRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = 
self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_source_connector_response.py b/test/test_update_source_connector_response.py new file mode 100644 index 0000000..5aa44d3 --- /dev/null +++ b/test/test_update_source_connector_response.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_source_connector_response import UpdateSourceConnectorResponse + +class TestUpdateSourceConnectorResponse(unittest.TestCase): + """UpdateSourceConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateSourceConnectorResponse: + """Test UpdateSourceConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateSourceConnectorResponse` + """ + model = UpdateSourceConnectorResponse() + if include_optional: + return UpdateSourceConnectorResponse( + message = '', + data = vectorize_client.models.update_source_connector_response_data.UpdateSourceConnectorResponseData( + updated_connector = vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + pipeline_ids = [ + '' + ], ) + ) + else: + return UpdateSourceConnectorResponse( + message = '', + data = vectorize_client.models.update_source_connector_response_data.UpdateSourceConnectorResponseData( + updated_connector = 
vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + pipeline_ids = [ + '' + ], ), + ) + """ + + def testUpdateSourceConnectorResponse(self): + """Test UpdateSourceConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_source_connector_response_data.py b/test/test_update_source_connector_response_data.py new file mode 100644 index 0000000..7234196 --- /dev/null +++ b/test/test_update_source_connector_response_data.py @@ -0,0 +1,81 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_source_connector_response_data import UpdateSourceConnectorResponseData + +class TestUpdateSourceConnectorResponseData(unittest.TestCase): + """UpdateSourceConnectorResponseData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateSourceConnectorResponseData: + """Test UpdateSourceConnectorResponseData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateSourceConnectorResponseData` + """ + model = UpdateSourceConnectorResponseData() + if include_optional: + return UpdateSourceConnectorResponseData( + updated_connector = vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + pipeline_ids = [ + '' + ] + ) + else: + return UpdateSourceConnectorResponseData( + updated_connector = vectorize_client.models.source_connector.SourceConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + ) + """ + + def testUpdateSourceConnectorResponseData(self): + """Test UpdateSourceConnectorResponseData""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_user_in_source_connector_request.py b/test/test_update_user_in_source_connector_request.py new file mode 100644 index 
0000000..8b4cb8b --- /dev/null +++ b/test/test_update_user_in_source_connector_request.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_user_in_source_connector_request import UpdateUserInSourceConnectorRequest + +class TestUpdateUserInSourceConnectorRequest(unittest.TestCase): + """UpdateUserInSourceConnectorRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateUserInSourceConnectorRequest: + """Test UpdateUserInSourceConnectorRequest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateUserInSourceConnectorRequest` + """ + model = UpdateUserInSourceConnectorRequest() + if include_optional: + return UpdateUserInSourceConnectorRequest( + user_id = '', + selected_files = None, + refresh_token = '', + access_token = '' + ) + else: + return UpdateUserInSourceConnectorRequest( + user_id = '', + ) + """ + + def testUpdateUserInSourceConnectorRequest(self): + """Test UpdateUserInSourceConnectorRequest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_update_user_in_source_connector_response.py b/test/test_update_user_in_source_connector_response.py new file mode 100644 index 0000000..c4a8120 --- /dev/null +++ b/test/test_update_user_in_source_connector_response.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by 
OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.update_user_in_source_connector_response import UpdateUserInSourceConnectorResponse + +class TestUpdateUserInSourceConnectorResponse(unittest.TestCase): + """UpdateUserInSourceConnectorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdateUserInSourceConnectorResponse: + """Test UpdateUserInSourceConnectorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdateUserInSourceConnectorResponse` + """ + model = UpdateUserInSourceConnectorResponse() + if include_optional: + return UpdateUserInSourceConnectorResponse( + message = '' + ) + else: + return UpdateUserInSourceConnectorResponse( + message = '', + ) + """ + + def testUpdateUserInSourceConnectorResponse(self): + """Test UpdateUserInSourceConnectorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_updated_ai_platform_connector_data.py b/test/test_updated_ai_platform_connector_data.py new file mode 100644 index 0000000..dac103c --- /dev/null +++ b/test/test_updated_ai_platform_connector_data.py @@ -0,0 +1,81 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.updated_ai_platform_connector_data import UpdatedAIPlatformConnectorData + +class TestUpdatedAIPlatformConnectorData(unittest.TestCase): + """UpdatedAIPlatformConnectorData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdatedAIPlatformConnectorData: + """Test UpdatedAIPlatformConnectorData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdatedAIPlatformConnectorData` + """ + model = UpdatedAIPlatformConnectorData() + if include_optional: + return UpdatedAIPlatformConnectorData( + updated_connector = vectorize_client.models.ai_platform.AIPlatform( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + pipeline_ids = [ + '' + ] + ) + else: + return UpdatedAIPlatformConnectorData( + updated_connector = vectorize_client.models.ai_platform.AIPlatform( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + ) + """ + + def testUpdatedAIPlatformConnectorData(self): + """Test UpdatedAIPlatformConnectorData""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_updated_destination_connector_data.py b/test/test_updated_destination_connector_data.py new file mode 100644 index 0000000..9c2148d --- /dev/null +++ 
b/test/test_updated_destination_connector_data.py @@ -0,0 +1,81 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.updated_destination_connector_data import UpdatedDestinationConnectorData + +class TestUpdatedDestinationConnectorData(unittest.TestCase): + """UpdatedDestinationConnectorData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UpdatedDestinationConnectorData: + """Test UpdatedDestinationConnectorData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UpdatedDestinationConnectorData` + """ + model = UpdatedDestinationConnectorData() + if include_optional: + return UpdatedDestinationConnectorData( + updated_connector = vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + pipeline_ids = [ + '' + ] + ) + else: + return UpdatedDestinationConnectorData( + updated_connector = vectorize_client.models.destination_connector.DestinationConnector( + id = '', + type = '', + name = '', + config_doc = { + 'key' : null + }, + created_at = '', + created_by_id = '', + last_updated_by_id = '', + created_by_email = '', + last_updated_by_email = '', + error_message = '', + verification_status = '', ), + ) + """ + + def testUpdatedDestinationConnectorData(self): + """Test UpdatedDestinationConnectorData""" + # inst_req_only = 
self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_upload_file.py b/test/test_upload_file.py new file mode 100644 index 0000000..1e2dbf6 --- /dev/null +++ b/test/test_upload_file.py @@ -0,0 +1,65 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.models.upload_file import UploadFile + +class TestUploadFile(unittest.TestCase): + """UploadFile unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UploadFile: + """Test UploadFile + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UploadFile` + """ + model = UploadFile() + if include_optional: + return UploadFile( + key = '', + name = '', + size = 1.337, + extension = '', + last_modified = '', + metadata = { + 'key' : '' + } + ) + else: + return UploadFile( + key = '', + name = '', + size = 1.337, + last_modified = '', + metadata = { + 'key' : '' + }, + ) + """ + + def testUploadFile(self): + """Test UploadFile""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_uploads_api.py b/test/test_uploads_api.py new file mode 100644 index 0000000..eeef312 --- /dev/null +++ b/test/test_uploads_api.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator 
(https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from vectorize_client.api.uploads_api import UploadsApi + + +class TestUploadsApi(unittest.TestCase): + """UploadsApi unit test stubs""" + + def setUp(self) -> None: + self.api = UploadsApi() + + def tearDown(self) -> None: + pass + + def test_delete_file_from_connector(self) -> None: + """Test case for delete_file_from_connector + + Delete a file from a file upload connector + """ + pass + + def test_get_upload_files_from_connector(self) -> None: + """Test case for get_upload_files_from_connector + + Get uploaded files from a file upload connector + """ + pass + + def test_start_file_upload_to_connector(self) -> None: + """Test case for start_file_upload_to_connector + + Upload a file to a file upload connector + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_vertex.py b/test/test_vertex.py new file mode 100644 index 0000000..e4fedb4 --- /dev/null +++ b/test/test_vertex.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.vertex import Vertex + +class TestVertex(unittest.TestCase): + """Vertex unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Vertex: + """Test Vertex + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Vertex` + """ + model = Vertex() + if include_optional: + return Vertex( + name = '', + type = 'VERTEX', + config = vectorize_client.models.vertex_auth_config.VERTEXAuthConfig( + name = '', + key = '', + region = '', ) + ) + else: + return Vertex( + name = '', + type = 'VERTEX', + config = vectorize_client.models.vertex_auth_config.VERTEXAuthConfig( + name = '', + key = '', + region = '', ), + ) + """ + + def testVertex(self): + """Test Vertex""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_vertex1.py b/test/test_vertex1.py new file mode 100644 index 0000000..e77c807 --- /dev/null +++ b/test/test_vertex1.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.vertex1 import Vertex1 + +class TestVertex1(unittest.TestCase): + """Vertex1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Vertex1: + """Test Vertex1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Vertex1` + """ + model = Vertex1() + if include_optional: + return Vertex1( + config = vectorize_client.models.vertex_auth_config.VERTEXAuthConfig( + name = '', + key = '', + region = '', ) + ) + else: + return Vertex1( + ) + """ + + def testVertex1(self): + """Test Vertex1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_vertex2.py b/test/test_vertex2.py new file mode 100644 index 0000000..3f26626 --- /dev/null +++ b/test/test_vertex2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.vertex2 import Vertex2 + +class TestVertex2(unittest.TestCase): + """Vertex2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Vertex2: + """Test Vertex2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Vertex2` + """ + model = Vertex2() + if include_optional: + return Vertex2( + id = '', + type = 'VERTEX' + ) + else: + return Vertex2( + id = '', + type = 'VERTEX', + ) + """ + + def testVertex2(self): + """Test Vertex2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_vertex_auth_config.py b/test/test_vertex_auth_config.py new file mode 100644 index 0000000..c5b734a --- /dev/null +++ b/test/test_vertex_auth_config.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.vertex_auth_config import VERTEXAuthConfig + +class TestVERTEXAuthConfig(unittest.TestCase): + """VERTEXAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> VERTEXAuthConfig: + """Test VERTEXAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `VERTEXAuthConfig` + """ + model = VERTEXAuthConfig() + if include_optional: + return VERTEXAuthConfig( + name = '', + key = '', + region = '' + ) + else: + return VERTEXAuthConfig( + name = '', + key = '', + region = '', + ) + """ + + def testVERTEXAuthConfig(self): + """Test VERTEXAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_voyage.py b/test/test_voyage.py new file mode 100644 index 0000000..44f52af --- /dev/null +++ b/test/test_voyage.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.voyage import Voyage + +class TestVoyage(unittest.TestCase): + """Voyage unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Voyage: + """Test Voyage + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Voyage` + """ + model = Voyage() + if include_optional: + return Voyage( + name = '', + type = 'VOYAGE', + config = vectorize_client.models.voyage_auth_config.VOYAGEAuthConfig( + name = '', + key = 'k', ) + ) + else: + return Voyage( + name = '', + type = 'VOYAGE', + config = vectorize_client.models.voyage_auth_config.VOYAGEAuthConfig( + name = '', + key = 'k', ), + ) + """ + + def testVoyage(self): + """Test Voyage""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_voyage1.py b/test/test_voyage1.py new file mode 100644 index 0000000..f55d7bb --- /dev/null +++ b/test/test_voyage1.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.voyage1 import Voyage1 + +class TestVoyage1(unittest.TestCase): + """Voyage1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Voyage1: + """Test Voyage1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Voyage1` + """ + model = Voyage1() + if include_optional: + return Voyage1( + config = vectorize_client.models.voyage_auth_config.VOYAGEAuthConfig( + name = '', + key = 'k', ) + ) + else: + return Voyage1( + ) + """ + + def testVoyage1(self): + """Test Voyage1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_voyage2.py b/test/test_voyage2.py new file mode 100644 index 0000000..2b7ac94 --- /dev/null +++ b/test/test_voyage2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.voyage2 import Voyage2 + +class TestVoyage2(unittest.TestCase): + """Voyage2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Voyage2: + """Test Voyage2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Voyage2` + """ + model = Voyage2() + if include_optional: + return Voyage2( + id = '', + type = 'VOYAGE' + ) + else: + return Voyage2( + id = '', + type = 'VOYAGE', + ) + """ + + def testVoyage2(self): + """Test Voyage2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_voyage_auth_config.py b/test/test_voyage_auth_config.py new file mode 100644 index 0000000..a265cc2 --- /dev/null +++ b/test/test_voyage_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.voyage_auth_config import VOYAGEAuthConfig + +class TestVOYAGEAuthConfig(unittest.TestCase): + """VOYAGEAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> VOYAGEAuthConfig: + """Test VOYAGEAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `VOYAGEAuthConfig` + """ + model = VOYAGEAuthConfig() + if include_optional: + return VOYAGEAuthConfig( + name = '', + key = 'k' + ) + else: + return VOYAGEAuthConfig( + name = '', + key = 'k', + ) + """ + + def testVOYAGEAuthConfig(self): + """Test VOYAGEAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_weaviate.py b/test/test_weaviate.py new file mode 100644 index 0000000..eae0264 --- /dev/null +++ b/test/test_weaviate.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.weaviate import Weaviate + +class TestWeaviate(unittest.TestCase): + """Weaviate unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Weaviate: + """Test Weaviate + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Weaviate` + """ + model = Weaviate() + if include_optional: + return Weaviate( + name = '', + type = 'WEAVIATE', + config = vectorize_client.models.weaviate_config.WEAVIATEConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', ) + ) + else: + return Weaviate( + name = '', + type = 'WEAVIATE', + config = vectorize_client.models.weaviate_config.WEAVIATEConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', ), + ) + """ + + def testWeaviate(self): + """Test Weaviate""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_weaviate1.py b/test/test_weaviate1.py new file mode 100644 index 0000000..f0a280d --- /dev/null +++ b/test/test_weaviate1.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.weaviate1 import Weaviate1 + +class TestWeaviate1(unittest.TestCase): + """Weaviate1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Weaviate1: + """Test Weaviate1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Weaviate1` + """ + model = Weaviate1() + if include_optional: + return Weaviate1( + config = vectorize_client.models.weaviate_config.WEAVIATEConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', ) + ) + else: + return Weaviate1( + ) + """ + + def testWeaviate1(self): + """Test Weaviate1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_weaviate2.py b/test/test_weaviate2.py new file mode 100644 index 0000000..5df2242 --- /dev/null +++ b/test/test_weaviate2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.weaviate2 import Weaviate2 + +class TestWeaviate2(unittest.TestCase): + """Weaviate2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Weaviate2: + """Test Weaviate2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Weaviate2` + """ + model = Weaviate2() + if include_optional: + return Weaviate2( + id = '', + type = 'WEAVIATE' + ) + else: + return Weaviate2( + id = '', + type = 'WEAVIATE', + ) + """ + + def testWeaviate2(self): + """Test Weaviate2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_weaviate_auth_config.py b/test/test_weaviate_auth_config.py new file mode 100644 index 0000000..a72ca67 --- /dev/null +++ b/test/test_weaviate_auth_config.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.weaviate_auth_config import WEAVIATEAuthConfig + +class TestWEAVIATEAuthConfig(unittest.TestCase): + """WEAVIATEAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WEAVIATEAuthConfig: + """Test WEAVIATEAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WEAVIATEAuthConfig` + """ + model = WEAVIATEAuthConfig() + if include_optional: + return WEAVIATEAuthConfig( + name = '', + host = '', + api_key = 'k' + ) + else: + return WEAVIATEAuthConfig( + name = '', + host = '', + api_key = 'k', + ) + """ + + def testWEAVIATEAuthConfig(self): + """Test WEAVIATEAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_weaviate_config.py b/test/test_weaviate_config.py new file mode 100644 index 0000000..dc5d1ef --- /dev/null +++ b/test/test_weaviate_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.weaviate_config import WEAVIATEConfig + +class TestWEAVIATEConfig(unittest.TestCase): + """WEAVIATEConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WEAVIATEConfig: + """Test WEAVIATEConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WEAVIATEConfig` + """ + model = WEAVIATEConfig() + if include_optional: + return WEAVIATEConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua' + ) + else: + return WEAVIATEConfig( + collection = 'AqXzyCBw3_uufVPIPFhB9JcGRYnua', + ) + """ + + def testWEAVIATEConfig(self): + """Test WEAVIATEConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_web_crawler.py b/test/test_web_crawler.py new file mode 100644 index 0000000..815e2c6 --- /dev/null +++ b/test/test_web_crawler.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.web_crawler import WebCrawler + +class TestWebCrawler(unittest.TestCase): + """WebCrawler unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WebCrawler: + """Test WebCrawler + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WebCrawler` + """ + model = WebCrawler() + if include_optional: + return WebCrawler( + name = '', + type = 'WEB_CRAWLER', + config = vectorize_client.models.web_crawler_config.WEB_CRAWLERConfig( + allowed_domains_opt = '', + forbidden_paths = '/BAMDT', + min_time_between_requests = 1.337, + max_error_count = 1.337, + max_urls = 1.337, + max_depth = 1.337, + reindex_interval_seconds = 1.337, ) + ) + else: + return WebCrawler( + name = '', + type = 'WEB_CRAWLER', + config = vectorize_client.models.web_crawler_config.WEB_CRAWLERConfig( + allowed_domains_opt = '', + forbidden_paths = '/BAMDT', + min_time_between_requests = 1.337, + max_error_count = 1.337, + max_urls = 1.337, + max_depth = 1.337, + reindex_interval_seconds = 1.337, ), + ) + """ + + def testWebCrawler(self): + """Test WebCrawler""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_web_crawler1.py b/test/test_web_crawler1.py new file mode 100644 index 0000000..5fa12b0 --- /dev/null +++ b/test/test_web_crawler1.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.web_crawler1 import WebCrawler1 + +class TestWebCrawler1(unittest.TestCase): + """WebCrawler1 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WebCrawler1: + """Test WebCrawler1 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WebCrawler1` + """ + model = WebCrawler1() + if include_optional: + return WebCrawler1( + config = vectorize_client.models.web_crawler_config.WEB_CRAWLERConfig( + allowed_domains_opt = '', + forbidden_paths = '/BAMDT', + min_time_between_requests = 1.337, + max_error_count = 1.337, + max_urls = 1.337, + max_depth = 1.337, + reindex_interval_seconds = 1.337, ) + ) + else: + return WebCrawler1( + ) + """ + + def testWebCrawler1(self): + """Test WebCrawler1""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_web_crawler2.py b/test/test_web_crawler2.py new file mode 100644 index 0000000..e835511 --- /dev/null +++ b/test/test_web_crawler2.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.web_crawler2 import WebCrawler2 + +class TestWebCrawler2(unittest.TestCase): + """WebCrawler2 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WebCrawler2: + """Test WebCrawler2 + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WebCrawler2` + """ + model = WebCrawler2() + if include_optional: + return WebCrawler2( + id = '', + type = 'WEB_CRAWLER' + ) + else: + return WebCrawler2( + id = '', + type = 'WEB_CRAWLER', + ) + """ + + def testWebCrawler2(self): + """Test WebCrawler2""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_webcrawler_auth_config.py b/test/test_webcrawler_auth_config.py new file mode 100644 index 0000000..957afad --- /dev/null +++ b/test/test_webcrawler_auth_config.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.webcrawler_auth_config import WEBCRAWLERAuthConfig + +class TestWEBCRAWLERAuthConfig(unittest.TestCase): + """WEBCRAWLERAuthConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WEBCRAWLERAuthConfig: + """Test WEBCRAWLERAuthConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WEBCRAWLERAuthConfig` + """ + model = WEBCRAWLERAuthConfig() + if include_optional: + return WEBCRAWLERAuthConfig( + name = '', + seed_urls = '' + ) + else: + return WEBCRAWLERAuthConfig( + name = '', + seed_urls = '', + ) + """ + + def testWEBCRAWLERAuthConfig(self): + """Test WEBCRAWLERAuthConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/test/test_webcrawler_config.py b/test/test_webcrawler_config.py new file mode 100644 index 0000000..c3feaa8 --- /dev/null +++ b/test/test_webcrawler_config.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from vectorize_client.models.webcrawler_config import WEBCRAWLERConfig + +class TestWEBCRAWLERConfig(unittest.TestCase): + """WEBCRAWLERConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WEBCRAWLERConfig: + """Test WEBCRAWLERConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WEBCRAWLERConfig` + """ + model = WEBCRAWLERConfig() + if include_optional: + return WEBCRAWLERConfig( + allowed_domains_opt = '', + forbidden_paths = '/BAMDT', + min_time_between_requests = 1.337, + max_error_count = 1.337, + max_urls = 1.337, + max_depth = 1.337, + reindex_interval_seconds = 1.337 + ) + else: + return WEBCRAWLERConfig( + ) + """ + + def testWEBCRAWLERConfig(self): + """Test WEBCRAWLERConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..0ea3820 --- /dev/null +++ b/tox.ini @@ -0,0 +1,9 @@ +[tox] +envlist = py3 + +[testenv] +deps=-r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +commands= + pytest --cov=vectorize_client diff --git a/vectorize_api.json b/vectorize_api.json index f106c9a..c521a60 100644 --- a/vectorize_api.json +++ b/vectorize_api.json @@ -1 +1,15928 @@ -{"openapi":"3.0.0","info":{"title":"Vectorize API (Beta)","version":"0.0.1","description":"API for Vectorize services","contact":{"name":"Vectorize","url":"https://vectorize.io"}},"servers":[{"url":"https://api.vectorize.io/v1","description":"Vectorize 
API"}],"components":{"securitySchemes":{"bearerAuth":{"type":"http","scheme":"bearer","bearerFormat":"JWT"}},"schemas":{"CreatePipelineResponse":{"type":"object","properties":{"message":{"type":"string"},"data":{"type":"object","properties":{"id":{"type":"string"}},"required":["id"]}},"required":["message","data"]},"SourceConnectorType":{"type":"string","enum":["AWS_S3","AZURE_BLOB","CONFLUENCE","DISCORD","DROPBOX","GOOGLE_DRIVE_OAUTH","GOOGLE_DRIVE","GOOGLE_DRIVE_OAUTH_MULTI","GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM","FIRECRAWL","GCS","INTERCOM","ONE_DRIVE","SHAREPOINT","WEB_CRAWLER","FILE_UPLOAD","SALESFORCE","ZENDESK"]},"SourceConnectorSchema":{"type":"object","properties":{"id":{"type":"string","format":"uuid"},"type":{"$ref":"#/components/schemas/SourceConnectorType"},"config":{"type":"object","additionalProperties":{"nullable":true}}},"required":["id","type"],"additionalProperties":false},"DestinationConnectorType":{"type":"string","enum":["CAPELLA","DATASTAX","ELASTIC","PINECONE","SINGLESTORE","MILVUS","POSTGRESQL","QDRANT","SUPABASE","WEAVIATE","AZUREAISEARCH","VECTORIZE","CHROMA","MONGODB"]},"DestinationConnectorSchema":{"type":"object","properties":{"id":{"type":"string","format":"uuid"},"type":{"$ref":"#/components/schemas/DestinationConnectorType"},"config":{"type":"object","additionalProperties":{"nullable":true}}},"required":["id","type"],"additionalProperties":false},"AIPlatformType":{"type":"string","enum":["BEDROCK","VERTEX","OPENAI","VOYAGE","VECTORIZE"]},"AIPlatformConfigSchema":{"type":"object","properties":{"embeddingModel":{"type":"string","enum":["VECTORIZE_OPEN_AI_TEXT_EMBEDDING_2","VECTORIZE_OPEN_AI_TEXT_EMBEDDING_3_LARGE","VECTORIZE_OPEN_AI_TEXT_EMBEDDING_3_SMALL","VECTORIZE_VOYAGE_AI_2","VECTORIZE_VOYAGE_AI_3","VECTORIZE_VOYAGE_AI_3_LITE","VECTORIZE_VOYAGE_AI_3_LARGE","VECTORIZE_VOYAGE_AI_FINANCE_2","VECTORIZE_VOYAGE_AI_MULTILINGUAL_2","VECTORIZE_VOYAGE_AI_LAW_2","VECTORIZE_VOYAGE_AI_CODE_2","VECTORIZE_TITAN_TEXT_EMBEDDING_2","VECTORIZE_TITAN_TEXT
_EMBEDDING_1","OPEN_AI_TEXT_EMBEDDING_2","OPEN_AI_TEXT_EMBEDDING_3_SMALL","OPEN_AI_TEXT_EMBEDDING_3_LARGE","VOYAGE_AI_2","VOYAGE_AI_3","VOYAGE_AI_3_LITE","VOYAGE_AI_3_LARGE","VOYAGE_AI_FINANCE_2","VOYAGE_AI_MULTILINGUAL_2","VOYAGE_AI_LAW_2","VOYAGE_AI_CODE_2","TITAN_TEXT_EMBEDDING_1","TITAN_TEXT_EMBEDDING_2","VERTEX_TEXT_EMBEDDING_4","VERTEX_TEXT_EMBEDDING_GECKO_3","VERTEX_GECKO_MULTILINGUAL_1","VERTEX_MULTILINGUAL_EMBEDDING_2"]},"chunkingStrategy":{"type":"string","enum":["FIXED","SENTENCE","PARAGRAPH","MARKDOWN"]},"chunkSize":{"type":"integer","minimum":1},"chunkOverlap":{"type":"integer","minimum":0},"dimensions":{"type":"integer","minimum":1},"extractionStrategy":{"type":"string","enum":["FAST","IRIS","MIXED"]}},"additionalProperties":false},"AIPlatformSchema":{"type":"object","properties":{"id":{"type":"string","format":"uuid"},"type":{"$ref":"#/components/schemas/AIPlatformType"},"config":{"$ref":"#/components/schemas/AIPlatformConfigSchema"}},"required":["id","type","config"],"additionalProperties":false},"ScheduleSchemaType":{"type":"string","enum":["manual","realtime","custom"]},"ScheduleSchema":{"type":"object","properties":{"type":{"$ref":"#/components/schemas/ScheduleSchemaType"}},"required":["type"]},"PipelineConfigurationSchema":{"type":"object","properties":{"sourceConnectors":{"type":"array","items":{"$ref":"#/components/schemas/SourceConnectorSchema"},"minItems":1},"destinationConnector":{"$ref":"#/components/schemas/DestinationConnectorSchema"},"aiPlatform":{"$ref":"#/components/schemas/AIPlatformSchema"},"pipelineName":{"type":"string","minLength":1},"schedule":{"$ref":"#/components/schemas/ScheduleSchema"}},"required":["sourceConnectors","destinationConnector","aiPlatform","pipelineName","schedule"],"additionalProperties":false},"PipelineListSummary":{"type":"object","properties":{"id":{"type":"string"},"name":{"type":"string"},"documentCount":{"type":"number"},"sourceConnectorAuthIds":{"type":"array","items":{"type":"string"}},"destinationConnec
torAuthIds":{"type":"array","items":{"type":"string"}},"aiPlatformAuthIds":{"type":"array","items":{"type":"string"}},"sourceConnectorTypes":{"type":"array","items":{"type":"string"}},"destinationConnectorTypes":{"type":"array","items":{"type":"string"}},"aiPlatformTypes":{"type":"array","items":{"type":"string"}},"createdAt":{"type":"string","nullable":true},"createdBy":{"type":"string"},"status":{"type":"string"},"configDoc":{"type":"object","additionalProperties":{"nullable":true}}},"required":["id","name","documentCount","sourceConnectorAuthIds","destinationConnectorAuthIds","aiPlatformAuthIds","sourceConnectorTypes","destinationConnectorTypes","aiPlatformTypes","createdAt","createdBy"]},"GetPipelinesResponse":{"type":"object","properties":{"message":{"type":"string"},"data":{"type":"array","items":{"$ref":"#/components/schemas/PipelineListSummary"}}},"required":["message","data"]},"SourceConnector":{"type":"object","properties":{"id":{"type":"string"},"type":{"type":"string"},"name":{"type":"string"},"configDoc":{"type":"object","additionalProperties":{"nullable":true}},"createdAt":{"type":"string","nullable":true},"createdById":{"type":"string"},"lastUpdatedById":{"type":"string"},"createdByEmail":{"type":"string"},"lastUpdatedByEmail":{"type":"string"},"errorMessage":{"type":"string"},"verificationStatus":{"type":"string"}},"required":["id","type","name"]},"DestinationConnector":{"type":"object","properties":{"id":{"type":"string"},"type":{"type":"string"},"name":{"type":"string"},"configDoc":{"type":"object","additionalProperties":{"nullable":true}},"createdAt":{"type":"string","nullable":true},"createdById":{"type":"string"},"lastUpdatedById":{"type":"string"},"createdByEmail":{"type":"string"},"lastUpdatedByEmail":{"type":"string"},"errorMessage":{"type":"string"},"verificationStatus":{"type":"string"}},"required":["id","type","name"]},"AIPlatform":{"type":"object","properties":{"id":{"type":"string"},"type":{"type":"string"},"name":{"type":"string"},"conf
igDoc":{"type":"object","additionalProperties":{"nullable":true}},"createdAt":{"type":"string","nullable":true},"createdById":{"type":"string"},"lastUpdatedById":{"type":"string"},"createdByEmail":{"type":"string"},"lastUpdatedByEmail":{"type":"string"},"errorMessage":{"type":"string"},"verificationStatus":{"type":"string"}},"required":["id","type","name"]},"PipelineSummary":{"type":"object","properties":{"id":{"type":"string"},"name":{"type":"string"},"documentCount":{"type":"number"},"sourceConnectorAuthIds":{"type":"array","items":{"type":"string"}},"destinationConnectorAuthIds":{"type":"array","items":{"type":"string"}},"aiPlatformAuthIds":{"type":"array","items":{"type":"string"}},"sourceConnectorTypes":{"type":"array","items":{"type":"string"}},"destinationConnectorTypes":{"type":"array","items":{"type":"string"}},"aiPlatformTypes":{"type":"array","items":{"type":"string"}},"createdAt":{"type":"string","nullable":true},"createdBy":{"type":"string"},"status":{"type":"string"},"configDoc":{"type":"object","additionalProperties":{"nullable":true}},"sourceConnectors":{"type":"array","items":{"$ref":"#/components/schemas/SourceConnector"}},"destinationConnectors":{"type":"array","items":{"$ref":"#/components/schemas/DestinationConnector"}},"aiPlatforms":{"type":"array","items":{"$ref":"#/components/schemas/AIPlatform"}}},"required":["id","name","documentCount","sourceConnectorAuthIds","destinationConnectorAuthIds","aiPlatformAuthIds","sourceConnectorTypes","destinationConnectorTypes","aiPlatformTypes","createdAt","createdBy","sourceConnectors","destinationConnectors","aiPlatforms"]},"GetPipelineResponse":{"type":"object","properties":{"message":{"type":"string"},"data":{"$ref":"#/components/schemas/PipelineSummary"}},"required":["message","data"]},"DeletePipelineResponse":{"type":"object","properties":{"message":{"type":"string"}},"required":["message"]},"PipelineEvents":{"type":"object","properties":{"id":{"type":"string"},"type":{"type":"string"},"timestamp":{"ty
pe":"string","nullable":true},"details":{"type":"object","additionalProperties":{"nullable":true}},"summary":{"type":"object","additionalProperties":{"nullable":true}}},"required":["id","type","timestamp"]},"GetPipelineEventsResponse":{"type":"object","properties":{"message":{"type":"string"},"nextToken":{"type":"string"},"data":{"type":"array","items":{"$ref":"#/components/schemas/PipelineEvents"}}},"required":["message","data"]},"PipelineMetrics":{"type":"object","properties":{"timestamp":{"type":"string","nullable":true},"newObjects":{"type":"number"},"changedObjects":{"type":"number"},"deletedObjects":{"type":"number"}},"required":["timestamp","newObjects","changedObjects","deletedObjects"]},"GetPipelineMetricsResponse":{"type":"object","properties":{"message":{"type":"string"},"data":{"type":"array","items":{"$ref":"#/components/schemas/PipelineMetrics"}}},"required":["message","data"]},"Document":{"type":"object","properties":{"relevancy":{"type":"number"},"id":{"type":"string"},"text":{"type":"string"},"chunk_id":{"type":"string"},"total_chunks":{"type":"string"},"origin":{"type":"string"},"origin_id":{"type":"string"},"similarity":{"type":"number"},"source":{"type":"string"},"unique_source":{"type":"string"},"source_display_name":{"type":"string"},"pipeline_id":{"type":"string"},"org_id":{"type":"string"}},"required":["relevancy","id","text","chunk_id","total_chunks","origin","origin_id","similarity","source","unique_source","source_display_name"],"additionalProperties":true},"RetrieveDocumentsResponse":{"type":"object","properties":{"question":{"type":"string"},"documents":{"type":"array","items":{"$ref":"#/components/schemas/Document"}},"average_relevancy":{"type":"number"},"ndcg":{"type":"number"}},"required":["question","documents","average_relevancy","ndcg"]},"RetrieveContextMessage":{"type":"object","properties":{"role":{"type":"string"},"content":{"type":"string"}},"required":["role","content"]},"RetrieveContext":{"type":"object","properties":{"messag
es":{"type":"array","items":{"$ref":"#/components/schemas/RetrieveContextMessage"}}},"required":["messages"]},"RetrieveDocumentsRequest":{"type":"object","properties":{"question":{"type":"string"},"numResults":{"type":"number","minimum":1},"rerank":{"type":"boolean","default":true},"metadata-filters":{"type":"array","items":{"type":"object","additionalProperties":{"nullable":true}}},"context":{"$ref":"#/components/schemas/RetrieveContext"}},"required":["question","numResults"]},"StartPipelineResponse":{"type":"object","properties":{"message":{"type":"string"}},"required":["message"]},"StopPipelineResponse":{"type":"object","properties":{"message":{"type":"string"}},"required":["message"]},"StartDeepResearchResponse":{"type":"object","properties":{"researchId":{"type":"string"}},"required":["researchId"]},"N8NConfig":{"type":"object","properties":{"account":{"type":"string"},"webhookPath":{"type":"string"},"headers":{"type":"object","additionalProperties":{"type":"string"}}},"required":["account","webhookPath"]},"StartDeepResearchRequest":{"type":"object","properties":{"query":{"type":"string"},"webSearch":{"type":"boolean","default":false},"schema":{"type":"string"},"n8n":{"$ref":"#/components/schemas/N8NConfig"}},"required":["query"]},"DeepResearchResult":{"type":"object","properties":{"success":{"type":"boolean"},"events":{"type":"array","items":{"type":"string"}},"markdown":{"type":"string"},"error":{"type":"string"}},"required":["success"]},"GetDeepResearchResponse":{"type":"object","properties":{"ready":{"type":"boolean"},"data":{"$ref":"#/components/schemas/DeepResearchResult"}},"required":["ready"]},"CreatedSourceConnector":{"type":"object","properties":{"name":{"type":"string"},"id":{"type":"string"}},"required":["name","id"]},"CreateSourceConnectorResponse":{"type":"object","properties":{"message":{"type":"string"},"connectors":{"type":"array","items":{"$ref":"#/components/schemas/CreatedSourceConnector"}}},"required":["message","connectors"]},"CreateSource
Connector":{"type":"object","properties":{"name":{"type":"string"},"type":{"$ref":"#/components/schemas/SourceConnectorType"},"config":{"type":"object","additionalProperties":{"nullable":true}}},"required":["name","type"]},"CreateSourceConnectorRequest":{"type":"array","items":{"$ref":"#/components/schemas/CreateSourceConnector"},"minItems":1},"UpdateSourceConnectorResponseData":{"type":"object","properties":{"updatedConnector":{"$ref":"#/components/schemas/SourceConnector"},"pipelineIds":{"type":"array","items":{"type":"string"}}},"required":["updatedConnector"]},"UpdateSourceConnectorResponse":{"type":"object","properties":{"message":{"type":"string"},"data":{"$ref":"#/components/schemas/UpdateSourceConnectorResponseData"}},"required":["message","data"]},"UpdateSourceConnectorRequest":{"type":"object","properties":{"config":{"type":"object","additionalProperties":{"nullable":true}}},"required":["config"]},"DeleteSourceConnectorResponse":{"type":"object","properties":{"message":{"type":"string"}},"required":["message"]},"CreatedDestinationConnector":{"type":"object","properties":{"name":{"type":"string"},"id":{"type":"string"}},"required":["name","id"]},"CreateDestinationConnectorResponse":{"type":"object","properties":{"message":{"type":"string"},"connectors":{"type":"array","items":{"$ref":"#/components/schemas/CreatedDestinationConnector"}}},"required":["message","connectors"]},"CreateDestinationConnector":{"type":"object","properties":{"name":{"type":"string"},"type":{"$ref":"#/components/schemas/DestinationConnectorType"},"config":{"type":"object","additionalProperties":{"nullable":true}}},"required":["name","type"]},"CreateDestinationConnectorRequest":{"type":"array","items":{"$ref":"#/components/schemas/CreateDestinationConnector"},"minItems":1},"UpdatedDestinationConnectorData":{"type":"object","properties":{"updatedConnector":{"$ref":"#/components/schemas/DestinationConnector"},"pipelineIds":{"type":"array","items":{"type":"string"}}},"required":["updatedC
onnector"]},"UpdateDestinationConnectorResponse":{"type":"object","properties":{"message":{"type":"string"},"data":{"$ref":"#/components/schemas/UpdatedDestinationConnectorData"}},"required":["message","data"]},"UpdateDestinationConnectorRequest":{"type":"object","properties":{"config":{"type":"object","additionalProperties":{"nullable":true}}},"required":["config"]},"DeleteDestinationConnectorResponse":{"type":"object","properties":{"message":{"type":"string"}},"required":["message"]},"CreatedAIPlatformConnector":{"type":"object","properties":{"name":{"type":"string"},"id":{"type":"string"}},"required":["name","id"]},"CreateAIPlatformConnectorResponse":{"type":"object","properties":{"message":{"type":"string"},"connectors":{"type":"array","items":{"$ref":"#/components/schemas/CreatedAIPlatformConnector"}}},"required":["message","connectors"]},"CreateAIPlatformConnector":{"type":"object","properties":{"name":{"type":"string"},"type":{"$ref":"#/components/schemas/AIPlatformType"},"config":{"type":"object","additionalProperties":{"nullable":true}}},"required":["name","type"]},"CreateAIPlatformConnectorRequest":{"type":"array","items":{"$ref":"#/components/schemas/CreateAIPlatformConnector"},"minItems":1},"UpdatedAIPlatformConnectorData":{"type":"object","properties":{"updatedConnector":{"$ref":"#/components/schemas/AIPlatform"},"pipelineIds":{"type":"array","items":{"type":"string"}}},"required":["updatedConnector"]},"UpdateAIPlatformConnectorResponse":{"type":"object","properties":{"message":{"type":"string"},"data":{"$ref":"#/components/schemas/UpdatedAIPlatformConnectorData"}},"required":["message","data"]},"UpdateAIPlatformConnectorRequest":{"type":"object","properties":{"config":{"type":"object","additionalProperties":{"nullable":true}}},"required":["config"]},"DeleteAIPlatformConnectorResponse":{"type":"object","properties":{"message":{"type":"string"}},"required":["message"]},"UploadFile":{"type":"object","properties":{"key":{"type":"string"},"name":{"type":"st
ring"},"size":{"type":"number"},"extension":{"type":"string"},"lastModified":{"type":"string","nullable":true},"metadata":{"type":"object","additionalProperties":{"type":"string"}}},"required":["key","name","size","lastModified","metadata"]},"GetUploadFilesResponse":{"type":"object","properties":{"message":{"type":"string"},"files":{"type":"array","items":{"$ref":"#/components/schemas/UploadFile"}}},"required":["message","files"]},"StartFileUploadToConnectorResponse":{"type":"object","properties":{"uploadUrl":{"type":"string"}},"required":["uploadUrl"]},"StartFileUploadToConnectorRequest":{"type":"object","properties":{"name":{"type":"string"},"contentType":{"type":"string"},"metadata":{"type":"string"}},"required":["name","contentType"]},"DeleteFileResponse":{"type":"object","properties":{"message":{"type":"string"},"fileName":{"type":"string"}},"required":["message","fileName"]},"StartExtractionResponse":{"type":"object","properties":{"message":{"type":"string"},"extractionId":{"type":"string"}},"required":["message","extractionId"]},"ExtractionType":{"type":"string","enum":["iris"],"default":"iris"},"ExtractionChunkingStrategy":{"type":"string","enum":["markdown"],"default":"markdown"},"MetadataExtractionStrategySchema":{"type":"object","properties":{"id":{"type":"string"},"schema":{"type":"string"}},"required":["id","schema"]},"MetadataExtractionStrategy":{"type":"object","properties":{"schemas":{"type":"array","items":{"$ref":"#/components/schemas/MetadataExtractionStrategySchema"}},"inferSchema":{"type":"boolean"}}},"StartExtractionRequest":{"type":"object","properties":{"fileId":{"type":"string"},"type":{"$ref":"#/components/schemas/ExtractionType"},"chunkingStrategy":{"$ref":"#/components/schemas/ExtractionChunkingStrategy"},"chunkSize":{"type":"number","default":256},"metadata":{"$ref":"#/components/schemas/MetadataExtractionStrategy"}},"required":["fileId"]},"ExtractionResult":{"type":"object","properties":{"success":{"type":"boolean"},"chunks":{"type":"ar
ray","items":{"type":"string"}},"text":{"type":"string"},"metadata":{"type":"string"},"metadataSchema":{"type":"string"},"chunksMetadata":{"type":"array","items":{"type":"string"}},"chunksSchema":{"type":"array","items":{"type":"string"}},"error":{"type":"string"}},"required":["success"]},"ExtractionResultResponse":{"type":"object","properties":{"ready":{"type":"boolean"},"data":{"$ref":"#/components/schemas/ExtractionResult"}},"required":["ready"]},"StartFileUploadResponse":{"type":"object","properties":{"fileId":{"type":"string"},"uploadUrl":{"type":"string"}},"required":["fileId","uploadUrl"]},"StartFileUploadRequest":{"type":"object","properties":{"name":{"type":"string"},"contentType":{"type":"string"}},"required":["name","contentType"]},"AddUserFromSourceConnectorResponse":{"type":"object","properties":{"message":{"type":"string"}},"required":["message"]},"AddUserToSourceConnectorRequest":{"type":"object","properties":{"userId":{"type":"string"},"selectedFiles":{"type":"object","additionalProperties":{"type":"object","properties":{"name":{"type":"string"},"mimeType":{"type":"string"}},"required":["name","mimeType"]}},"refreshToken":{"type":"string"}},"required":["userId","selectedFiles","refreshToken"]},"UpdateUserInSourceConnectorResponse":{"type":"object","properties":{"message":{"type":"string"}},"required":["message"]},"UpdateUserInSourceConnectorRequest":{"type":"object","properties":{"userId":{"type":"string"},"selectedFiles":{"type":"object","additionalProperties":{"type":"object","properties":{"name":{"type":"string"},"mimeType":{"type":"string"}},"required":["name","mimeType"]}},"refreshToken":{"type":"string"}},"required":["userId"]},"RemoveUserFromSourceConnectorResponse":{"type":"object","properties":{"message":{"type":"string"}},"required":["message"]},"RemoveUserFromSourceConnectorRequest":{"type":"object","properties":{"userId":{"type":"string"}},"required":["userId"]}},"parameters":{}},"paths":{"/org/{organization}/pipelines":{"post":{"operatio
nId":"createPipeline","summary":"Create a new source pipeline. Config fields for sources: Amazon S3 (AWS_S3): \nCheck for updates every (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array of text) | Azure Blob Storage (AZURE_BLOB): \nPolling Interval (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array of text) | Confluence (CONFLUENCE): \nSpaces (spaces): array of text, Root Parents (root-parents): array of text) | Discord (DISCORD): \nEmoji Filter (emoji): array of text, Author Filter (author): array of text, Ignore Author Filter (ignore-author): array of text, Limit (limit): number) | Dropbox (DROPBOX): \nRead from these folders (optional) (path-prefix): array of text) | Google Drive OAuth (GOOGLE_DRIVE_OAUTH): \nPolling Interval (seconds) (idle-time): number) | Google Drive (Service Account) (GOOGLE_DRIVE): \nRestrict ingest to these folder URLs (optional) (root-parents): array of text, Polling Interval (seconds) (idle-time): number) | Google Drive Multi-User (Vectorize) (GOOGLE_DRIVE_OAUTH_MULTI): \nPolling Interval (seconds) (idle-time): number) | Google Drive Multi-User (White Label) (GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM): \nPolling Interval (seconds) (idle-time): number) | Firecrawl (FIRECRAWL): \n) | GCP Cloud Storage (GCS): \nCheck for updates every (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array of text) | Intercom (INTERCOM): \nReindex Interval (seconds) (reindexIntervalSeconds): number, Limit (limit): number, Tags (tags): array of text) | OneDrive (ONE_DRIVE): \nRead starting from this folder (optional) (path-prefix): text) | SharePoint (SHAREPOINT): \nSite Name(s) (sites): array of text) | Web Crawler (WEB_CRAWLER): 
\nAdditional Allowed URLs or prefix(es) (allowed-domains-opt): array of url, Forbidden Paths (forbidden-paths): array of text, Throttle (ms) (min-time-between-requests): number, Max Error Count (max-error-count): number, Max URLs (max-urls): number, Max Depth (max-depth): number, Reindex Interval (seconds) (reindex-interval-seconds): number) | File Upload (FILE_UPLOAD): \n). Config fields for destinations: Couchbase Capella (CAPELLA): \nBucket Name (bucket): text, Scope Name (scope): text, Collection Name (collection): text, Search Index Name (index): text) | DataStax Astra (DATASTAX): \nCollection Name (collection): text) | Elasticsearch (ELASTIC): \nIndex Name (index): text) | Pinecone (PINECONE): \nIndex Name (index): text, Namespace (namespace): text) | SingleStore (SINGLESTORE): \nTable Name (table): text) | Milvus (MILVUS): \nCollection Name (collection): text) | PostgreSQL (POSTGRESQL): \nTable Name (table): text) | Qdrant (QDRANT): \nCollection Name (collection): text) | Supabase (SUPABASE): \nTable Name (table): text) | Weaviate (WEAVIATE): \nCollection Name (collection): text) | Azure AI Search (AZUREAISEARCH): \nIndex Name (index): text) | Built-in (VECTORIZE): \n) | Chroma (CHROMA): \nIndex Name (index): text) | MongoDB (MONGODB): \nIndex Name (index): text). 
Config fields for AI platforms: ","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/PipelineConfigurationSchema"}}}},"responses":{"200":{"description":"Pipeline created successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreatePipelineResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"get":{"operationId":"getPipelines","summary":"Get all existing pipelines","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"responses":{"200":{"description":"Get all pipelines","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetPipelinesResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/pipelines/{pipeline}":{"get":{"operationId":"getPipeline","summary":"Get a pipeline","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"pipeline","in":"path"}],"responses":{"200":{"description":"Pipeline fetched successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetPipelineResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not 
found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"delete":{"operationId":"deletePipeline","summary":"Delete a pipeline","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"pipeline","in":"path"}],"responses":{"200":{"description":"Pipeline deleted successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DeletePipelineResponse"}}}},"400":{"description":"Invalid 
request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/pipelines/{pipeline}/events":{"get":{"operationId":"getPipelineEvents","summary":"Get pipeline events","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"pipeline","in":"path"},{"schema":{"type":"string"},"required":false,"name":"nextToken","in":"query"}],"responses":{"200":{"description":"Pipeline events 
fetched successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetPipelineEventsResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/pipelines/{pipeline}/metrics":{"get":{"operationId":"getPipelineMetrics","summary":"Get pipeline 
metrics","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"pipeline","in":"path"}],"responses":{"200":{"description":"Pipeline metrics fetched successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetPipelineMetricsResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/pipelines/{pipeline}/retrieval":{"post":{"operationId":"retrieveDocuments","summary":"Retrieve documents from a pipeline","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"pipeline","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/RetrieveDocumentsRequest"}}}},"responses":{"200":{"description":"Documents retrieved successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RetrieveDocumentsResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not 
found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/pipelines/{pipeline}/start":{"post":{"operationId":"startPipeline","summary":"Start a pipeline","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"pipeline","in":"path"}],"responses":{"200":{"description":"Pipeline started successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/StartPipelineResponse"}}}},"400":{"description":"Invalid 
request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/pipelines/{pipeline}/stop":{"post":{"operationId":"stopPipeline","summary":"Stop a pipeline","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"pipeline","in":"path"}],"responses":{"200":{"description":"Pipeline stopped 
successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/StopPipelineResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/pipelines/{pipeline}/deep-research":{"post":{"operationId":"startDeepResearch","summary":"Start a deep 
research","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"pipeline","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StartDeepResearchRequest"}}}},"responses":{"200":{"description":"Deep Research started successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/StartDeepResearchResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/pipelines/{pipeline}/deep-research/{researchId}":{"get":{"operationId":"getDeepResearchResult","summary":"Get deep research result","tags":["Pipelines"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"pipeline","in":"path"},{"schema":{"type":"string"},"required":true,"name":"researchId","in":"path"}],"responses":{"200":{"description":"Get Deep Research was successful","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetDeepResearchResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not 
found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/connectors/sources":{"post":{"operationId":"createSourceConnector","summary":"Create a new source connector. Config values: Amazon S3 (AWS_S3): \nName (name): text, Access Key (access-key): text, Secret Key (secret-key): text, Bucket Name (bucket-name): text, Endpoint (endpoint): url, Region (region): text, Allow as archive destination (archiver): boolean) | Azure Blob Storage (AZURE_BLOB): \nName (name): text, Storage Account Name (storage-account-name): text, Storage Account Key (storage-account-key): text, Container (container): text, Endpoint (endpoint): url) | Confluence (CONFLUENCE): \nName (name): text, Username (username): text, API Token (api-token): text, Domain (domain): text) | Discord (DISCORD): \nName (name): text, Server ID (guild-id): text, Bot token (bot-token): text, Channel ID (channel-ids): array of text) | Dropbox (DROPBOX): \nName (name): text) | Google Drive OAuth (GOOGLE_DRIVE_OAUTH): \nName (name): text) | Google Drive (Service Account) (GOOGLE_DRIVE): \nName (name): text, Service Account JSON (service-account-json): textarea) | Google Drive Multi-User (Vectorize) (GOOGLE_DRIVE_OAUTH_MULTI): \nName (name): text) | Google Drive Multi-User (White Label) (GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM): \nName (name): text, OAuth2 Client Id (oauth2-client-id): text, OAuth2 Client Secret (oauth2-client-secret): text) | Firecrawl (FIRECRAWL): \nName 
(name): text, API Key (api-key): text) | GCP Cloud Storage (GCS): \nName (name): text, Service Account JSON (service-account-json): textarea, Bucket (bucket-name): text) | Intercom (INTERCOM): \nName (name): text, Access Token (intercomAccessToken): text) | OneDrive (ONE_DRIVE): \nName (name): text, Client Id (ms-client-id): text, Tenant Id (ms-tenant-id): text, Client Secret (ms-client-secret): text, Users (users): array of text) | SharePoint (SHAREPOINT): \nName (name): text, Client Id (ms-client-id): text, Tenant Id (ms-tenant-id): text, Client Secret (ms-client-secret): text) | Web Crawler (WEB_CRAWLER): \nName (name): text, Seed URL(s) (seed-urls): array of url) | File Upload (FILE_UPLOAD): \nName (name): text)","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateSourceConnectorRequest"}}}},"responses":{"200":{"description":"Connector successfully created","content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateSourceConnectorResponse"}}}},"400":{"description":"Invalid 
request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"get":{"operationId":"getSourceConnectors","summary":"Get all existing source connectors","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"responses":{"200":{"description":"Get all source 
connectors","content":{"application/json":{"schema":{"type":"object","properties":{"sourceConnectors":{"type":"array","items":{"$ref":"#/components/schemas/SourceConnector"}}},"required":["sourceConnectors"]}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/connectors/sources/{sourceConnectorId}":{"get":{"operationId":"getSourceConnector","summary":"Get a source 
connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"sourceConnectorId","in":"path"}],"responses":{"200":{"description":"Get a source connector","content":{"application/json":{"schema":{"$ref":"#/components/schemas/SourceConnector"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"patch":{"operationId":"updateSourceConnector","summary":"Update a source connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"sourceConnectorId","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateSourceConnectorRequest"}}}},"responses":{"200":{"description":"Source connector successfully updated","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateSourceConnectorResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not 
found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"delete":{"operationId":"deleteSourceConnector","summary":"Delete a source connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"sourceConnectorId","in":"path"}],"responses":{"200":{"description":"Source connector successfully deleted","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DeleteSourceConnectorResponse"}}}},"400":{"description":"Invalid 
request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/connectors/destinations":{"post":{"operationId":"createDestinationConnector","summary":"Create a new destination connector. 
Config values: Couchbase Capella (CAPELLA): \nName (name): text, Cluster Access Name (username): text, Cluster Access Password (password): text, Connection String (connection-string): text) | DataStax Astra (DATASTAX): \nName (name): text, API Endpoint (endpoint_secret): text, Application Token (token): text) | Elasticsearch (ELASTIC): \nName (name): text, Host (host): text, Port (port): text, API Key (api-key): text) | Pinecone (PINECONE): \nName (name): text, API Key (api-key): text) | SingleStore (SINGLESTORE): \nName (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Milvus (MILVUS): \nName (name): text, Public Endpoint (url): text, Token (token): text, Username (username): text, Password (password): text) | PostgreSQL (POSTGRESQL): \nName (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Qdrant (QDRANT): \nName (name): text, Host (host): text, API Key (api-key): text) | Supabase (SUPABASE): \nName (name): text, Host (host): text, Port (port): number, Database (database): text, Username (username): text, Password (password): text) | Weaviate (WEAVIATE): \nName (name): text, Endpoint (host): text, API Key (api-key): text) | Azure AI Search (AZUREAISEARCH): \nName (name): text, Azure AI Search Service Name (service-name): text, API Key (api-key): text) | Built-in (VECTORIZE): \n) | Chroma (CHROMA): \nName (name): text, API Key (apiKey): text) | MongoDB (MONGODB): \nName (name): text, API Key (apiKey): text)","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateDestinationConnectorRequest"}}}},"responses":{"200":{"description":"Connector successfully 
created","content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateDestinationConnectorResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"get":{"operationId":"getDestinationConnectors","summary":"Get all existing destination connectors","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"responses":{"200":{"description":"Get all destination 
connectors","content":{"application/json":{"schema":{"type":"object","properties":{"destinationConnectors":{"type":"array","items":{"$ref":"#/components/schemas/DestinationConnector"}}},"required":["destinationConnectors"]}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/connectors/destinations/{destinationConnectorId}":{"get":{"operationId":"getDestinationConnector","summary":"Get a destination 
connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"destinationConnectorId","in":"path"}],"responses":{"200":{"description":"Get a destination connector","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DestinationConnector"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"patch":{"operationId":"updateDestinationConnector","summary":"Update a destination connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"destinationConnectorId","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateDestinationConnectorRequest"}}}},"responses":{"200":{"description":"Destination connector successfully updated","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateDestinationConnectorResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not 
found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"delete":{"operationId":"deleteDestinationConnector","summary":"Delete a destination connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"destinationConnectorId","in":"path"}],"responses":{"200":{"description":"Destination connector successfully deleted","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DeleteDestinationConnectorResponse"}}}},"400":{"description":"Invalid 
request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/connectors/aiplatforms":{"post":{"operationId":"createAIPlatformConnector","summary":"Create a new AI Platform connector. 
Config values: Amazon Bedrock (BEDROCK): \nName (name): text, Access Key (access-key): text, Secret Key (key): text) | Google Vertex AI (VERTEX): \nName (name): text, Service Account Json (key): textarea, Region (region): text) | OpenAI (OPENAI): \nName (name): text, API Key (key): text) | Voyage AI (VOYAGE): \nName (name): text, API Key (key): text) | Built-in (VECTORIZE): \n)","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateAIPlatformConnectorRequest"}}}},"responses":{"200":{"description":"Connector successfully created","content":{"application/json":{"schema":{"$ref":"#/components/schemas/CreateAIPlatformConnectorResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not 
found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"get":{"operationId":"getAIPlatformConnectors","summary":"Get all existing AI Platform connectors","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"responses":{"200":{"description":"Get all existing AI Platform connectors","content":{"application/json":{"schema":{"type":"object","properties":{"aiPlatformConnectors":{"type":"array","items":{"$ref":"#/components/schemas/AIPlatform"}}},"required":["aiPlatformConnectors"]}}}},"400":{"description":"Invalid 
request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/connectors/aiplatforms/{aiplatformId}":{"get":{"operationId":"getAIPlatformConnector","summary":"Get an AI platform connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"aiplatformId","in":"path"}],"responses":{"200":{"description":"Get an AI platform 
connector","content":{"application/json":{"schema":{"$ref":"#/components/schemas/AIPlatform"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"patch":{"operationId":"updateAIPlatformConnector","summary":"Update an AI Platform 
connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"aiplatformId","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateAIPlatformConnectorRequest"}}}},"responses":{"200":{"description":"AI Platform connector successfully updated","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateAIPlatformConnectorResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"delete":{"operationId":"deleteAIPlatform","summary":"Delete an AI platform connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"aiplatformId","in":"path"}],"responses":{"200":{"description":"AI Platform connector successfully deleted","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DeleteAIPlatformConnectorResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not 
found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/uploads/{connectorId}/files":{"get":{"operationId":"getUploadFilesFromConnector","summary":"Get uploaded files from a file upload connector","tags":["Uploads"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"connectorId","in":"path"}],"responses":{"200":{"description":"Files retrieved successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/GetUploadFilesResponse"}}}},"400":{"description":"Invalid 
request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"put":{"operationId":"startFileUploadToConnector","summary":"Upload a file to a file upload 
connector","tags":["Uploads"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"connectorId","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StartFileUploadToConnectorRequest"}}}},"responses":{"200":{"description":"File ready to be uploaded","content":{"application/json":{"schema":{"$ref":"#/components/schemas/StartFileUploadToConnectorResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"delete":{"operationId":"deleteFileFromConnector","summary":"Delete a file from a file upload connector","tags":["Uploads"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"connectorId","in":"path"}],"responses":{"200":{"description":"File deleted successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/DeleteFileResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not 
found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/extraction":{"post":{"operationId":"startExtraction","summary":"Start content extraction from a file","tags":["Extraction"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StartExtractionRequest"}}}},"responses":{"200":{"description":"Extraction started successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/StartExtractionResponse"}}}},"400":{"description":"Invalid 
request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/extraction/{extractionId}":{"get":{"operationId":"getExtractionResult","summary":"Get extraction result","tags":["Extraction"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"extractionId","in":"path"}],"responses":{"200":{"description":"Extraction started 
successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/ExtractionResultResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/files":{"post":{"operationId":"startFileUpload","summary":"Upload a generic file to the 
platform","tags":["Files"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/StartFileUploadRequest"}}}},"responses":{"200":{"description":"File upload started successfully","content":{"application/json":{"schema":{"$ref":"#/components/schemas/StartFileUploadResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}},"/org/{organization}/connectors/sources/{sourceConnectorId}/users":{"post":{"operationId":"addUserToSourceConnector","summary":"Add a user to a source connector","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"sourceConnectorId","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/AddUserToSourceConnectorRequest"}}}},"responses":{"200":{"description":"User successfully added to the source connector","content":{"application/json":{"schema":{"$ref":"#/components/schemas/AddUserFromSourceConnectorResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not 
found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"patch":{"operationId":"updateUserInSourceConnector","summary":"Update a source connector user","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"sourceConnectorId","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateUserInSourceConnectorRequest"}}}},"responses":{"200":{"description":"User successfully updated in the source connector","content":{"application/json":{"schema":{"$ref":"#/components/schemas/UpdateUserInSourceConnectorResponse"}}}},"400":{"description":"Invalid 
request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}},"delete":{"operationId":"deleteUserFromSourceConnector","summary":"Delete a source connector 
user","tags":["Connectors"],"security":[{"bearerAuth":[]}],"parameters":[{"schema":{"type":"string"},"required":true,"name":"organization","in":"path"},{"schema":{"type":"string"},"required":true,"name":"sourceConnectorId","in":"path"}],"requestBody":{"required":true,"content":{"application/json":{"schema":{"$ref":"#/components/schemas/RemoveUserFromSourceConnectorRequest"}}}},"responses":{"200":{"description":"User successfully removed from the source connector","content":{"application/json":{"schema":{"$ref":"#/components/schemas/RemoveUserFromSourceConnectorResponse"}}}},"400":{"description":"Invalid request","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"401":{"description":"Unauthorized","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"403":{"description":"Forbidden","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"404":{"description":"Not found","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}},"500":{"description":"Internal server 
error","content":{"application/json":{"schema":{"type":"object","properties":{"error":{"type":"string"},"details":{"type":"string"},"failedUpdates":{"type":"array","items":{"type":"string"}},"successfulUpdates":{"type":"array","items":{"type":"string"}}},"required":["error"]}}}}}}}}} \ No newline at end of file +{ + "openapi": "3.0.0", + "info": { + "title": "Vectorize API (Beta)", + "version": "0.0.1", + "description": "API for Vectorize services", + "contact": { + "name": "Vectorize", + "url": "https://vectorize.io" + } + }, + "servers": [ + { + "url": "https://api.vectorize.io/v1", + "description": "Vectorize API" + } + ], + "components": { + "securitySchemes": { + "bearerAuth": { + "type": "http", + "scheme": "bearer", + "bearerFormat": "JWT" + } + }, + "schemas": { + "CreatePipelineResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "data": { + "type": "object", + "properties": { + "id": { + "type": "string" + } + }, + "required": [ + "id" + ] + } + }, + "required": [ + "message", + "data" + ] + }, + "SourceConnectorType": { + "type": "string", + "enum": [ + "AWS_S3", + "AZURE_BLOB", + "CONFLUENCE", + "DISCORD", + "DROPBOX", + "DROPBOX_OAUTH", + "DROPBOX_OAUTH_MULTI", + "DROPBOX_OAUTH_MULTI_CUSTOM", + "GOOGLE_DRIVE_OAUTH", + "GOOGLE_DRIVE", + "GOOGLE_DRIVE_OAUTH_MULTI", + "GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM", + "FIRECRAWL", + "GCS", + "INTERCOM", + "NOTION", + "NOTION_OAUTH_MULTI", + "NOTION_OAUTH_MULTI_CUSTOM", + "ONE_DRIVE", + "SHAREPOINT", + "WEB_CRAWLER", + "FILE_UPLOAD", + "GITHUB", + "FIREFLIES" + ] + }, + "SourceConnectorSchema": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "type": { + "$ref": "#/components/schemas/SourceConnectorType" + }, + "config": { + "type": "object", + "additionalProperties": { + "nullable": true + } + } + }, + "required": [ + "id", + "type", + "config" + ], + "additionalProperties": false + }, + "DestinationConnectorType": { + "type": 
"string", + "enum": [ + "CAPELLA", + "DATASTAX", + "ELASTIC", + "PINECONE", + "SINGLESTORE", + "MILVUS", + "POSTGRESQL", + "QDRANT", + "SUPABASE", + "WEAVIATE", + "AZUREAISEARCH", + "TURBOPUFFER" + ] + }, + "DestinationConnectorSchema": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "type": { + "$ref": "#/components/schemas/DestinationConnectorType" + }, + "config": { + "type": "object", + "additionalProperties": { + "nullable": true + } + } + }, + "required": [ + "id", + "type" + ], + "additionalProperties": false + }, + "AIPlatformType": { + "type": "string", + "enum": [ + "BEDROCK", + "VERTEX", + "OPENAI", + "VOYAGE" + ] + }, + "AIPlatformConfigSchema": { + "type": "object", + "properties": { + "embeddingModel": { + "type": "string", + "enum": [ + "VECTORIZE_OPEN_AI_TEXT_EMBEDDING_2", + "VECTORIZE_OPEN_AI_TEXT_EMBEDDING_3_LARGE", + "VECTORIZE_OPEN_AI_TEXT_EMBEDDING_3_SMALL", + "VECTORIZE_VOYAGE_AI_2", + "VECTORIZE_VOYAGE_AI_3", + "VECTORIZE_VOYAGE_AI_3_LITE", + "VECTORIZE_VOYAGE_AI_3_LARGE", + "VECTORIZE_VOYAGE_AI_FINANCE_2", + "VECTORIZE_VOYAGE_AI_MULTILINGUAL_2", + "VECTORIZE_VOYAGE_AI_LAW_2", + "VECTORIZE_VOYAGE_AI_CODE_2", + "VECTORIZE_TITAN_TEXT_EMBEDDING_2", + "VECTORIZE_TITAN_TEXT_EMBEDDING_1", + "OPEN_AI_TEXT_EMBEDDING_2", + "OPEN_AI_TEXT_EMBEDDING_3_SMALL", + "OPEN_AI_TEXT_EMBEDDING_3_LARGE", + "VOYAGE_AI_2", + "VOYAGE_AI_3", + "VOYAGE_AI_3_LITE", + "VOYAGE_AI_3_LARGE", + "VOYAGE_AI_FINANCE_2", + "VOYAGE_AI_MULTILINGUAL_2", + "VOYAGE_AI_LAW_2", + "VOYAGE_AI_CODE_2", + "TITAN_TEXT_EMBEDDING_1", + "TITAN_TEXT_EMBEDDING_2", + "VERTEX_TEXT_EMBEDDING_4", + "VERTEX_TEXT_EMBEDDING_GECKO_3", + "VERTEX_GECKO_MULTILINGUAL_1", + "VERTEX_MULTILINGUAL_EMBEDDING_2" + ] + }, + "chunkingStrategy": { + "type": "string", + "enum": [ + "FIXED", + "SENTENCE", + "PARAGRAPH", + "MARKDOWN" + ] + }, + "chunkSize": { + "type": "integer", + "minimum": 1 + }, + "chunkOverlap": { + "type": "integer", + "minimum": 0 + }, + 
"dimensions": { + "type": "integer", + "minimum": 1 + }, + "extractionStrategy": { + "type": "string", + "enum": [ + "FAST", + "IRIS", + "MIXED" + ] + } + }, + "additionalProperties": false + }, + "AIPlatformSchema": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid" + }, + "type": { + "$ref": "#/components/schemas/AIPlatformType" + }, + "config": { + "$ref": "#/components/schemas/AIPlatformConfigSchema" + } + }, + "required": [ + "id", + "type", + "config" + ], + "additionalProperties": false + }, + "ScheduleSchemaType": { + "type": "string", + "enum": [ + "manual", + "realtime", + "custom" + ] + }, + "ScheduleSchema": { + "type": "object", + "properties": { + "type": { + "$ref": "#/components/schemas/ScheduleSchemaType" + } + }, + "required": [ + "type" + ] + }, + "PipelineConfigurationSchema": { + "type": "object", + "properties": { + "sourceConnectors": { + "$ref": "#/components/schemas/PipelineSourceConnectorRequest" + }, + "destinationConnector": { + "$ref": "#/components/schemas/PipelineDestinationConnectorRequest" + }, + "aiPlatform": { + "$ref": "#/components/schemas/PipelineAIPlatformRequest" + }, + "pipelineName": { + "type": "string", + "minLength": 1 + }, + "schedule": { + "$ref": "#/components/schemas/ScheduleSchema" + } + }, + "required": [ + "sourceConnectors", + "destinationConnector", + "aiPlatform", + "pipelineName", + "schedule" + ], + "additionalProperties": false + }, + "PipelineListSummary": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "documentCount": { + "type": "number" + }, + "sourceConnectorAuthIds": { + "type": "array", + "items": { + "type": "string" + } + }, + "destinationConnectorAuthIds": { + "type": "array", + "items": { + "type": "string" + } + }, + "aiPlatformAuthIds": { + "type": "array", + "items": { + "type": "string" + } + }, + "sourceConnectorTypes": { + "type": "array", + "items": { + "type": "string" + } + }, + 
"destinationConnectorTypes": { + "type": "array", + "items": { + "type": "string" + } + }, + "aiPlatformTypes": { + "type": "array", + "items": { + "type": "string" + } + }, + "createdAt": { + "type": "string", + "nullable": true + }, + "createdBy": { + "type": "string" + }, + "status": { + "type": "string" + }, + "configDoc": { + "type": "object", + "additionalProperties": { + "nullable": true + } + } + }, + "required": [ + "id", + "name", + "documentCount", + "sourceConnectorAuthIds", + "destinationConnectorAuthIds", + "aiPlatformAuthIds", + "sourceConnectorTypes", + "destinationConnectorTypes", + "aiPlatformTypes", + "createdAt", + "createdBy" + ] + }, + "GetPipelinesResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PipelineListSummary" + } + } + }, + "required": [ + "message", + "data" + ] + }, + "SourceConnector": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "name": { + "type": "string" + }, + "configDoc": { + "type": "object", + "additionalProperties": { + "nullable": true + } + }, + "createdAt": { + "type": "string", + "nullable": true + }, + "createdById": { + "type": "string" + }, + "lastUpdatedById": { + "type": "string" + }, + "createdByEmail": { + "type": "string" + }, + "lastUpdatedByEmail": { + "type": "string" + }, + "errorMessage": { + "type": "string" + }, + "verificationStatus": { + "type": "string" + } + }, + "required": [ + "id", + "type", + "name" + ] + }, + "DestinationConnector": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "name": { + "type": "string" + }, + "configDoc": { + "type": "object", + "additionalProperties": { + "nullable": true + } + }, + "createdAt": { + "type": "string", + "nullable": true + }, + "createdById": { + "type": "string" + }, + "lastUpdatedById": { + "type": "string" + }, 
+ "createdByEmail": { + "type": "string" + }, + "lastUpdatedByEmail": { + "type": "string" + }, + "errorMessage": { + "type": "string" + }, + "verificationStatus": { + "type": "string" + } + }, + "required": [ + "id", + "type", + "name" + ] + }, + "AIPlatform": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "name": { + "type": "string" + }, + "configDoc": { + "type": "object", + "additionalProperties": { + "nullable": true + } + }, + "createdAt": { + "type": "string", + "nullable": true + }, + "createdById": { + "type": "string" + }, + "lastUpdatedById": { + "type": "string" + }, + "createdByEmail": { + "type": "string" + }, + "lastUpdatedByEmail": { + "type": "string" + }, + "errorMessage": { + "type": "string" + }, + "verificationStatus": { + "type": "string" + } + }, + "required": [ + "id", + "type", + "name" + ] + }, + "PipelineSummary": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "documentCount": { + "type": "number" + }, + "sourceConnectorAuthIds": { + "type": "array", + "items": { + "type": "string" + } + }, + "destinationConnectorAuthIds": { + "type": "array", + "items": { + "type": "string" + } + }, + "aiPlatformAuthIds": { + "type": "array", + "items": { + "type": "string" + } + }, + "sourceConnectorTypes": { + "type": "array", + "items": { + "type": "string" + } + }, + "destinationConnectorTypes": { + "type": "array", + "items": { + "type": "string" + } + }, + "aiPlatformTypes": { + "type": "array", + "items": { + "type": "string" + } + }, + "createdAt": { + "type": "string", + "nullable": true + }, + "createdBy": { + "type": "string" + }, + "status": { + "type": "string" + }, + "configDoc": { + "type": "object", + "additionalProperties": { + "nullable": true + } + }, + "sourceConnectors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SourceConnector" + } + }, + "destinationConnectors": { + "type": 
"array", + "items": { + "$ref": "#/components/schemas/DestinationConnector" + } + }, + "aiPlatforms": { + "type": "array", + "items": { + "$ref": "#/components/schemas/AIPlatform" + } + } + }, + "required": [ + "id", + "name", + "documentCount", + "sourceConnectorAuthIds", + "destinationConnectorAuthIds", + "aiPlatformAuthIds", + "sourceConnectorTypes", + "destinationConnectorTypes", + "aiPlatformTypes", + "createdAt", + "createdBy", + "sourceConnectors", + "destinationConnectors", + "aiPlatforms" + ] + }, + "GetPipelineResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "data": { + "$ref": "#/components/schemas/PipelineSummary" + } + }, + "required": [ + "message", + "data" + ] + }, + "DeletePipelineResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "required": [ + "message" + ] + }, + "PipelineEvents": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "timestamp": { + "type": "string", + "nullable": true + }, + "details": { + "type": "object", + "additionalProperties": { + "nullable": true + } + }, + "summary": { + "type": "object", + "additionalProperties": { + "nullable": true + } + } + }, + "required": [ + "id", + "type", + "timestamp" + ] + }, + "GetPipelineEventsResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "nextToken": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PipelineEvents" + } + } + }, + "required": [ + "message", + "data" + ] + }, + "PipelineMetrics": { + "type": "object", + "properties": { + "timestamp": { + "type": "string", + "nullable": true + }, + "newObjects": { + "type": "number" + }, + "changedObjects": { + "type": "number" + }, + "deletedObjects": { + "type": "number" + } + }, + "required": [ + "timestamp", + "newObjects", + "changedObjects", + "deletedObjects" + ] + }, + 
"GetPipelineMetricsResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PipelineMetrics" + } + } + }, + "required": [ + "message", + "data" + ] + }, + "Document": { + "type": "object", + "properties": { + "relevancy": { + "type": "number" + }, + "id": { + "type": "string" + }, + "text": { + "type": "string" + }, + "chunk_id": { + "type": "string" + }, + "total_chunks": { + "type": "string" + }, + "origin": { + "type": "string" + }, + "origin_id": { + "type": "string" + }, + "similarity": { + "type": "number" + }, + "source": { + "type": "string" + }, + "unique_source": { + "type": "string" + }, + "source_display_name": { + "type": "string" + }, + "pipeline_id": { + "type": "string" + }, + "org_id": { + "type": "string" + } + }, + "required": [ + "relevancy", + "id", + "text", + "chunk_id", + "total_chunks", + "origin", + "origin_id", + "similarity", + "source", + "unique_source", + "source_display_name" + ], + "additionalProperties": true + }, + "RetrieveDocumentsResponse": { + "type": "object", + "properties": { + "question": { + "type": "string" + }, + "documents": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Document" + } + }, + "average_relevancy": { + "type": "number" + }, + "ndcg": { + "type": "number" + } + }, + "required": [ + "question", + "documents", + "average_relevancy", + "ndcg" + ] + }, + "RetrieveContextMessage": { + "type": "object", + "properties": { + "role": { + "type": "string" + }, + "content": { + "type": "string" + } + }, + "required": [ + "role", + "content" + ] + }, + "RetrieveContext": { + "type": "object", + "properties": { + "messages": { + "type": "array", + "items": { + "$ref": "#/components/schemas/RetrieveContextMessage" + } + } + }, + "required": [ + "messages" + ] + }, + "AdvancedQuery": { + "type": "object", + "properties": { + "mode": { + "type": "string", + "enum": ["text", "vector", "hybrid"], + 
"description": "Search mode: 'text', 'vector', or 'hybrid'. Defaults to 'vector' if not specified." + }, + "text-fields": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Fields to perform text search on." + }, + "match-type": { + "type": "string", + "enum": ["match", "match_phrase", "multi_match"], + "description": "Type of text match to perform." + }, + "text-boost": { + "type": "number", + "format": "float", + "description": "Multiplier for text search scores." + }, + "filters": { + "type": "object", + "description": "Elasticsearch-compatible filter object." + } + }, + "description": "Advanced query parameters for enhanced search capabilities." +}, + "RetrieveDocumentsRequest": { + "type": "object", + "properties": { + "question": { + "type": "string" + }, + "numResults": { + "type": "number", + "minimum": 1 + }, + "rerank": { + "type": "boolean", + "default": true + }, + "metadata-filters": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": { + "nullable": true + } + } + }, + "context": { + "$ref": "#/components/schemas/RetrieveContext" + }, + "advanced-query": { + "$ref": "#/components/schemas/AdvancedQuery" + } + }, + "required": [ + "question", + "numResults" + ] + }, + "StartPipelineResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "required": [ + "message" + ] + }, + "StopPipelineResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "required": [ + "message" + ] + }, + "StartDeepResearchResponse": { + "type": "object", + "properties": { + "researchId": { + "type": "string" + } + }, + "required": [ + "researchId" + ] + }, + "N8NConfig": { + "type": "object", + "properties": { + "account": { + "type": "string" + }, + "webhookPath": { + "type": "string" + }, + "headers": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "required": [ + "account", + "webhookPath" + ] + }, + 
"StartDeepResearchRequest": { + "type": "object", + "properties": { + "query": { + "type": "string" + }, + "webSearch": { + "type": "boolean", + "default": false + }, + "schema": { + "type": "string" + }, + "n8n": { + "$ref": "#/components/schemas/N8NConfig" + } + }, + "required": [ + "query" + ] + }, + "DeepResearchResult": { + "type": "object", + "properties": { + "success": { + "type": "boolean" + }, + "events": { + "type": "array", + "items": { + "type": "string" + } + }, + "markdown": { + "type": "string" + }, + "error": { + "type": "string" + } + }, + "required": [ + "success" + ] + }, + "GetDeepResearchResponse": { + "type": "object", + "properties": { + "ready": { + "type": "boolean" + }, + "data": { + "$ref": "#/components/schemas/DeepResearchResult" + } + }, + "required": [ + "ready" + ] + }, + "CreatedSourceConnector": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "id": { + "type": "string" + } + }, + "required": [ + "name", + "id" + ] + }, + "CreateSourceConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "connectors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CreatedSourceConnector" + } + } + }, + "required": [ + "message", + "connectors" + ] + }, + "CreateSourceConnector": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "type": { + "$ref": "#/components/schemas/SourceConnectorType" + }, + "config": { + "type": "object", + "additionalProperties": { + "nullable": true + } + } + }, + "required": [ + "name", + "type" + ] + }, + "CreateSourceConnectorRequest": { + "type": "array", + "minItems": 1, + "items": { + "oneOf": [ + { + "type": "object", + "title": "Amazon S3", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "AWS_S3" + ], + "description": "Connector type (must be 
\"AWS_S3\")" + }, + "config": { + "$ref": "#/components/schemas/AWS_S3Config" + } + }, + "example": { + "name": "Amazon S3 Example", + "type": "AWS_S3", + "config": { + "access-key": "Enter Access Key", + "secret-key": "Enter Secret Key", + "bucket-name": "Enter your S3 Bucket Name", + "region": "Region Name", + "archiver": false, + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5, + "path-prefix": "example-path-prefix" + } + } + }, + { + "type": "object", + "title": "Azure Blob Storage", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "AZURE_BLOB" + ], + "description": "Connector type (must be \"AZURE_BLOB\")" + }, + "config": { + "$ref": "#/components/schemas/AZURE_BLOBConfig" + } + }, + "example": { + "name": "Azure Blob Storage Example", + "type": "AZURE_BLOB", + "config": { + "storage-account-name": "Enter Storage Account Name", + "storage-account-key": "Enter Storage Account Key", + "container": "Enter Container Name", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5, + "path-prefix": "example-path-prefix" + } + } + }, + { + "type": "object", + "title": "Confluence", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "CONFLUENCE" + ], + "description": "Connector type (must be \"CONFLUENCE\")" + }, + "config": { + "$ref": "#/components/schemas/CONFLUENCEConfig" + } + }, + "example": { + "name": "Confluence Example", + "type": "CONFLUENCE", + "config": { + "username": "Enter your Confluence username", + "api-token": "Enter your Confluence API 
token", + "domain": "Enter your Confluence domain (e.g. my-domain.atlassian.net or confluence..com)", + "spaces": "Spaces to include (name, key or id)" + } + } + }, + { + "type": "object", + "title": "Discord", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "DISCORD" + ], + "description": "Connector type (must be \"DISCORD\")" + }, + "config": { + "$ref": "#/components/schemas/DISCORDConfig" + } + }, + "example": { + "name": "Discord Example", + "type": "DISCORD", + "config": { + "server-id": "Enter Server ID", + "bot-token": "Enter Token", + "channel-ids": "Enter channel ID", + "limit": 10000, + "thread-message-inclusion": "ALL", + "filter-logic": "AND", + "thread-message-mode": "CONCATENATE" + } + } + }, + { + "type": "object", + "title": "Dropbox", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "DROPBOX" + ], + "description": "Connector type (must be \"DROPBOX\")" + }, + "config": { + "$ref": "#/components/schemas/DROPBOXConfig" + } + }, + "example": { + "name": "Dropbox (Legacy) Example", + "type": "DROPBOX", + "config": { + "path-prefix": "Enter Path: /exampleFolder/subFolder" + } + } + }, + { + "type": "object", + "title": "Dropbox Oauth", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "DROPBOX_OAUTH" + ], + "description": "Connector type (must be \"DROPBOX_OAUTH\")" + }, + "config": { + "$ref": "#/components/schemas/DROPBOX_OAUTHAuthConfig" + } + }, + "example": { + "name": "Dropbox OAuth Example", + "type": "DROPBOX_OAUTH", + "config": {} + } + }, + { + "type": "object", + "title": "Dropbox Oauth Multi", + "required": [ + 
"name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "DROPBOX_OAUTH_MULTI" + ], + "description": "Connector type (must be \"DROPBOX_OAUTH_MULTI\")" + }, + "config": { + "$ref": "#/components/schemas/DROPBOX_OAUTH_MULTIAuthConfig" + } + }, + "example": { + "name": "Dropbox Multi-User (Vectorize) Example", + "type": "DROPBOX_OAUTH_MULTI", + "config": {} + } + }, + { + "type": "object", + "title": "Dropbox Oauth Multi Custom", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "DROPBOX_OAUTH_MULTI_CUSTOM" + ], + "description": "Connector type (must be \"DROPBOX_OAUTH_MULTI_CUSTOM\")" + }, + "config": { + "$ref": "#/components/schemas/DROPBOX_OAUTH_MULTI_CUSTOMAuthConfig" + } + }, + "example": { + "name": "Dropbox Multi-User (White Label) Example", + "type": "DROPBOX_OAUTH_MULTI_CUSTOM", + "config": { + "app-key": "Enter App Key", + "app-secret": "Enter App Secret" + } + } + }, + { + "type": "object", + "title": "Google Drive OAuth", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "GOOGLE_DRIVE_OAUTH" + ], + "description": "Connector type (must be \"GOOGLE_DRIVE_OAUTH\")" + }, + "config": { + "$ref": "#/components/schemas/GOOGLE_DRIVE_OAUTHConfig" + } + }, + "example": { + "name": "Google Drive OAuth Example", + "type": "GOOGLE_DRIVE_OAUTH", + "config": { + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5 + } + } + }, + { + "type": "object", + "title": "Google Drive", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { 
+ "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "GOOGLE_DRIVE" + ], + "description": "Connector type (must be \"GOOGLE_DRIVE\")" + }, + "config": { + "$ref": "#/components/schemas/GOOGLE_DRIVEConfig" + } + }, + "example": { + "name": "Google Drive (Service Account) Example", + "type": "GOOGLE_DRIVE", + "config": { + "service-account-json": "Enter the JSON key file for the service account", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5 + } + } + }, + { + "type": "object", + "title": "Google Drive Oauth Multi", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "GOOGLE_DRIVE_OAUTH_MULTI" + ], + "description": "Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI\")" + }, + "config": { + "$ref": "#/components/schemas/GOOGLE_DRIVE_OAUTH_MULTIConfig" + } + }, + "example": { + "name": "Google Drive Multi-User (Vectorize) Example", + "type": "GOOGLE_DRIVE_OAUTH_MULTI", + "config": { + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5 + } + } + }, + { + "type": "object", + "title": "Google Drive Oauth Multi Custom", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM" + ], + "description": "Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM\")" + }, + "config": { + "$ref": "#/components/schemas/GOOGLE_DRIVE_OAUTH_MULTI_CUSTOMConfig" + } + }, + "example": { + "name": "Google Drive Multi-User (White Label) Example", + "type": "GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM", 
+ "config": { + "oauth2-client-id": "Enter Client Id", + "oauth2-client-secret": "Enter Client Secret", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5 + } + } + }, + { + "type": "object", + "title": "Firecrawl", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "FIRECRAWL" + ], + "description": "Connector type (must be \"FIRECRAWL\")" + }, + "config": { + "$ref": "#/components/schemas/FIRECRAWLConfig" + } + }, + "example": { + "name": "Firecrawl Example", + "type": "FIRECRAWL", + "config": { + "api-key": "Enter your Firecrawl API Key", + "endpoint": "Crawl", + "request": "{ \n \"url\": \"https://docs.vectorize.io/\",\n \"maxDepth\": 25,\n \"limit\": 100\n }" + } + } + }, + { + "type": "object", + "title": "Google Cloud Storage", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "GCS" + ], + "description": "Connector type (must be \"GCS\")" + }, + "config": { + "$ref": "#/components/schemas/GCSConfig" + } + }, + "example": { + "name": "GCP Cloud Storage Example", + "type": "GCS", + "config": { + "service-account-json": "Enter the JSON key file for the service account", + "bucket-name": "Enter bucket name", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5, + "path-prefix": "example-path-prefix" + } + } + }, + { + "type": "object", + "title": "Intercom", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + 
"INTERCOM" + ], + "description": "Connector type (must be \"INTERCOM\")" + }, + "config": { + "$ref": "#/components/schemas/INTERCOMConfig" + } + }, + "example": { + "name": "Intercom Example", + "type": "INTERCOM", + "config": { + "created_at": "2025-06-12", + "state": "all" + } + } + }, + { + "type": "object", + "title": "Notion", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "NOTION" + ], + "description": "Connector type (must be \"NOTION\")" + }, + "config": { + "$ref": "#/components/schemas/NOTIONConfig" + } + }, + "example": { + "name": "Notion Example", + "type": "NOTION", + "config": { + "select-resources": "example-select-resources" + } + } + }, + { + "type": "object", + "title": "Notion Oauth Multi", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "NOTION_OAUTH_MULTI" + ], + "description": "Connector type (must be \"NOTION_OAUTH_MULTI\")" + }, + "config": { + "$ref": "#/components/schemas/NOTION_OAUTH_MULTIAuthConfig" + } + }, + "example": { + "name": "Notion Multi-User (Vectorize) Example", + "type": "NOTION_OAUTH_MULTI", + "config": {} + } + }, + { + "type": "object", + "title": "Notion Oauth Multi Custom", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "NOTION_OAUTH_MULTI_CUSTOM" + ], + "description": "Connector type (must be \"NOTION_OAUTH_MULTI_CUSTOM\")" + }, + "config": { + "$ref": "#/components/schemas/NOTION_OAUTH_MULTI_CUSTOMAuthConfig" + } + }, + "example": { + "name": "Notion Multi-User (White Label) Example", + "type": "NOTION_OAUTH_MULTI_CUSTOM", + "config": { + "client-id": "Enter Client ID", + 
"client-secret": "Enter Client Secret" + } + } + }, + { + "type": "object", + "title": "OneDrive", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "ONE_DRIVE" + ], + "description": "Connector type (must be \"ONE_DRIVE\")" + }, + "config": { + "$ref": "#/components/schemas/ONE_DRIVEConfig" + } + }, + "example": { + "name": "OneDrive Example", + "type": "ONE_DRIVE", + "config": { + "ms-client-id": "Enter Client Id", + "ms-tenant-id": "Enter Tenant Id", + "ms-client-secret": "Enter Client Secret", + "users": "Enter users emails to import files from. Example: developer@vectorize.io", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "path-prefix": "Enter Folder path: /exampleFolder/subFolder" + } + } + }, + { + "type": "object", + "title": "Sharepoint", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "SHAREPOINT" + ], + "description": "Connector type (must be \"SHAREPOINT\")" + }, + "config": { + "$ref": "#/components/schemas/SHAREPOINTConfig" + } + }, + "example": { + "name": "SharePoint Example", + "type": "SHAREPOINT", + "config": { + "ms-client-id": "Enter Client Id", + "ms-tenant-id": "Enter Tenant Id", + "ms-client-secret": "Enter Client Secret", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,json,csv,jpg,jpeg,png,webp,svg,gif" + } + } + }, + { + "type": "object", + "title": "Web Crawler", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "WEB_CRAWLER" + ], + 
"description": "Connector type (must be \"WEB_CRAWLER\")" + }, + "config": { + "$ref": "#/components/schemas/WEB_CRAWLERConfig" + } + }, + "example": { + "name": "Web Crawler Example", + "type": "WEB_CRAWLER", + "config": { + "seed-urls": "(e.g. https://example.com)", + "min-time-between-requests": 500, + "max-error-count": 5, + "max-urls": 1000, + "max-depth": 50, + "reindex-interval-seconds": 3600 + } + } + }, + { + "type": "object", + "title": "File Upload", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "FILE_UPLOAD" + ], + "description": "Connector type (must be \"FILE_UPLOAD\")" + }, + "config": { + "$ref": "#/components/schemas/FILE_UPLOADAuthConfig" + } + }, + "example": { + "name": "File Upload Example", + "type": "FILE_UPLOAD", + "config": {} + } + }, + { + "type": "object", + "title": "Github", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "GITHUB" + ], + "description": "Connector type (must be \"GITHUB\")" + }, + "config": { + "$ref": "#/components/schemas/GITHUBConfig" + } + }, + "example": { + "name": "GitHub Example", + "type": "GITHUB", + "config": { + "oauth-token": "Enter your GitHub personal access token", + "repositories": "Example: owner1/repo1", + "include-pull-requests": true, + "pull-request-status": "all", + "include-issues": true, + "issue-status": "all", + "max-items": 1000 + } + } + }, + { + "type": "object", + "title": "Fireflies", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "FIREFLIES" + ], + "description": "Connector type (must be \"FIREFLIES\")" + }, + "config": { + "$ref": 
"#/components/schemas/FIREFLIESConfig" + } + }, + "example": { + "name": "Fireflies.ai Example", + "type": "FIREFLIES", + "config": { + "api-key": "Enter your Fireflies.ai API key", + "start-date": "2025-06-12", + "max-meetings": -1 + } + } + } + ] + } + }, + "UpdateSourceConnectorResponseData": { + "type": "object", + "properties": { + "updatedConnector": { + "$ref": "#/components/schemas/SourceConnector" + }, + "pipelineIds": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "updatedConnector" + ] + }, + "UpdateSourceConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "data": { + "$ref": "#/components/schemas/UpdateSourceConnectorResponseData" + } + }, + "required": [ + "message", + "data" + ] + }, + "UpdateSourceConnectorRequest": { + "oneOf": [ + { + "type": "object", + "title": "Amazon S3", + "properties": { + "config": { + "$ref": "#/components/schemas/AWS_S3Config" + } + }, + "example": { + "config": { + "access-key": "Enter Access Key", + "secret-key": "Enter Secret Key", + "bucket-name": "Enter your S3 Bucket Name", + "region": "Region Name", + "archiver": false, + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5, + "path-prefix": "example-path-prefix" + } + } + }, + { + "type": "object", + "title": "Azure Blob Storage", + "properties": { + "config": { + "$ref": "#/components/schemas/AZURE_BLOBConfig" + } + }, + "example": { + "config": { + "storage-account-name": "Enter Storage Account Name", + "storage-account-key": "Enter Storage Account Key", + "container": "Enter Container Name", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5, + "path-prefix": "example-path-prefix" + } + } + }, + { + "type": "object", + "title": "Confluence", + 
"properties": { + "config": { + "$ref": "#/components/schemas/CONFLUENCEConfig" + } + }, + "example": { + "config": { + "username": "Enter your Confluence username", + "api-token": "Enter your Confluence API token", + "domain": "Enter your Confluence domain (e.g. my-domain.atlassian.net or confluence..com)", + "spaces": "Spaces to include (name, key or id)" + } + } + }, + { + "type": "object", + "title": "Discord", + "properties": { + "config": { + "$ref": "#/components/schemas/DISCORDConfig" + } + }, + "example": { + "config": { + "server-id": "Enter Server ID", + "bot-token": "Enter Token", + "channel-ids": "Enter channel ID", + "limit": 10000, + "thread-message-inclusion": "ALL", + "filter-logic": "AND", + "thread-message-mode": "CONCATENATE" + } + } + }, + { + "type": "object", + "title": "Dropbox", + "properties": { + "config": { + "$ref": "#/components/schemas/DROPBOXConfig" + } + }, + "example": { + "config": { + "path-prefix": "Enter Path: /exampleFolder/subFolder" + } + } + }, + { + "type": "object", + "title": "Dropbox Oauth", + "properties": { + "config": { + "$ref": "#/components/schemas/DROPBOX_OAUTHAuthConfig" + } + }, + "example": { + "config": {} + } + }, + { + "type": "object", + "title": "Dropbox Oauth Multi", + "properties": { + "config": { + "$ref": "#/components/schemas/DROPBOX_OAUTH_MULTIAuthConfig" + } + }, + "example": { + "config": {} + } + }, + { + "type": "object", + "title": "Dropbox Oauth Multi Custom", + "properties": { + "config": { + "$ref": "#/components/schemas/DROPBOX_OAUTH_MULTI_CUSTOMAuthConfig" + } + }, + "example": { + "config": { + "app-key": "Enter App Key", + "app-secret": "Enter App Secret" + } + } + }, + { + "type": "object", + "title": "Google Drive OAuth", + "properties": { + "config": { + "$ref": "#/components/schemas/GOOGLE_DRIVE_OAUTHConfig" + } + }, + "example": { + "config": { + "file-extensions": 
"pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5 + } + } + }, + { + "type": "object", + "title": "Google Drive", + "properties": { + "config": { + "$ref": "#/components/schemas/GOOGLE_DRIVEConfig" + } + }, + "example": { + "config": { + "service-account-json": "Enter the JSON key file for the service account", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5 + } + } + }, + { + "type": "object", + "title": "Google Drive Oauth Multi", + "properties": { + "config": { + "$ref": "#/components/schemas/GOOGLE_DRIVE_OAUTH_MULTIConfig" + } + }, + "example": { + "config": { + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5 + } + } + }, + { + "type": "object", + "title": "Google Drive Oauth Multi Custom", + "properties": { + "config": { + "$ref": "#/components/schemas/GOOGLE_DRIVE_OAUTH_MULTI_CUSTOMConfig" + } + }, + "example": { + "config": { + "oauth2-client-id": "Enter Client Id", + "oauth2-client-secret": "Enter Client Secret", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5 + } + } + }, + { + "type": "object", + "title": "Firecrawl", + "properties": { + "config": { + "$ref": "#/components/schemas/FIRECRAWLConfig" + } + }, + "example": { + "config": { + "api-key": "Enter your Firecrawl API Key", + "endpoint": "Crawl", + "request": "{ \n \"url\": \"https://docs.vectorize.io/\",\n \"maxDepth\": 25,\n \"limit\": 100\n }" + } + } + }, + { + "type": "object", + "title": "Google Cloud Storage", + "properties": { + "config": { + "$ref": "#/components/schemas/GCSConfig" + } + }, + "example": { + "config": { + 
"service-account-json": "Enter the JSON key file for the service account", + "bucket-name": "Enter bucket name", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "idle-time": 5, + "path-prefix": "example-path-prefix" + } + } + }, + { + "type": "object", + "title": "Intercom", + "properties": { + "config": { + "$ref": "#/components/schemas/INTERCOMConfig" + } + }, + "example": { + "config": { + "created_at": "2025-06-12", + "state": "all" + } + } + }, + { + "type": "object", + "title": "Notion", + "properties": { + "config": { + "$ref": "#/components/schemas/NOTIONConfig" + } + }, + "example": { + "config": { + "select-resources": "example-select-resources" + } + } + }, + { + "type": "object", + "title": "Notion Oauth Multi", + "properties": { + "config": { + "$ref": "#/components/schemas/NOTION_OAUTH_MULTIAuthConfig" + } + }, + "example": { + "config": {} + } + }, + { + "type": "object", + "title": "Notion Oauth Multi Custom", + "properties": { + "config": { + "$ref": "#/components/schemas/NOTION_OAUTH_MULTI_CUSTOMAuthConfig" + } + }, + "example": { + "config": { + "client-id": "Enter Client ID", + "client-secret": "Enter Client Secret" + } + } + }, + { + "type": "object", + "title": "OneDrive", + "properties": { + "config": { + "$ref": "#/components/schemas/ONE_DRIVEConfig" + } + }, + "example": { + "config": { + "ms-client-id": "Enter Client Id", + "ms-tenant-id": "Enter Tenant Id", + "ms-client-secret": "Enter Client Secret", + "users": "Enter users emails to import files from. 
Example: developer@vectorize.io", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "path-prefix": "Enter Folder path: /exampleFolder/subFolder" + } + } + }, + { + "type": "object", + "title": "Sharepoint", + "properties": { + "config": { + "$ref": "#/components/schemas/SHAREPOINTConfig" + } + }, + "example": { + "config": { + "ms-client-id": "Enter Client Id", + "ms-tenant-id": "Enter Tenant Id", + "ms-client-secret": "Enter Client Secret", + "file-extensions": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,json,csv,jpg,jpeg,png,webp,svg,gif" + } + } + }, + { + "type": "object", + "title": "Web Crawler", + "properties": { + "config": { + "$ref": "#/components/schemas/WEB_CRAWLERConfig" + } + }, + "example": { + "config": { + "seed-urls": "(e.g. https://example.com)", + "min-time-between-requests": 500, + "max-error-count": 5, + "max-urls": 1000, + "max-depth": 50, + "reindex-interval-seconds": 3600 + } + } + }, + { + "type": "object", + "title": "File Upload", + "properties": { + "config": { + "$ref": "#/components/schemas/FILE_UPLOADAuthConfig" + } + }, + "example": { + "config": {} + } + }, + { + "type": "object", + "title": "Github", + "properties": { + "config": { + "$ref": "#/components/schemas/GITHUBConfig" + } + }, + "example": { + "config": { + "oauth-token": "Enter your GitHub personal access token", + "repositories": "Example: owner1/repo1", + "include-pull-requests": true, + "pull-request-status": "all", + "include-issues": true, + "issue-status": "all", + "max-items": 1000 + } + } + }, + { + "type": "object", + "title": "Fireflies", + "properties": { + "config": { + "$ref": "#/components/schemas/FIREFLIESConfig" + } + }, + "example": { + "config": { + "api-key": "Enter your Fireflies.ai API key", + "start-date": "2025-06-12", + "max-meetings": -1 + } + } + } + ] + }, + "DeleteSourceConnectorResponse": { + 
"type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "required": [ + "message" + ] + }, + "CreatedDestinationConnector": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "id": { + "type": "string" + } + }, + "required": [ + "name", + "id" + ] + }, + "CreateDestinationConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "connectors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CreatedDestinationConnector" + } + } + }, + "required": [ + "message", + "connectors" + ] + }, + "CreateDestinationConnector": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "type": { + "$ref": "#/components/schemas/DestinationConnectorType" + }, + "config": { + "type": "object", + "additionalProperties": { + "nullable": true + } + } + }, + "required": [ + "name", + "type" + ] + }, + "CreateDestinationConnectorRequest": { + "type": "array", + "minItems": 1, + "items": { + "oneOf": [ + { + "type": "object", + "title": "Capella", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "CAPELLA" + ], + "description": "Connector type (must be \"CAPELLA\")" + }, + "config": { + "$ref": "#/components/schemas/CAPELLAConfig" + } + }, + "example": { + "name": "Couchbase Capella Example", + "type": "CAPELLA", + "config": { + "username": "Enter your cluster access name", + "password": "Enter your cluster access password", + "connection-string": "Enter your connection string", + "bucket": "Enter bucket name", + "scope": "Enter scope name", + "collection": "Enter collection name", + "index": "Enter search index name" + } + } + }, + { + "type": "object", + "title": "Datastax", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" 
+ }, + "type": { + "type": "string", + "enum": [ + "DATASTAX" + ], + "description": "Connector type (must be \"DATASTAX\")" + }, + "config": { + "$ref": "#/components/schemas/DATASTAXConfig" + } + }, + "example": { + "name": "DataStax Astra Example", + "type": "DATASTAX", + "config": { + "endpoint_secret": "Enter your API endpoint", + "token": "Enter your application token", + "collection": "Enter collection name" + } + } + }, + { + "type": "object", + "title": "Elastic", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "ELASTIC" + ], + "description": "Connector type (must be \"ELASTIC\")" + }, + "config": { + "$ref": "#/components/schemas/ELASTICConfig" + } + }, + "example": { + "name": "Elasticsearch Example", + "type": "ELASTIC", + "config": { + "host": "Enter your host", + "port": "Enter your port", + "api-key": "Enter your API key", + "index": "Enter index name" + } + } + }, + { + "type": "object", + "title": "Pinecone", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "PINECONE" + ], + "description": "Connector type (must be \"PINECONE\")" + }, + "config": { + "$ref": "#/components/schemas/PINECONEConfig" + } + }, + "example": { + "name": "Pinecone Example", + "type": "PINECONE", + "config": { + "api-key": "Enter your API Key", + "index": "Enter index name" + } + } + }, + { + "type": "object", + "title": "Singlestore", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "SINGLESTORE" + ], + "description": "Connector type (must be \"SINGLESTORE\")" + }, + "config": { + "$ref": "#/components/schemas/SINGLESTOREConfig" + } + }, + 
"example": { + "name": "SingleStore Example", + "type": "SINGLESTORE", + "config": { + "host": "Enter the host of the deployment", + "port": 100, + "database": "Enter the database name", + "username": "Enter the username", + "password": "Enter the username's password", + "table": "Enter table name" + } + } + }, + { + "type": "object", + "title": "Milvus", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "MILVUS" + ], + "description": "Connector type (must be \"MILVUS\")" + }, + "config": { + "$ref": "#/components/schemas/MILVUSConfig" + } + }, + "example": { + "name": "Milvus Example", + "type": "MILVUS", + "config": { + "url": "Enter your public endpoint for your Milvus cluster", + "collection": "Enter collection name" + } + } + }, + { + "type": "object", + "title": "Postgresql", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "POSTGRESQL" + ], + "description": "Connector type (must be \"POSTGRESQL\")" + }, + "config": { + "$ref": "#/components/schemas/POSTGRESQLConfig" + } + }, + "example": { + "name": "PostgreSQL Example", + "type": "POSTGRESQL", + "config": { + "host": "Enter the host of the deployment", + "port": 5432, + "database": "Enter the database name", + "username": "Enter the username", + "password": "Enter the username's password", + "table": "Enter or .
" + } + } + }, + { + "type": "object", + "title": "Qdrant", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "QDRANT" + ], + "description": "Connector type (must be \"QDRANT\")" + }, + "config": { + "$ref": "#/components/schemas/QDRANTConfig" + } + }, + "example": { + "name": "Qdrant Example", + "type": "QDRANT", + "config": { + "host": "Enter your host", + "api-key": "Enter your API key", + "collection": "Enter collection name" + } + } + }, + { + "type": "object", + "title": "Supabase", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "SUPABASE" + ], + "description": "Connector type (must be \"SUPABASE\")" + }, + "config": { + "$ref": "#/components/schemas/SUPABASEConfig" + } + }, + "example": { + "name": "Supabase Example", + "type": "SUPABASE", + "config": { + "host": "aws-0-us-east-1.pooler.supabase.com", + "port": 5432, + "database": "Enter the database name", + "username": "Enter the username", + "password": "Enter the username's password", + "table": "Enter
table name (schema.table or table).
" + } + } + }, + { + "type": "object", + "title": "Weaviate", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "WEAVIATE" + ], + "description": "Connector type (must be \"WEAVIATE\")" + }, + "config": { + "$ref": "#/components/schemas/WEAVIATEConfig" + } + }, + "example": { + "name": "Weaviate Example", + "type": "WEAVIATE", + "config": { + "host": "Enter your Weaviate Cluster REST Endpoint", + "api-key": "Enter your API key", + "collection": "Enter collection name" + } + } + }, + { + "type": "object", + "title": "Azureaisearch", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "AZUREAISEARCH" + ], + "description": "Connector type (must be \"AZUREAISEARCH\")" + }, + "config": { + "$ref": "#/components/schemas/AZUREAISEARCHConfig" + } + }, + "example": { + "name": "Azure AI Search Example", + "type": "AZUREAISEARCH", + "config": { + "service-name": "Enter your Azure AI Search service name", + "api-key": "Enter your API key", + "index": "Enter index name" + } + } + }, + { + "type": "object", + "title": "Turbopuffer", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "TURBOPUFFER" + ], + "description": "Connector type (must be \"TURBOPUFFER\")" + }, + "config": { + "$ref": "#/components/schemas/TURBOPUFFERConfig" + } + }, + "example": { + "name": "Turbopuffer Example", + "type": "TURBOPUFFER", + "config": { + "api-key": "Enter your API key", + "namespace": "Enter namespace name" + } + } + } + ] + } + }, + "UpdatedDestinationConnectorData": { + "type": "object", + "properties": { + "updatedConnector": { + "$ref": 
"#/components/schemas/DestinationConnector" + }, + "pipelineIds": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "updatedConnector" + ] + }, + "UpdateDestinationConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "data": { + "$ref": "#/components/schemas/UpdatedDestinationConnectorData" + } + }, + "required": [ + "message", + "data" + ] + }, + "UpdateDestinationConnectorRequest": { + "oneOf": [ + { + "type": "object", + "title": "Capella", + "properties": { + "config": { + "$ref": "#/components/schemas/CAPELLAConfig" + } + }, + "example": { + "config": { + "username": "Enter your cluster access name", + "password": "Enter your cluster access password", + "connection-string": "Enter your connection string", + "bucket": "Enter bucket name", + "scope": "Enter scope name", + "collection": "Enter collection name", + "index": "Enter search index name" + } + } + }, + { + "type": "object", + "title": "Datastax", + "properties": { + "config": { + "$ref": "#/components/schemas/DATASTAXConfig" + } + }, + "example": { + "config": { + "endpoint_secret": "Enter your API endpoint", + "token": "Enter your application token", + "collection": "Enter collection name" + } + } + }, + { + "type": "object", + "title": "Elastic", + "properties": { + "config": { + "$ref": "#/components/schemas/ELASTICConfig" + } + }, + "example": { + "config": { + "host": "Enter your host", + "port": "Enter your port", + "api-key": "Enter your API key", + "index": "Enter index name" + } + } + }, + { + "type": "object", + "title": "Pinecone", + "properties": { + "config": { + "$ref": "#/components/schemas/PINECONEConfig" + } + }, + "example": { + "config": { + "api-key": "Enter your API Key", + "index": "Enter index name" + } + } + }, + { + "type": "object", + "title": "Singlestore", + "properties": { + "config": { + "$ref": "#/components/schemas/SINGLESTOREConfig" + } + }, + "example": { + "config": { + "host": "Enter the 
host of the deployment", + "port": 100, + "database": "Enter the database name", + "username": "Enter the username", + "password": "Enter the username's password", + "table": "Enter table name" + } + } + }, + { + "type": "object", + "title": "Milvus", + "properties": { + "config": { + "$ref": "#/components/schemas/MILVUSConfig" + } + }, + "example": { + "config": { + "url": "Enter your public endpoint for your Milvus cluster", + "collection": "Enter collection name" + } + } + }, + { + "type": "object", + "title": "Postgresql", + "properties": { + "config": { + "$ref": "#/components/schemas/POSTGRESQLConfig" + } + }, + "example": { + "config": { + "host": "Enter the host of the deployment", + "port": 5432, + "database": "Enter the database name", + "username": "Enter the username", + "password": "Enter the username's password", + "table": "Enter
table name (schema.table or table).
" + } + } + }, + { + "type": "object", + "title": "Qdrant", + "properties": { + "config": { + "$ref": "#/components/schemas/QDRANTConfig" + } + }, + "example": { + "config": { + "host": "Enter your host", + "api-key": "Enter your API key", + "collection": "Enter collection name" + } + } + }, + { + "type": "object", + "title": "Supabase", + "properties": { + "config": { + "$ref": "#/components/schemas/SUPABASEConfig" + } + }, + "example": { + "config": { + "host": "aws-0-us-east-1.pooler.supabase.com", + "port": 5432, + "database": "Enter the database name", + "username": "Enter the username", + "password": "Enter the username's password", + "table": "Enter
table name (schema.table or table).
" + } + } + }, + { + "type": "object", + "title": "Weaviate", + "properties": { + "config": { + "$ref": "#/components/schemas/WEAVIATEConfig" + } + }, + "example": { + "config": { + "host": "Enter your Weaviate Cluster REST Endpoint", + "api-key": "Enter your API key", + "collection": "Enter collection name" + } + } + }, + { + "type": "object", + "title": "Azureaisearch", + "properties": { + "config": { + "$ref": "#/components/schemas/AZUREAISEARCHConfig" + } + }, + "example": { + "config": { + "service-name": "Enter your Azure AI Search service name", + "api-key": "Enter your API key", + "index": "Enter index name" + } + } + }, + { + "type": "object", + "title": "Turbopuffer", + "properties": { + "config": { + "$ref": "#/components/schemas/TURBOPUFFERConfig" + } + }, + "example": { + "config": { + "api-key": "Enter your API key", + "namespace": "Enter namespace name" + } + } + } + ] + }, + "DeleteDestinationConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "required": [ + "message" + ] + }, + "CreatedAIPlatformConnector": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "id": { + "type": "string" + } + }, + "required": [ + "name", + "id" + ] + }, + "CreateAIPlatformConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "connectors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/CreatedAIPlatformConnector" + } + } + }, + "required": [ + "message", + "connectors" + ] + }, + "CreateAIPlatformConnector": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "type": { + "$ref": "#/components/schemas/AIPlatformType" + }, + "config": { + "type": "object", + "additionalProperties": { + "nullable": true + } + } + }, + "required": [ + "name", + "type" + ] + }, + "CreateAIPlatformConnectorRequest": { + "type": "array", + "minItems": 1, + "items": { + "oneOf": [ + { + "type": "object", + "title": 
"Bedrock", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "BEDROCK" + ], + "description": "Connector type (must be \"BEDROCK\")" + }, + "config": { + "$ref": "#/components/schemas/BEDROCKAuthConfig" + } + }, + "example": { + "name": "Amazon Bedrock Example", + "type": "BEDROCK", + "config": { + "access-key": "Enter your Amazon Bedrock Access Key", + "key": "Enter your Amazon Bedrock Secret Key", + "region": null + } + } + }, + { + "type": "object", + "title": "Vertex", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "VERTEX" + ], + "description": "Connector type (must be \"VERTEX\")" + }, + "config": { + "$ref": "#/components/schemas/VERTEXAuthConfig" + } + }, + "example": { + "name": "Google Vertex AI Example", + "type": "VERTEX", + "config": { + "key": "Enter the contents of your Google Vertex AI Service Account JSON file", + "region": "Region Name, e.g. 
us-central1" + } + } + }, + { + "type": "object", + "title": "Openai", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "OPENAI" + ], + "description": "Connector type (must be \"OPENAI\")" + }, + "config": { + "$ref": "#/components/schemas/OPENAIAuthConfig" + } + }, + "example": { + "name": "OpenAI Example", + "type": "OPENAI", + "config": { + "key": "Enter your OpenAI API Key" + } + } + }, + { + "type": "object", + "title": "Voyage", + "required": [ + "name", + "type", + "config" + ], + "properties": { + "name": { + "type": "string", + "description": "Name of the connector" + }, + "type": { + "type": "string", + "enum": [ + "VOYAGE" + ], + "description": "Connector type (must be \"VOYAGE\")" + }, + "config": { + "$ref": "#/components/schemas/VOYAGEAuthConfig" + } + }, + "example": { + "name": "Voyage AI Example", + "type": "VOYAGE", + "config": { + "key": "Enter your Voyage AI API Key" + } + } + } + ] + } + }, + "UpdatedAIPlatformConnectorData": { + "type": "object", + "properties": { + "updatedConnector": { + "$ref": "#/components/schemas/AIPlatform" + }, + "pipelineIds": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "updatedConnector" + ] + }, + "UpdateAIPlatformConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "data": { + "$ref": "#/components/schemas/UpdatedAIPlatformConnectorData" + } + }, + "required": [ + "message", + "data" + ] + }, + "UpdateAIPlatformConnectorRequest": { + "type": "object", + "properties": { + "config": { + "type": "object", + "additionalProperties": { + "nullable": true + } + } + }, + "required": [ + "config" + ] + }, + "DeleteAIPlatformConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "required": [ + "message" + ] + }, + "UploadFile": { + "type": 
"object", + "properties": { + "key": { + "type": "string" + }, + "name": { + "type": "string" + }, + "size": { + "type": "number" + }, + "extension": { + "type": "string" + }, + "lastModified": { + "type": "string", + "nullable": true + }, + "metadata": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "required": [ + "key", + "name", + "size", + "lastModified", + "metadata" + ] + }, + "GetUploadFilesResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "files": { + "type": "array", + "items": { + "$ref": "#/components/schemas/UploadFile" + } + } + }, + "required": [ + "message", + "files" + ] + }, + "StartFileUploadToConnectorResponse": { + "type": "object", + "properties": { + "uploadUrl": { + "type": "string" + } + }, + "required": [ + "uploadUrl" + ] + }, + "StartFileUploadToConnectorRequest": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "contentType": { + "type": "string" + }, + "metadata": { + "type": "string" + } + }, + "required": [ + "name", + "contentType" + ] + }, + "DeleteFileResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "fileName": { + "type": "string" + } + }, + "required": [ + "message", + "fileName" + ] + }, + "StartExtractionResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + }, + "extractionId": { + "type": "string" + } + }, + "required": [ + "message", + "extractionId" + ] + }, + "ExtractionType": { + "type": "string", + "enum": [ + "iris" + ], + "default": "iris" + }, + "ExtractionChunkingStrategy": { + "type": "string", + "enum": [ + "markdown" + ], + "default": "markdown" + }, + "MetadataExtractionStrategySchema": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "schema": { + "type": "string" + } + }, + "required": [ + "id", + "schema" + ] + }, + "MetadataExtractionStrategy": { + "type": "object", + "properties": { + "schemas": { + 
"type": "array", + "items": { + "$ref": "#/components/schemas/MetadataExtractionStrategySchema" + } + }, + "inferSchema": { + "type": "boolean" + } + } + }, + "StartExtractionRequest": { + "type": "object", + "properties": { + "fileId": { + "type": "string" + }, + "type": { + "$ref": "#/components/schemas/ExtractionType" + }, + "chunkingStrategy": { + "$ref": "#/components/schemas/ExtractionChunkingStrategy" + }, + "chunkSize": { + "type": "number", + "default": 256 + }, + "metadata": { + "$ref": "#/components/schemas/MetadataExtractionStrategy" + } + }, + "required": [ + "fileId" + ] + }, + "ExtractionResult": { + "type": "object", + "properties": { + "success": { + "type": "boolean" + }, + "chunks": { + "type": "array", + "items": { + "type": "string" + } + }, + "text": { + "type": "string" + }, + "metadata": { + "type": "string" + }, + "metadataSchema": { + "type": "string" + }, + "chunksMetadata": { + "type": "array", + "items": { + "type": "string" + } + }, + "chunksSchema": { + "type": "array", + "items": { + "type": "string" + } + }, + "error": { + "type": "string" + } + }, + "required": [ + "success" + ] + }, + "ExtractionResultResponse": { + "type": "object", + "properties": { + "ready": { + "type": "boolean" + }, + "data": { + "$ref": "#/components/schemas/ExtractionResult" + } + }, + "required": [ + "ready" + ] + }, + "StartFileUploadResponse": { + "type": "object", + "properties": { + "fileId": { + "type": "string" + }, + "uploadUrl": { + "type": "string" + } + }, + "required": [ + "fileId", + "uploadUrl" + ] + }, + "StartFileUploadRequest": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "contentType": { + "type": "string" + } + }, + "required": [ + "name", + "contentType" + ] + }, + "AddUserFromSourceConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "required": [ + "message" + ] + }, + "AddUserToSourceConnectorRequest": { + "type": "object", + "properties": { + 
"userId": { + "type": "string" + }, + "selectedFiles": { + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "mimeType": { + "type": "string" + } + }, + "required": [ + "name", + "mimeType" + ] + } + }, + { + "type": "object", + "properties": { + "pageIds": { + "type": "array", + "items": { + "type": "string" + } + }, + "databaseIds": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + ] + }, + "refreshToken": { + "type": "string" + }, + "accessToken": { + "type": "string" + } + }, + "required": [ + "userId", + "selectedFiles" + ] + }, + "UpdateUserInSourceConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "required": [ + "message" + ] + }, + "UpdateUserInSourceConnectorRequest": { + "type": "object", + "properties": { + "userId": { + "type": "string" + }, + "selectedFiles": { + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "mimeType": { + "type": "string" + } + }, + "required": [ + "name", + "mimeType" + ] + } + }, + { + "type": "object", + "properties": { + "pageIds": { + "type": "array", + "items": { + "type": "string" + } + }, + "databaseIds": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + ] + }, + "refreshToken": { + "type": "string" + }, + "accessToken": { + "type": "string" + } + }, + "required": [ + "userId" + ] + }, + "RemoveUserFromSourceConnectorResponse": { + "type": "object", + "properties": { + "message": { + "type": "string" + } + }, + "required": [ + "message" + ] + }, + "RemoveUserFromSourceConnectorRequest": { + "type": "object", + "properties": { + "userId": { + "type": "string" + } + }, + "required": [ + "userId" + ] + }, + "AWS_S3AuthConfig": { + "type": "object", + "description": "Authentication configuration for Amazon S3", + "properties": { + "name": { + "type": 
"string", + "description": "Name. Example: Enter a descriptive name" + }, + "access-key": { + "type": "string", + "description": "Access Key. Example: Enter Access Key", + "pattern": "^\\S.*\\S$|^\\S$" + }, + "secret-key": { + "type": "string", + "description": "Secret Key. Example: Enter Secret Key", + "pattern": "^\\S.*\\S$|^\\S$" + }, + "bucket-name": { + "type": "string", + "description": "Bucket Name. Example: Enter your S3 Bucket Name" + }, + "endpoint": { + "type": "string", + "description": "Endpoint. Example: Enter Endpoint URL" + }, + "region": { + "type": "string", + "description": "Region. Example: Region Name" + }, + "archiver": { + "type": "boolean", + "description": "Allow as archive destination", + "default": false + } + }, + "required": [ + "name", + "access-key", + "secret-key", + "bucket-name", + "archiver" + ] + }, + "AWS_S3Config": { + "type": "object", + "description": "Configuration for Amazon S3 connector", + "properties": { + "file-extensions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "File Extensions", + "default": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "enum": [ + "pdf", + "doc,docx,gdoc,odt,rtf,epub", + "ppt,pptx,gslides", + "xls,xlsx,gsheets,ods", + "eml,msg", + "txt", + "html,htm", + "md", + "json", + "csv", + "jpg,jpeg,png,webp,svg,gif" + ] + }, + "idle-time": { + "type": "number", + "description": "Check for updates every (seconds)", + "minimum": 1, + "default": 5 + }, + "recursive": { + "type": "boolean", + "description": "Recursively scan all folders in the bucket" + }, + "path-prefix": { + "type": "string", + "description": "Path Prefix" + }, + "path-metadata-regex": { + "type": "string", + "description": "Path Metadata Regex" + }, + "path-regex-group-names": { + "type": "string", + "description": "Path Regex Group Names. 
Example: Enter Group Name" + } + }, + "required": [ + "file-extensions", + "idle-time" + ] + }, + "AZURE_BLOBAuthConfig": { + "type": "object", + "description": "Authentication configuration for Azure Blob Storage", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "storage-account-name": { + "type": "string", + "description": "Storage Account Name. Example: Enter Storage Account Name" + }, + "storage-account-key": { + "type": "string", + "description": "Storage Account Key. Example: Enter Storage Account Key" + }, + "container": { + "type": "string", + "description": "Container. Example: Enter Container Name" + }, + "endpoint": { + "type": "string", + "description": "Endpoint. Example: Enter Endpoint URL" + } + }, + "required": [ + "name", + "storage-account-name", + "storage-account-key", + "container" + ] + }, + "AZURE_BLOBConfig": { + "type": "object", + "description": "Configuration for Azure Blob Storage connector", + "properties": { + "file-extensions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "File Extensions", + "default": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "enum": [ + "pdf", + "doc,docx,gdoc,odt,rtf,epub", + "ppt,pptx,gslides", + "xls,xlsx,gsheets,ods", + "eml,msg", + "txt", + "html,htm", + "md", + "json", + "csv", + "jpg,jpeg,png,webp,svg,gif" + ] + }, + "idle-time": { + "type": "number", + "description": "Polling Interval (seconds)", + "minimum": 1, + "default": 5 + }, + "recursive": { + "type": "boolean", + "description": "Recursively scan all folders in the bucket" + }, + "path-prefix": { + "type": "string", + "description": "Path Prefix" + }, + "path-metadata-regex": { + "type": "string", + "description": "Path Metadata Regex" + }, + "path-regex-group-names": { + "type": "string", + "description": "Path Regex Group Names. 
Example: Enter Group Name" + } + }, + "required": [ + "file-extensions", + "idle-time" + ] + }, + "CONFLUENCEAuthConfig": { + "type": "object", + "description": "Authentication configuration for Confluence", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "username": { + "type": "string", + "description": "Username. Example: Enter your Confluence username" + }, + "api-token": { + "type": "string", + "description": "API Token. Example: Enter your Confluence API token", + "pattern": "^\\S.*\\S$|^\\S$" + }, + "domain": { + "type": "string", + "description": "Domain. Example: Enter your Confluence domain (e.g. my-domain.atlassian.net or confluence..com)" + } + }, + "required": [ + "name", + "username", + "api-token", + "domain" + ] + }, + "CONFLUENCEConfig": { + "type": "object", + "description": "Configuration for Confluence connector", + "properties": { + "spaces": { + "type": "string", + "description": "Spaces. Example: Spaces to include (name, key or id)" + }, + "root-parents": { + "type": "string", + "description": "Root Parents. Example: Enter root parent pages" + } + }, + "required": [ + "spaces" + ] + }, + "DISCORDAuthConfig": { + "type": "object", + "description": "Authentication configuration for Discord", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "server-id": { + "type": "string", + "description": "Server ID. Example: Enter Server ID" + }, + "bot-token": { + "type": "string", + "description": "Bot token. Example: Enter Token", + "pattern": "^\\S.*\\S$|^\\S$" + }, + "channel-ids": { + "type": "string", + "description": "Channel ID. 
Example: Enter channel ID" + } + }, + "required": [ + "name", + "server-id", + "bot-token", + "channel-ids" + ] + }, + "DISCORDConfig": { + "type": "object", + "description": "Configuration for Discord connector", + "properties": { + "emoji": { + "type": "string", + "description": "Emoji Filter. Example: Enter custom emoji filter name" + }, + "author": { + "type": "string", + "description": "Author Filter. Example: Enter author name" + }, + "ignore-author": { + "type": "string", + "description": "Ignore Author Filter. Example: Enter ignore author name" + }, + "limit": { + "type": "number", + "description": "Limit. Example: Enter limit", + "minimum": 1, + "default": 10000 + }, + "thread-message-inclusion": { + "type": "string", + "description": "Thread Message Inclusion", + "default": "ALL", + "enum": [ + "ALL", + "FILTER" + ] + }, + "filter-logic": { + "type": "string", + "description": "Filter Logic", + "default": "AND", + "enum": [ + "AND", + "OR" + ] + }, + "thread-message-mode": { + "type": "string", + "description": "Thread Message Mode", + "default": "CONCATENATE", + "enum": [ + "CONCATENATE", + "SINGLE" + ] + } + } + }, + "DROPBOXAuthConfig": { + "type": "object", + "description": "Authentication configuration for Dropbox (Legacy)", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "refresh-token": { + "type": "string", + "description": "Connect Dropbox to Vectorize. Example: Authorize", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "refresh-token" + ] + }, + "DROPBOXConfig": { + "type": "object", + "description": "Configuration for Dropbox (Legacy) connector", + "properties": { + "path-prefix": { + "type": "string", + "description": "Read from these folders (optional). 
Example: Enter Path: /exampleFolder/subFolder", + "pattern": "^\\/.*$" + } + } + }, + "DROPBOX_OAUTHAuthConfig": { + "type": "object", + "description": "Authentication configuration for Dropbox OAuth", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "authorized-user": { + "type": "string", + "description": "Authorized User" + }, + "selection-details": { + "type": "string", + "description": "Connect Dropbox to Vectorize. Example: Authorize" + }, + "editedUsers": { + "type": "string", + "default": {} + }, + "reconnectUsers": { + "type": "string", + "default": {} + } + }, + "required": [ + "name", + "selection-details" + ] + }, + "DROPBOX_OAUTH_MULTIAuthConfig": { + "type": "object", + "description": "Authentication configuration for Dropbox Multi-User (Vectorize)", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "authorized-users": { + "type": "string", + "description": "Authorized Users" + }, + "editedUsers": { + "type": "string", + "default": {} + }, + "deletedUsers": { + "type": "string", + "default": {} + } + }, + "required": [ + "name" + ] + }, + "DROPBOX_OAUTH_MULTI_CUSTOMAuthConfig": { + "type": "object", + "description": "Authentication configuration for Dropbox Multi-User (White Label)", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "app-key": { + "type": "string", + "description": "Dropbox App Key. Example: Enter App Key" + }, + "app-secret": { + "type": "string", + "description": "Dropbox App Secret. 
Example: Enter App Secret" + }, + "authorized-users": { + "type": "string", + "description": "Authorized Users" + }, + "editedUsers": { + "type": "string", + "default": {} + }, + "deletedUsers": { + "type": "string", + "default": {} + } + }, + "required": [ + "name", + "app-key", + "app-secret" + ] + }, + "GOOGLE_DRIVE_OAUTHAuthConfig": { + "type": "object", + "description": "Authentication configuration for Google Drive OAuth", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "authorized-user": { + "type": "string", + "description": "Authorized User" + }, + "selection-details": { + "type": "string", + "description": "Connect Google Drive to Vectorize. Example: Authorize" + }, + "editedUsers": { + "type": "string", + "default": {} + }, + "reconnectUsers": { + "type": "string", + "default": {} + } + }, + "required": [ + "name", + "selection-details" + ] + }, + "GOOGLE_DRIVE_OAUTHConfig": { + "type": "object", + "description": "Configuration for Google Drive OAuth connector", + "properties": { + "file-extensions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "File Extensions", + "default": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "enum": [ + "pdf", + "doc,docx,gdoc,odt,rtf,epub", + "ppt,pptx,gslides", + "xls,xlsx,gsheets,ods", + "eml,msg", + "txt", + "html,htm", + "md", + "json", + "csv", + "jpg,jpeg,png,webp,svg,gif" + ] + }, + "idle-time": { + "type": "number", + "description": "Polling Interval (seconds). Example: Enter polling interval in seconds", + "default": 5 + } + }, + "required": [ + "file-extensions" + ] + }, + "GOOGLE_DRIVEAuthConfig": { + "type": "object", + "description": "Authentication configuration for Google Drive (Service Account)", + "properties": { + "name": { + "type": "string", + "description": "Name. 
Example: Enter a descriptive name" + }, + "service-account-json": { + "type": "string", + "description": "Service Account JSON. Example: Enter the JSON key file for the service account" + } + }, + "required": [ + "name", + "service-account-json" + ] + }, + "GOOGLE_DRIVEConfig": { + "type": "object", + "description": "Configuration for Google Drive (Service Account) connector", + "properties": { + "file-extensions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "File Extensions", + "default": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "enum": [ + "pdf", + "doc,docx,gdoc,odt,rtf,epub", + "ppt,pptx,gslides", + "xls,xlsx,gsheets,ods", + "eml,msg", + "txt", + "html,htm", + "md", + "json", + "csv", + "jpg,jpeg,png,webp,svg,gif" + ] + }, + "root-parents": { + "type": "string", + "description": "Restrict ingest to these folder URLs (optional). Example: Enter Folder URLs. Example: https://drive.google.com/drive/folders/1234aBCd5678_eFgH9012iJKL3456opqr", + "pattern": "^https:\\/\\/drive\\.google\\.com\\/drive(\\/u\\/\\d+)?\\/folders\\/[a-zA-Z0-9_-]+(\\?.*)?$" + }, + "idle-time": { + "type": "number", + "description": "Polling Interval (seconds). Example: Enter polling interval in seconds", + "default": 5 + } + }, + "required": [ + "file-extensions" + ] + }, + "GOOGLE_DRIVE_OAUTH_MULTIAuthConfig": { + "type": "object", + "description": "Authentication configuration for Google Drive Multi-User (Vectorize)", + "properties": { + "name": { + "type": "string", + "description": "Name. 
Example: Enter a descriptive name" + }, + "authorized-users": { + "type": "string", + "description": "Authorized Users" + }, + "editedUsers": { + "type": "string", + "default": {} + }, + "deletedUsers": { + "type": "string", + "default": {} + } + }, + "required": [ + "name" + ] + }, + "GOOGLE_DRIVE_OAUTH_MULTIConfig": { + "type": "object", + "description": "Configuration for Google Drive Multi-User (Vectorize) connector", + "properties": { + "file-extensions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "File Extensions", + "default": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "enum": [ + "pdf", + "doc,docx,gdoc,odt,rtf,epub", + "ppt,pptx,gslides", + "xls,xlsx,gsheets,ods", + "eml,msg", + "txt", + "html,htm", + "md", + "json", + "csv", + "jpg,jpeg,png,webp,svg,gif" + ] + }, + "idle-time": { + "type": "number", + "description": "Polling Interval (seconds). Example: Enter polling interval in seconds", + "default": 5 + } + }, + "required": [ + "file-extensions" + ] + }, + "GOOGLE_DRIVE_OAUTH_MULTI_CUSTOMAuthConfig": { + "type": "object", + "description": "Authentication configuration for Google Drive Multi-User (White Label)", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "oauth2-client-id": { + "type": "string", + "description": "OAuth2 Client Id. Example: Enter Client Id" + }, + "oauth2-client-secret": { + "type": "string", + "description": "OAuth2 Client Secret. 
Example: Enter Client Secret" + }, + "authorized-users": { + "type": "string", + "description": "Authorized Users" + }, + "editedUsers": { + "type": "string", + "default": {} + }, + "deletedUsers": { + "type": "string", + "default": {} + } + }, + "required": [ + "name", + "oauth2-client-id", + "oauth2-client-secret" + ] + }, + "GOOGLE_DRIVE_OAUTH_MULTI_CUSTOMConfig": { + "type": "object", + "description": "Configuration for Google Drive Multi-User (White Label) connector", + "properties": { + "file-extensions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "File Extensions", + "default": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "enum": [ + "pdf", + "doc,docx,gdoc,odt,rtf,epub", + "ppt,pptx,gslides", + "xls,xlsx,gsheets,ods", + "eml,msg", + "txt", + "html,htm", + "md", + "json", + "csv", + "jpg,jpeg,png,webp,svg,gif" + ] + }, + "idle-time": { + "type": "number", + "description": "Polling Interval (seconds). Example: Enter polling interval in seconds", + "default": 5 + } + }, + "required": [ + "file-extensions" + ] + }, + "FIRECRAWLAuthConfig": { + "type": "object", + "description": "Authentication configuration for Firecrawl", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "api-key": { + "type": "string", + "description": "API Key. Example: Enter your Firecrawl API Key" + } + }, + "required": [ + "name", + "api-key" + ] + }, + "FIRECRAWLConfig": { + "type": "object", + "description": "Configuration for Firecrawl connector", + "properties": { + "endpoint": { + "type": "string", + "description": "Endpoint. Example: Choose which api endpoint to use", + "default": "Crawl", + "enum": [ + "Crawl", + "Scrape" + ] + }, + "request": { + "type": "object", + "description": "Request Body. 
Example: JSON config for firecrawl's /crawl or /scrape endpoint.", + "default": "{ \n \"url\": \"https://docs.vectorize.io/\",\n \"maxDepth\": 25,\n \"limit\": 100\n }" + } + }, + "required": [ + "endpoint", + "request" + ] + }, + "GCSAuthConfig": { + "type": "object", + "description": "Authentication configuration for GCP Cloud Storage", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "service-account-json": { + "type": "string", + "description": "Service Account JSON. Example: Enter the JSON key file for the service account" + }, + "bucket-name": { + "type": "string", + "description": "Bucket. Example: Enter bucket name" + } + }, + "required": [ + "name", + "service-account-json", + "bucket-name" + ] + }, + "GCSConfig": { + "type": "object", + "description": "Configuration for GCP Cloud Storage connector", + "properties": { + "file-extensions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "File Extensions", + "default": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "enum": [ + "pdf", + "doc,docx,gdoc,odt,rtf,epub", + "ppt,pptx,gslides", + "xls,xlsx,gsheets,ods", + "eml,msg", + "txt", + "html,htm", + "md", + "json", + "csv", + "jpg,jpeg,png,webp,svg,gif" + ] + }, + "idle-time": { + "type": "number", + "description": "Check for updates every (seconds)", + "minimum": 1, + "default": 5 + }, + "recursive": { + "type": "boolean", + "description": "Recursively scan all folders in the bucket" + }, + "path-prefix": { + "type": "string", + "description": "Path Prefix" + }, + "path-metadata-regex": { + "type": "string", + "description": "Path Metadata Regex" + }, + "path-regex-group-names": { + "type": "string", + "description": "Path Regex Group Names. 
Example: Enter Group Name" + } + }, + "required": [ + "file-extensions", + "idle-time" + ] + }, + "INTERCOMAuthConfig": { + "type": "object", + "description": "Authentication configuration for Intercom", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "token": { + "type": "string", + "description": "Access Token. Example: Authorize Intercom Access" + } + }, + "required": [ + "name", + "token" + ] + }, + "INTERCOMConfig": { + "type": "object", + "description": "Configuration for Intercom connector", + "properties": { + "created_at": { + "type": "string", + "format": "date", + "description": "Created After. Filter for conversation created after this date. Example: Enter a date: Example 2012-12-31", + "default": "2025-06-12" + }, + "updated_at": { + "type": "string", + "format": "date", + "description": "Updated After. Filter for conversation updated after this date. Example: Enter a date: Example 2012-12-31" + }, + "state": { + "type": "array", + "items": { + "type": "string" + }, + "description": "State", + "default": "all", + "enum": [ + "open", + "closed", + "snoozed", + "all" + ] + } + }, + "required": [ + "created_at" + ] + }, + "NOTIONAuthConfig": { + "type": "object", + "description": "Authentication configuration for Notion", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "access-token": { + "type": "string", + "description": "Connect Notion to Vectorize - Note this will effect existing connections. test. 
Example: Authorize" + }, + "s3id": { + "type": "string" + }, + "editedToken": { + "type": "string" + } + }, + "required": [ + "name", + "access-token" + ] + }, + "NOTIONConfig": { + "type": "object", + "description": "Configuration for Notion connector", + "properties": { + "select-resources": { + "type": "string", + "description": "Select Notion Resources" + }, + "database-ids": { + "type": "string", + "description": "Database IDs" + }, + "database-names": { + "type": "string", + "description": "Database Names" + }, + "page-ids": { + "type": "string", + "description": "Page IDs" + }, + "page-names": { + "type": "string", + "description": "Page Names" + } + }, + "required": [ + "select-resources", + "database-ids", + "database-names", + "page-ids", + "page-names" + ] + }, + "NOTION_OAUTH_MULTIAuthConfig": { + "type": "object", + "description": "Authentication configuration for Notion Multi-User (Vectorize)", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "authorized-users": { + "type": "string", + "description": "Authorized Users. Users who have authorized access to their Notion content" + }, + "editedUsers": { + "type": "string", + "default": {} + }, + "deletedUsers": { + "type": "string", + "default": {} + } + }, + "required": [ + "name" + ] + }, + "NOTION_OAUTH_MULTI_CUSTOMAuthConfig": { + "type": "object", + "description": "Authentication configuration for Notion Multi-User (White Label)", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "client-id": { + "type": "string", + "description": "Notion Client ID. Example: Enter Client ID" + }, + "client-secret": { + "type": "string", + "description": "Notion Client Secret. 
Example: Enter Client Secret" + }, + "authorized-users": { + "type": "string", + "description": "Authorized Users" + }, + "editedUsers": { + "type": "string", + "default": {} + }, + "deletedUsers": { + "type": "string", + "default": {} + } + }, + "required": [ + "name", + "client-id", + "client-secret" + ] + }, + "ONE_DRIVEAuthConfig": { + "type": "object", + "description": "Authentication configuration for OneDrive", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "ms-client-id": { + "type": "string", + "description": "Client Id. Example: Enter Client Id" + }, + "ms-tenant-id": { + "type": "string", + "description": "Tenant Id. Example: Enter Tenant Id" + }, + "ms-client-secret": { + "type": "string", + "description": "Client Secret. Example: Enter Client Secret" + }, + "users": { + "type": "string", + "description": "Users. Example: Enter users emails to import files from. Example: developer@vectorize.io" + } + }, + "required": [ + "name", + "ms-client-id", + "ms-tenant-id", + "ms-client-secret", + "users" + ] + }, + "ONE_DRIVEConfig": { + "type": "object", + "description": "Configuration for OneDrive connector", + "properties": { + "file-extensions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "File Extensions", + "default": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,md,json,csv,jpg,jpeg,png,webp,svg,gif", + "enum": [ + "pdf", + "doc,docx,gdoc,odt,rtf,epub", + "ppt,pptx,gslides", + "xls,xlsx,gsheets,ods", + "eml,msg", + "txt", + "html,htm", + "md", + "json", + "csv", + "jpg,jpeg,png,webp,svg,gif" + ] + }, + "path-prefix": { + "type": "string", + "description": "Read starting from this folder (optional). 
Example: Enter Folder path: /exampleFolder/subFolder" + } + }, + "required": [ + "file-extensions" + ] + }, + "SHAREPOINTAuthConfig": { + "type": "object", + "description": "Authentication configuration for SharePoint", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "ms-client-id": { + "type": "string", + "description": "Client Id. Example: Enter Client Id" + }, + "ms-tenant-id": { + "type": "string", + "description": "Tenant Id. Example: Enter Tenant Id" + }, + "ms-client-secret": { + "type": "string", + "description": "Client Secret. Example: Enter Client Secret" + } + }, + "required": [ + "name", + "ms-client-id", + "ms-tenant-id", + "ms-client-secret" + ] + }, + "SHAREPOINTConfig": { + "type": "object", + "description": "Configuration for SharePoint connector", + "properties": { + "file-extensions": { + "type": "array", + "items": { + "type": "string" + }, + "description": "File Extensions", + "default": "pdf,doc,docx,gdoc,odt,rtf,epub,ppt,pptx,gslides,xls,xlsx,gsheets,ods,eml,msg,txt,html,htm,json,csv,jpg,jpeg,png,webp,svg,gif", + "enum": [ + "pdf", + "doc,docx,gdoc,odt,rtf,epub", + "ppt,pptx,gslides", + "xls,xlsx,gsheets,ods", + "eml,msg", + "txt", + "html,htm", + "json", + "csv", + "jpg,jpeg,png,webp,svg,gif" + ] + }, + "sites": { + "type": "string", + "description": "Site Name(s). Example: Filter by site name. All sites if empty.", + "pattern": "^(?!.*(https?:\\/\\/|www\\.))[\\w\\s\\-.]+$" + } + }, + "required": [ + "file-extensions" + ] + }, + "WEB_CRAWLERAuthConfig": { + "type": "object", + "description": "Authentication configuration for Web Crawler", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "seed-urls": { + "type": "string", + "description": "Seed URL(s). Add one or more seed URLs to crawl. The crawler will start from these URLs and follow links to other pages.. Example: (e.g. 
https://example.com)" + } + }, + "required": [ + "name", + "seed-urls" + ] + }, + "WEB_CRAWLERConfig": { + "type": "object", + "description": "Configuration for Web Crawler connector", + "properties": { + "allowed-domains-opt": { + "type": "string", + "description": "Additional Allowed URLs or prefix(es). Add one or more allowed URLs or URL prefixes. The crawler will read URLs that match these patterns in addition to the seed URL(s).. Example: (e.g. https://docs.example.com)" + }, + "forbidden-paths": { + "type": "string", + "description": "Forbidden Paths. Example: Enter forbidden paths (e.g. /admin)", + "pattern": "^\\/([a-zA-Z0-9-_]+(\\/)?)+$" + }, + "min-time-between-requests": { + "type": "number", + "description": "Throttle (ms). Example: Enter minimum time between requests in milliseconds", + "default": 500 + }, + "max-error-count": { + "type": "number", + "description": "Max Error Count. Example: Enter maximum error count", + "default": 5 + }, + "max-urls": { + "type": "number", + "description": "Max URLs. Example: Enter maximum number of URLs to crawl", + "default": 1000 + }, + "max-depth": { + "type": "number", + "description": "Max Depth. Example: Enter maximum crawl depth", + "default": 50 + }, + "reindex-interval-seconds": { + "type": "number", + "description": "Reindex Interval (seconds). Example: Enter reindex interval in seconds", + "default": 3600 + } + } + }, + "FILE_UPLOADAuthConfig": { + "type": "object", + "description": "Authentication configuration for File Upload", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for this connector" + }, + "path-prefix": { + "type": "string", + "description": "Path Prefix" + }, + "files": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Choose files. Files uploaded to this connector can be used in pipelines to vectorize their contents. Note: files with the same name will be overwritten." 
+ } + }, + "required": [ + "name" + ] + }, + "GITHUBAuthConfig": { + "type": "object", + "description": "Authentication configuration for GitHub", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "oauth-token": { + "type": "string", + "description": "Personal Access Token. Example: Enter your GitHub personal access token", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "oauth-token" + ] + }, + "GITHUBConfig": { + "type": "object", + "description": "Configuration for GitHub connector", + "properties": { + "repositories": { + "type": "string", + "description": "Repositories. Example: Example: owner1/repo1", + "pattern": "^[a-zA-Z0-9-]+\\/[a-zA-Z0-9-]+$" + }, + "include-pull-requests": { + "type": "boolean", + "description": "Include Pull Requests", + "default": true + }, + "pull-request-status": { + "type": "string", + "description": "Pull Request Status", + "default": "all", + "enum": [ + "all", + "open", + "closed", + "merged" + ] + }, + "pull-request-labels": { + "type": "string", + "description": "Pull Request Labels. Example: Optionally filter by label. E.g. fix" + }, + "include-issues": { + "type": "boolean", + "description": "Include Issues", + "default": true + }, + "issue-status": { + "type": "string", + "description": "Issue Status", + "default": "all", + "enum": [ + "all", + "open", + "closed" + ] + }, + "issue-labels": { + "type": "string", + "description": "Issue Labels. Example: Optionally filter by label. E.g. bug" + }, + "max-items": { + "type": "number", + "description": "Max Items. Example: Enter maximum number of items to fetch", + "default": 1000 + }, + "created-after": { + "type": "string", + "format": "date", + "description": "Created After. Filter for items created after this date. 
Example: Enter a date: Example 2012-12-31" + } + }, + "required": [ + "repositories", + "include-pull-requests", + "pull-request-status", + "include-issues", + "issue-status", + "max-items" + ] + }, + "FIREFLIESAuthConfig": { + "type": "object", + "description": "Authentication configuration for Fireflies.ai", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name" + }, + "api-key": { + "type": "string", + "description": "API Key. Example: Enter your Fireflies.ai API key", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "api-key" + ] + }, + "FIREFLIESConfig": { + "type": "object", + "description": "Configuration for Fireflies.ai connector", + "properties": { + "start-date": { + "type": "string", + "format": "date", + "description": "Start Date. Include meetings from this date forward. Example: Enter a date: Example 2023-12-31", + "default": "2025-06-12" + }, + "end-date": { + "type": "string", + "format": "date", + "description": "End Date. Include meetings up to this date only. Example: Enter a date: Example 2023-12-31" + }, + "title-filter-type": { + "type": "string", + "default": "AND" + }, + "title-filter": { + "type": "string", + "description": "Title Filter. Only include meetings with this text in the title. Example: Enter meeting title" + }, + "participant-filter-type": { + "type": "string", + "default": "AND" + }, + "participant-filter": { + "type": "string", + "description": "Participant's Email Filter. Include meetings where these participants were invited. Example: Enter participant email" + }, + "max-meetings": { + "type": "number", + "description": "Max Meetings. Enter -1 for all available meetings, or specify a limit. Example: Enter maximum number of meetings to retrieve. 
(-1 for all)", + "default": -1 + } + }, + "required": [ + "start-date", + "title-filter-type", + "participant-filter-type" + ] + }, + "CAPELLAAuthConfig": { + "type": "object", + "description": "Authentication configuration for Couchbase Capella", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Capella integration" + }, + "username": { + "type": "string", + "description": "Cluster Access Name. Example: Enter your cluster access name" + }, + "password": { + "type": "string", + "description": "Cluster Access Password. Example: Enter your cluster access password" + }, + "connection-string": { + "type": "string", + "description": "Connection String. Example: Enter your connection string" + } + }, + "required": [ + "name", + "username", + "password", + "connection-string" + ] + }, + "CAPELLAConfig": { + "type": "object", + "description": "Configuration for Couchbase Capella connector", + "properties": { + "bucket": { + "type": "string", + "description": "Bucket Name. Example: Enter bucket name" + }, + "scope": { + "type": "string", + "description": "Scope Name. Example: Enter scope name" + }, + "collection": { + "type": "string", + "description": "Collection Name. Example: Enter collection name" + }, + "index": { + "type": "string", + "description": "Search Index Name. Example: Enter search index name", + "maxLength": 255 + } + }, + "required": [ + "bucket", + "scope", + "collection", + "index" + ] + }, + "DATASTAXAuthConfig": { + "type": "object", + "description": "Authentication configuration for DataStax Astra", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your DataStax integration" + }, + "endpoint_secret": { + "type": "string", + "description": "API Endpoint. Example: Enter your API endpoint" + }, + "token": { + "type": "string", + "description": "Application Token. 
Example: Enter your application token", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "endpoint_secret", + "token" + ] + }, + "DATASTAXConfig": { + "type": "object", + "description": "Configuration for DataStax Astra connector", + "properties": { + "collection": { + "type": "string", + "description": "Collection Name. Example: Enter collection name", + "pattern": "^[a-zA-Z][a-zA-Z0-9_]*$" + } + }, + "required": [ + "collection" + ] + }, + "ELASTICAuthConfig": { + "type": "object", + "description": "Authentication configuration for Elasticsearch", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Elastic integration" + }, + "host": { + "type": "string", + "description": "Host. Example: Enter your host" + }, + "port": { + "type": "string", + "description": "Port. Example: Enter your port" + }, + "api-key": { + "type": "string", + "description": "API Key. Example: Enter your API key", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "host", + "port", + "api-key" + ] + }, + "ELASTICConfig": { + "type": "object", + "description": "Configuration for Elasticsearch connector", + "properties": { + "index": { + "type": "string", + "description": "Index Name. Example: Enter index name", + "maxLength": 255, + "pattern": "^(?!.*(--|\\.\\.))(?!^[\\-.])(?!.*[\\-.]$)[a-z0-9-.]*$" + } + }, + "required": [ + "index" + ] + }, + "PINECONEAuthConfig": { + "type": "object", + "description": "Authentication configuration for Pinecone", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Pinecone integration" + }, + "api-key": { + "type": "string", + "description": "API Key. 
Example: Enter your API Key", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "api-key" + ] + }, + "PINECONEConfig": { + "type": "object", + "description": "Configuration for Pinecone connector", + "properties": { + "index": { + "type": "string", + "description": "Index Name. Example: Enter index name", + "maxLength": 45, + "pattern": "^(?!.*--)(?!^-)(?!.*-$)[a-z0-9-]+$" + }, + "namespace": { + "type": "string", + "description": "Namespace. Example: Enter namespace", + "maxLength": 45, + "pattern": "^(?!.*--)(?!^-)(?!.*-$)[a-z0-9-]+$" + } + }, + "required": [ + "index" + ] + }, + "SINGLESTOREAuthConfig": { + "type": "object", + "description": "Authentication configuration for SingleStore", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your SingleStore integration" + }, + "host": { + "type": "string", + "description": "Host. Example: Enter the host of the deployment" + }, + "port": { + "type": "number", + "description": "Port. Example: Enter the port of the deployment" + }, + "database": { + "type": "string", + "description": "Database. Example: Enter the database name" + }, + "username": { + "type": "string", + "description": "Username. Example: Enter the username" + }, + "password": { + "type": "string", + "description": "Password. Example: Enter the username's password" + } + }, + "required": [ + "name", + "host", + "port", + "database", + "username", + "password" + ] + }, + "SINGLESTOREConfig": { + "type": "object", + "description": "Configuration for SingleStore connector", + "properties": { + "table": { + "type": "string", + "description": "Table Name. 
Example: Enter table name", + "maxLength": 45, + "pattern": "^(?!\\b(add|alter|all|and|any|as|asc|avg|between|case|check|column|commit|constraint|create|cross|database|default|delete|desc|distinct|drop|else|exists|false|from|full|group|having|in|index|inner|insert|is|join|key|left|like|limit|max|min|not|null|on|or|order|outer|primary|right|rollback|select|set|sum|table|true|union|unique|update|values|view|where)\\b$)(?!.*--)(?!.*[-])[a-z][a-z0-9_]{0,44}$" + } + }, + "required": [ + "table" + ] + }, + "MILVUSAuthConfig": { + "type": "object", + "description": "Authentication configuration for Milvus", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Milvus integration" + }, + "url": { + "type": "string", + "description": "Public Endpoint. Example: Enter your public endpoint for your Milvus cluster" + }, + "token": { + "type": "string", + "description": "Token. Example: Enter your cluster token or Username/Password" + }, + "username": { + "type": "string", + "description": "Username. Example: Enter your cluster Username" + }, + "password": { + "type": "string", + "description": "Password. Example: Enter your cluster Password" + } + }, + "required": [ + "name", + "url" + ] + }, + "MILVUSConfig": { + "type": "object", + "description": "Configuration for Milvus connector", + "properties": { + "collection": { + "type": "string", + "description": "Collection Name. Example: Enter collection name", + "pattern": "^[a-zA-Z][a-zA-Z0-9_]*$" + } + }, + "required": [ + "collection" + ] + }, + "POSTGRESQLAuthConfig": { + "type": "object", + "description": "Authentication configuration for PostgreSQL", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your PostgreSQL integration" + }, + "host": { + "type": "string", + "description": "Host. Example: Enter the host of the deployment" + }, + "port": { + "type": "number", + "description": "Port. 
Example: Enter the port of the deployment", + "default": 5432 + }, + "database": { + "type": "string", + "description": "Database. Example: Enter the database name" + }, + "username": { + "type": "string", + "description": "Username. Example: Enter the username" + }, + "password": { + "type": "string", + "description": "Password. Example: Enter the username's password" + } + }, + "required": [ + "name", + "host", + "database", + "username", + "password" + ] + }, + "POSTGRESQLConfig": { + "type": "object", + "description": "Configuration for PostgreSQL connector", + "properties": { + "table": { + "type": "string", + "description": "Table Name. Example: Enter
table_name or schema_name.table_name
", + "maxLength": 45, + "pattern": "^(?!\\b(add|alter|all|and|any|as|asc|avg|between|case|check|column|commit|constraint|create|cross|database|default|delete|desc|distinct|drop|else|exists|false|from|full|group|having|in|index|inner|insert|is|join|key|left|like|limit|max|min|not|null|on|or|order|outer|primary|right|rollback|select|set|sum|table|true|union|unique|update|values|view|where)\\b$)(?!.*--)(?!.*[-])[a-z][a-z0-9._]{0,44}$" + } + }, + "required": [ + "table" + ] + }, + "QDRANTAuthConfig": { + "type": "object", + "description": "Authentication configuration for Qdrant", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Qdrant integration" + }, + "host": { + "type": "string", + "description": "Host. Example: Enter your host" + }, + "api-key": { + "type": "string", + "description": "API Key. Example: Enter your API key", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "host", + "api-key" + ] + }, + "QDRANTConfig": { + "type": "object", + "description": "Configuration for Qdrant connector", + "properties": { + "collection": { + "type": "string", + "description": "Collection Name. Example: Enter collection name", + "pattern": "^[a-zA-Z0-9_-]*$" + } + }, + "required": [ + "collection" + ] + }, + "SUPABASEAuthConfig": { + "type": "object", + "description": "Authentication configuration for Supabase", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Supabase integration" + }, + "host": { + "type": "string", + "description": "Host. Example: Enter the host of the deployment", + "default": "aws-0-us-east-1.pooler.supabase.com" + }, + "port": { + "type": "number", + "description": "Port. Example: Enter the port of the deployment", + "default": 5432 + }, + "database": { + "type": "string", + "description": "Database. 
Example: Enter the database name" + }, + "username": { + "type": "string", + "description": "Username. Example: Enter the username" + }, + "password": { + "type": "string", + "description": "Password. Example: Enter the username's password" + } + }, + "required": [ + "name", + "host", + "database", + "username", + "password" + ] + }, + "SUPABASEConfig": { + "type": "object", + "description": "Configuration for Supabase connector", + "properties": { + "table": { + "type": "string", + "description": "Table Name. Example: Enter
table_name or schema_name.table_name
", + "maxLength": 45, + "pattern": "^(?!\\b(add|alter|all|and|any|as|asc|avg|between|case|check|column|commit|constraint|create|cross|database|default|delete|desc|distinct|drop|else|exists|false|from|full|group|having|in|index|inner|insert|is|join|key|left|like|limit|max|min|not|null|on|or|order|outer|primary|right|rollback|select|set|sum|table|true|union|unique|update|values|view|where)\\b$)(?!.*--)(?!.*[-])[a-z][a-z0-9._]{0,44}$" + } + }, + "required": [ + "table" + ] + }, + "WEAVIATEAuthConfig": { + "type": "object", + "description": "Authentication configuration for Weaviate", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Weaviate integration" + }, + "host": { + "type": "string", + "description": "Endpoint. Example: Enter your Weaviate Cluster REST Endpoint" + }, + "api-key": { + "type": "string", + "description": "API Key. Example: Enter your API key", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "host", + "api-key" + ] + }, + "WEAVIATEConfig": { + "type": "object", + "description": "Configuration for Weaviate connector", + "properties": { + "collection": { + "type": "string", + "description": "Collection Name. Example: Enter collection name", + "pattern": "^[A-Z][_0-9A-Za-z]*$" + } + }, + "required": [ + "collection" + ] + }, + "AZUREAISEARCHAuthConfig": { + "type": "object", + "description": "Authentication configuration for Azure AI Search", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Azure AI Search integration" + }, + "service-name": { + "type": "string", + "description": "Azure AI Search Service Name. Example: Enter your Azure AI Search service name" + }, + "api-key": { + "type": "string", + "description": "API Key. 
Example: Enter your API key", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "service-name", + "api-key" + ] + }, + "AZUREAISEARCHConfig": { + "type": "object", + "description": "Configuration for Azure AI Search connector", + "properties": { + "index": { + "type": "string", + "description": "Index Name. Example: Enter index name", + "pattern": "^[a-z0-9][a-z0-9-]*[a-z0-9]$" + } + }, + "required": [ + "index" + ] + }, + "TURBOPUFFERAuthConfig": { + "type": "object", + "description": "Authentication configuration for Turbopuffer", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Turbopuffer integration" + }, + "api-key": { + "type": "string", + "description": "API Key. Example: Enter your API key", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "api-key" + ] + }, + "TURBOPUFFERConfig": { + "type": "object", + "description": "Configuration for Turbopuffer connector", + "properties": { + "namespace": { + "type": "string", + "description": "Namespace. Example: Enter namespace name" + } + }, + "required": [ + "namespace" + ] + }, + "BEDROCKAuthConfig": { + "type": "object", + "description": "Authentication configuration for Amazon Bedrock", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Amazon Bedrock integration" + }, + "access-key": { + "type": "string", + "description": "Access Key. Example: Enter your Amazon Bedrock Access Key", + "pattern": "^\\S.*\\S$|^\\S$" + }, + "key": { + "type": "string", + "description": "Secret Key. Example: Enter your Amazon Bedrock Secret Key", + "pattern": "^\\S.*\\S$|^\\S$" + }, + "region": { + "type": "string", + "description": "Region. 
Example: Region Name" + } + }, + "required": [ + "name", + "access-key", + "key", + "region" + ] + }, + "VERTEXAuthConfig": { + "type": "object", + "description": "Authentication configuration for Google Vertex AI", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Google Vertex AI integration" + }, + "key": { + "type": "string", + "description": "Service Account Json. Example: Enter the contents of your Google Vertex AI Service Account JSON file" + }, + "region": { + "type": "string", + "description": "Region. Example: Region Name, e.g. us-central1" + } + }, + "required": [ + "name", + "key", + "region" + ] + }, + "OPENAIAuthConfig": { + "type": "object", + "description": "Authentication configuration for OpenAI", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your OpenAI integration" + }, + "key": { + "type": "string", + "description": "API Key. Example: Enter your OpenAI API Key", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "key" + ] + }, + "VOYAGEAuthConfig": { + "type": "object", + "description": "Authentication configuration for Voyage AI", + "properties": { + "name": { + "type": "string", + "description": "Name. Example: Enter a descriptive name for your Voyage AI integration" + }, + "key": { + "type": "string", + "description": "API Key. 
Example: Enter your Voyage AI API Key", + "pattern": "^\\S.*\\S$|^\\S$" + } + }, + "required": [ + "name", + "key" + ] + }, + "SourceConnectorInput": { + "type": "object", + "description": "Source connector configuration", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the source connector" + }, + "type": { + "type": "string", + "description": "Type of source connector", + "enum": [ + "AWS_S3", + "AZURE_BLOB", + "CONFLUENCE", + "DISCORD", + "DROPBOX", + "DROPBOX_OAUTH", + "DROPBOX_OAUTH_MULTI", + "DROPBOX_OAUTH_MULTI_CUSTOM", + "GOOGLE_DRIVE_OAUTH", + "GOOGLE_DRIVE", + "GOOGLE_DRIVE_OAUTH_MULTI", + "GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM", + "FIRECRAWL", + "GCS", + "INTERCOM", + "NOTION", + "NOTION_OAUTH_MULTI", + "NOTION_OAUTH_MULTI_CUSTOM", + "ONE_DRIVE", + "SHAREPOINT", + "WEB_CRAWLER", + "FILE_UPLOAD", + "GITHUB", + "FIREFLIES" + ] + }, + "config": { + "oneOf": [ + { + "$ref": "#/components/schemas/AWS_S3Config" + }, + { + "$ref": "#/components/schemas/AZURE_BLOBConfig" + }, + { + "$ref": "#/components/schemas/CONFLUENCEConfig" + }, + { + "$ref": "#/components/schemas/DISCORDConfig" + }, + { + "$ref": "#/components/schemas/DROPBOXConfig" + }, + { + "$ref": "#/components/schemas/GOOGLE_DRIVE_OAUTHConfig" + }, + { + "$ref": "#/components/schemas/GOOGLE_DRIVEConfig" + }, + { + "$ref": "#/components/schemas/GOOGLE_DRIVE_OAUTH_MULTIConfig" + }, + { + "$ref": "#/components/schemas/GOOGLE_DRIVE_OAUTH_MULTI_CUSTOMConfig" + }, + { + "$ref": "#/components/schemas/FIRECRAWLConfig" + }, + { + "$ref": "#/components/schemas/GCSConfig" + }, + { + "$ref": "#/components/schemas/INTERCOMConfig" + }, + { + "$ref": "#/components/schemas/NOTIONConfig" + }, + { + "$ref": "#/components/schemas/ONE_DRIVEConfig" + }, + { + "$ref": "#/components/schemas/SHAREPOINTConfig" + }, + { + "$ref": "#/components/schemas/WEB_CRAWLERConfig" + }, + { + "$ref": "#/components/schemas/GITHUBConfig" + }, + { + "$ref": 
"#/components/schemas/FIREFLIESConfig" + } + ], + "description": "Configuration specific to the connector type" + } + }, + "required": [ + "id", + "type", + "config" + ] + }, + "DestinationConnectorInput": { + "type": "object", + "description": "Destination connector configuration", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the destination connector" + }, + "type": { + "type": "string", + "description": "Type of destination connector", + "enum": [ + "CAPELLA", + "DATASTAX", + "ELASTIC", + "PINECONE", + "SINGLESTORE", + "MILVUS", + "POSTGRESQL", + "QDRANT", + "SUPABASE", + "WEAVIATE", + "AZUREAISEARCH", + "TURBOPUFFER" + ] + }, + "config": { + "oneOf": [ + { + "$ref": "#/components/schemas/CAPELLAConfig" + }, + { + "$ref": "#/components/schemas/DATASTAXConfig" + }, + { + "$ref": "#/components/schemas/ELASTICConfig" + }, + { + "$ref": "#/components/schemas/PINECONEConfig" + }, + { + "$ref": "#/components/schemas/SINGLESTOREConfig" + }, + { + "$ref": "#/components/schemas/MILVUSConfig" + }, + { + "$ref": "#/components/schemas/POSTGRESQLConfig" + }, + { + "$ref": "#/components/schemas/QDRANTConfig" + }, + { + "$ref": "#/components/schemas/SUPABASEConfig" + }, + { + "$ref": "#/components/schemas/WEAVIATEConfig" + }, + { + "$ref": "#/components/schemas/AZUREAISEARCHConfig" + }, + { + "$ref": "#/components/schemas/TURBOPUFFERConfig" + } + ], + "description": "Configuration specific to the connector type" + } + }, + "required": [ + "id", + "type", + "config" + ] + }, + "AIPlatformInput": { + "type": "object", + "description": "AI platform configuration", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the AI platform" + }, + "type": { + "type": "string", + "description": "Type of AI platform", + "enum": [ + "BEDROCK", + "VERTEX", + "OPENAI", + "VOYAGE" + ] + }, + "config": { + "oneOf": [], + "description": "Configuration specific to the AI platform" 
+ } + }, + "required": [ + "id", + "type", + "config" + ] + }, + "UpdateAiplatformConnectorRequest": { + "oneOf": [ + { + "type": "object", + "title": "Bedrock", + "properties": { + "config": { + "$ref": "#/components/schemas/BEDROCKAuthConfig" + } + }, + "example": { + "config": { + "access-key": "Enter your Amazon Bedrock Access Key", + "key": "Enter your Amazon Bedrock Secret Key", + "region": "Region Name" + } + } + }, + { + "type": "object", + "title": "Vertex", + "properties": { + "config": { + "$ref": "#/components/schemas/VERTEXAuthConfig" + } + }, + "example": { + "config": { + "key": "Enter the contents of your Google Vertex AI Service Account JSON file", + "region": "Region Name, e.g. us-central1" + } + } + }, + { + "type": "object", + "title": "Openai", + "properties": { + "config": { + "$ref": "#/components/schemas/OPENAIAuthConfig" + } + }, + "example": { + "config": { + "key": "Enter your OpenAI API Key" + } + } + }, + { + "type": "object", + "title": "Voyage", + "properties": { + "config": { + "$ref": "#/components/schemas/VOYAGEAuthConfig" + } + }, + "example": { + "config": { + "key": "Enter your Voyage AI API Key" + } + } + } + ] + }, + "PipelineSourceConnectorRequest": { + "type": "array", + "minItems": 1, + "items": { + "oneOf": [ + { + "type": "object", + "title": "Amazon S3", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "AWS_S3" + ], + "description": "Connector type (must be \"AWS_S3\")" + } + }, + "example": { + "id": "7fcda494-eea2-42aa-bd79-4d4fde9f2f72", + "type": "AWS_S3" + } + }, + { + "type": "object", + "title": "Azure Blob Storage", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "AZURE_BLOB" + 
], + "description": "Connector type (must be \"AZURE_BLOB\")" + } + }, + "example": { + "id": "ab471549-9d5c-4957-95b2-c04d09ef3c82", + "type": "AZURE_BLOB" + } + }, + { + "type": "object", + "title": "Confluence", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "CONFLUENCE" + ], + "description": "Connector type (must be \"CONFLUENCE\")" + } + }, + "example": { + "id": "7d615128-4045-4a4d-8303-567e3d0b8583", + "type": "CONFLUENCE" + } + }, + { + "type": "object", + "title": "Discord", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "DISCORD" + ], + "description": "Connector type (must be \"DISCORD\")" + } + }, + "example": { + "id": "99916547-656e-4e02-86b4-d8ca5e0324d1", + "type": "DISCORD" + } + }, + { + "type": "object", + "title": "Dropbox", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "DROPBOX" + ], + "description": "Connector type (must be \"DROPBOX\")" + } + }, + "example": { + "id": "39189bb2-76c6-4c77-8c97-d225d44682aa", + "type": "DROPBOX" + } + }, + { + "type": "object", + "title": "Dropbox Oauth", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "DROPBOX_OAUTH" + ], + "description": "Connector type (must be \"DROPBOX_OAUTH\")" + } + }, + "example": { + "id": "4b74354f-1781-4267-b178-61186c8d9434", + "type": "DROPBOX_OAUTH" + } + }, + { + "type": "object", + 
"title": "Dropbox Oauth Multi", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "DROPBOX_OAUTH_MULTI" + ], + "description": "Connector type (must be \"DROPBOX_OAUTH_MULTI\")" + } + }, + "example": { + "id": "beae78e5-1c1b-49c1-9ff4-fbe94ea38065", + "type": "DROPBOX_OAUTH_MULTI" + } + }, + { + "type": "object", + "title": "Dropbox Oauth Multi Custom", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "DROPBOX_OAUTH_MULTI_CUSTOM" + ], + "description": "Connector type (must be \"DROPBOX_OAUTH_MULTI_CUSTOM\")" + } + }, + "example": { + "id": "6983766e-acce-4b65-a6d4-7390648d9fe0", + "type": "DROPBOX_OAUTH_MULTI_CUSTOM" + } + }, + { + "type": "object", + "title": "Google Drive OAuth", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "GOOGLE_DRIVE_OAUTH" + ], + "description": "Connector type (must be \"GOOGLE_DRIVE_OAUTH\")" + } + }, + "example": { + "id": "278152cb-7ba8-454d-b6f2-8152e8d3f58f", + "type": "GOOGLE_DRIVE_OAUTH" + } + }, + { + "type": "object", + "title": "Google Drive", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "GOOGLE_DRIVE" + ], + "description": "Connector type (must be \"GOOGLE_DRIVE\")" + } + }, + "example": { + "id": "73a245b4-92d9-408e-9ff1-975bbc9f06bf", + "type": "GOOGLE_DRIVE" + } + }, + { + "type": "object", + "title": "Google Drive Oauth Multi", + 
"required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "GOOGLE_DRIVE_OAUTH_MULTI" + ], + "description": "Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI\")" + } + }, + "example": { + "id": "3d3c8017-0039-444f-8d23-5d896bdc0cc4", + "type": "GOOGLE_DRIVE_OAUTH_MULTI" + } + }, + { + "type": "object", + "title": "Google Drive Oauth Multi Custom", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM" + ], + "description": "Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM\")" + } + }, + "example": { + "id": "3f76c0ac-f6e1-4d97-9a87-0a921e6fdcfb", + "type": "GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM" + } + }, + { + "type": "object", + "title": "Firecrawl", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "FIRECRAWL" + ], + "description": "Connector type (must be \"FIRECRAWL\")" + } + }, + "example": { + "id": "f69356ed-f45f-46ca-817c-6f1fb439df45", + "type": "FIRECRAWL" + } + }, + { + "type": "object", + "title": "Google Cloud Storage", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "GCS" + ], + "description": "Connector type (must be \"GCS\")" + } + }, + "example": { + "id": "f78697ac-b76b-45f1-b1c9-6f42b277b152", + "type": "GCS" + } + }, + { + "type": "object", + "title": "Intercom", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": 
{ + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "INTERCOM" + ], + "description": "Connector type (must be \"INTERCOM\")" + } + }, + "example": { + "id": "04cf8d76-76ca-44a9-9074-e80db96f610d", + "type": "INTERCOM" + } + }, + { + "type": "object", + "title": "Notion", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "NOTION" + ], + "description": "Connector type (must be \"NOTION\")" + } + }, + "example": { + "id": "fdc5c060-ec3f-4777-aea4-d9fc6b33421e", + "type": "NOTION" + } + }, + { + "type": "object", + "title": "Notion Oauth Multi", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "NOTION_OAUTH_MULTI" + ], + "description": "Connector type (must be \"NOTION_OAUTH_MULTI\")" + } + }, + "example": { + "id": "fc84894f-8749-49e5-849e-e2bb889471ed", + "type": "NOTION_OAUTH_MULTI" + } + }, + { + "type": "object", + "title": "Notion Oauth Multi Custom", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "NOTION_OAUTH_MULTI_CUSTOM" + ], + "description": "Connector type (must be \"NOTION_OAUTH_MULTI_CUSTOM\")" + } + }, + "example": { + "id": "654d8b0a-3675-43b0-84e7-f095438f0c4d", + "type": "NOTION_OAUTH_MULTI_CUSTOM" + } + }, + { + "type": "object", + "title": "OneDrive", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + 
"type": "string", + "enum": [ + "ONE_DRIVE" + ], + "description": "Connector type (must be \"ONE_DRIVE\")" + } + }, + "example": { + "id": "5914cfcf-2ddd-452e-93de-f1fa91c1100c", + "type": "ONE_DRIVE" + } + }, + { + "type": "object", + "title": "Sharepoint", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "SHAREPOINT" + ], + "description": "Connector type (must be \"SHAREPOINT\")" + } + }, + "example": { + "id": "cff6cea9-70bf-43a8-b2c8-18b26f60acff", + "type": "SHAREPOINT" + } + }, + { + "type": "object", + "title": "Web Crawler", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "WEB_CRAWLER" + ], + "description": "Connector type (must be \"WEB_CRAWLER\")" + } + }, + "example": { + "id": "c7744219-d293-4ed6-ab77-6cc8939c20a0", + "type": "WEB_CRAWLER" + } + }, + { + "type": "object", + "title": "File Upload", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "FILE_UPLOAD" + ], + "description": "Connector type (must be \"FILE_UPLOAD\")" + } + }, + "example": { + "id": "45b74083-ed19-4a0c-b707-b2781f1061fb", + "type": "FILE_UPLOAD" + } + }, + { + "type": "object", + "title": "Github", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "GITHUB" + ], + "description": "Connector type (must be \"GITHUB\")" + } + }, + "example": { + "id": "728bb09f-00bc-42af-954e-730f9bd08339", + "type": 
"GITHUB" + } + }, + { + "type": "object", + "title": "Fireflies", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "FIREFLIES" + ], + "description": "Connector type (must be \"FIREFLIES\")" + } + }, + "example": { + "id": "11468c05-8d3d-4f50-a471-870c60bdbc0c", + "type": "FIREFLIES" + } + } + ] + } + }, + "PipelineDestinationConnectorRequest": { + "type": "array", + "minItems": 1, + "items": { + "oneOf": [ + { + "type": "object", + "title": "Capella", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "CAPELLA" + ], + "description": "Connector type (must be \"CAPELLA\")" + } + }, + "example": { + "id": "d8456be7-7b8d-44b8-9bde-b3775c00f4f1", + "type": "CAPELLA" + } + }, + { + "type": "object", + "title": "Datastax", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "DATASTAX" + ], + "description": "Connector type (must be \"DATASTAX\")" + } + }, + "example": { + "id": "36901ca6-96ad-4486-bbcb-f5fa371ddd16", + "type": "DATASTAX" + } + }, + { + "type": "object", + "title": "Elastic", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "ELASTIC" + ], + "description": "Connector type (must be \"ELASTIC\")" + } + }, + "example": { + "id": "5b50ef61-5aa2-4ad2-bf7c-ece0f4f80199", + "type": "ELASTIC" + } + }, + { + "type": "object", + "title": "Pinecone", + "required": [ + "id", + "type", + 
"config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "PINECONE" + ], + "description": "Connector type (must be \"PINECONE\")" + } + }, + "example": { + "id": "d108887b-24d8-40b5-b380-b22389577747", + "type": "PINECONE" + } + }, + { + "type": "object", + "title": "Singlestore", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "SINGLESTORE" + ], + "description": "Connector type (must be \"SINGLESTORE\")" + } + }, + "example": { + "id": "578186ad-c16c-4f5d-ba34-aa2a7877e8af", + "type": "SINGLESTORE" + } + }, + { + "type": "object", + "title": "Milvus", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "MILVUS" + ], + "description": "Connector type (must be \"MILVUS\")" + } + }, + "example": { + "id": "f3a8514e-5ccd-491e-b354-828664ab0f97", + "type": "MILVUS" + } + }, + { + "type": "object", + "title": "Postgresql", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "POSTGRESQL" + ], + "description": "Connector type (must be \"POSTGRESQL\")" + } + }, + "example": { + "id": "1c6c5283-49b8-4826-a32e-7275467a5623", + "type": "POSTGRESQL" + } + }, + { + "type": "object", + "title": "Qdrant", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "QDRANT" + ], + 
"description": "Connector type (must be \"QDRANT\")" + } + }, + "example": { + "id": "515e9279-4f7c-4272-ad6a-73a326b1aa09", + "type": "QDRANT" + } + }, + { + "type": "object", + "title": "Supabase", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "SUPABASE" + ], + "description": "Connector type (must be \"SUPABASE\")" + } + }, + "example": { + "id": "d6e757ae-6212-4906-9382-31f3db0e1166", + "type": "SUPABASE" + } + }, + { + "type": "object", + "title": "Weaviate", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "WEAVIATE" + ], + "description": "Connector type (must be \"WEAVIATE\")" + } + }, + "example": { + "id": "91dc9d96-f730-4fb1-94dc-8a11148321a9", + "type": "WEAVIATE" + } + }, + { + "type": "object", + "title": "Azureaisearch", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "AZUREAISEARCH" + ], + "description": "Connector type (must be \"AZUREAISEARCH\")" + } + }, + "example": { + "id": "44c8d4e1-8dc1-4f77-8bf4-c8bb6823a209", + "type": "AZUREAISEARCH" + } + }, + { + "type": "object", + "title": "Turbopuffer", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "TURBOPUFFER" + ], + "description": "Connector type (must be \"TURBOPUFFER\")" + } + }, + "example": { + "id": "3cd1aa86-563d-4fb0-8213-eb43282c13b7", + "type": "TURBOPUFFER" + } + } + ] + } + }, + 
"PipelineAIPlatformRequest": { + "type": "array", + "minItems": 1, + "items": { + "oneOf": [ + { + "type": "object", + "title": "Bedrock", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "BEDROCK" + ], + "description": "Connector type (must be \"BEDROCK\")" + } + }, + "example": { + "id": "ba5e335e-3118-4904-a19c-522e4f6d09e7", + "type": "BEDROCK" + } + }, + { + "type": "object", + "title": "Vertex", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "VERTEX" + ], + "description": "Connector type (must be \"VERTEX\")" + } + }, + "example": { + "id": "276ccd36-3ba4-4751-892c-75e2c831698a", + "type": "VERTEX" + } + }, + { + "type": "object", + "title": "Openai", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "OPENAI" + ], + "description": "Connector type (must be \"OPENAI\")" + } + }, + "example": { + "id": "6f2b0c17-1e98-4c51-8b3b-2fcd733a3490", + "type": "OPENAI" + } + }, + { + "type": "object", + "title": "Voyage", + "required": [ + "id", + "type", + "config" + ], + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Unique identifier for the connector" + }, + "type": { + "type": "string", + "enum": [ + "VOYAGE" + ], + "description": "Connector type (must be \"VOYAGE\")" + } + }, + "example": { + "id": "d8576b14-beda-4532-a4ca-40caef6b1bdc", + "type": "VOYAGE" + } + } + ] + } + } + }, + "parameters": {} +}, +"paths": { + "/org/{organizationId}/pipelines": { + "post": { + "operationId": "createPipeline", + "summary": 
"Create a new pipeline", + "description": "Creates a new pipeline with source connectors, destination connector, and AI platform configuration. The specific configuration fields required depend on the connector types selected.", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PipelineConfigurationSchema" + }, + "example": { + "sourceConnectors": [ + { + "id": "4d61dfa9-ce3c-48df-824f-85d1d7421a84", + "type": "AWS_S3" + } + ], + "destinationConnector": [ + { + "id": "e6d268f5-7164-4411-a24b-3d59c78958c8", + "type": "CAPELLA" + } + ], + "aiPlatform": [ + { + "id": "65b8d1f0-32ad-459f-8799-7d359abf4ee4", + "type": "BEDROCK" + } + ], + "pipelineName": "Data Processing Pipeline", + "schedule": { + "type": "manual" + } + } + } + } + }, + "responses": { + "200": { + "description": "Pipeline created successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreatePipelineResponse" + }, + "example": { + "message": "Operation completed successfully", + "data": {} + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + 
"properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + 
"error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "get": { + "operationId": "getPipelines", + "summary": "Get all pipelines", + "description": "Returns a list of all pipelines in the organization", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Pipelines retrieved successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetPipelinesResponse" + }, + "example": { + "message": "Operation completed successfully", + "data": [ + { + "id": "89628a1a-fd2f-4c4e-a7bc-2f32280c6cac", + "name": "Example Item", + "type": "example-type", + "status": "active" + } + ] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + 
"successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/pipelines/{pipelineId}": { + "get": { + "operationId": "getPipeline", + "summary": "Get a pipeline", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" 
+ }, + "required": true, + "name": "organizationId", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "pipelineId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Pipeline fetched successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetPipelineResponse" + }, + "example": { + "message": "Operation completed successfully", + "data": { + "id": "d9b5c842-9e69-4340-843b-d66f347ab32f", + "name": "My PipelineSummary", + "documentCount": 42, + "sourceConnectorAuthIds": [], + "destinationConnectorAuthIds": [], + "aiPlatformAuthIds": [], + "sourceConnectorTypes": [], + "destinationConnectorTypes": [], + "aiPlatformTypes": [], + "createdAt": "example-createdAt", + "createdBy": "example-createdBy", + "status": "active", + "configDoc": {}, + "sourceConnectors": [], + "destinationConnectors": [], + "aiPlatforms": [] + } + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": 
"example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "delete": { + "operationId": "deletePipeline", + "summary": "Delete a pipeline", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + 
}, + "required": true, + "name": "organizationId", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "pipelineId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Pipeline deleted successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DeletePipelineResponse" + }, + "example": { + "message": "Operation completed successfully" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + 
"example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/pipelines/{pipelineId}/events": { + "get": { + "operationId": "getPipelineEvents", + "summary": "Get pipeline events", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "pipelineId", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": false, + "name": "nextToken", + "in": "query" + } + ], + "responses": { + "200": { + "description": "Pipeline events fetched successfully", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/GetPipelineEventsResponse" + }, + "example": { + "message": "Operation completed successfully", + "nextToken": "token_example_123456", + "data": [ + { + "id": "f4b6a53d-7565-417a-95c9-f9c8155a6d40", + "name": "Example Item", + "type": "example-type", + "status": "active" + } + ] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + 
}, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/pipelines/{pipelineId}/metrics": { + "get": { + "operationId": "getPipelineMetrics", + "summary": "Get pipeline metrics", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "pipelineId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Pipeline metrics fetched successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetPipelineMetricsResponse" + }, + "example": { + "message": "Operation completed successfully", + "data": [ + { + "id": "70ebe007-331d-4c9d-b58e-78b6aa60155f", + "name": "Example Item", + "type": "example-type", + 
"status": "active" + } + ] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": 
"string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/pipelines/{pipelineId}/retrieval": { + "post": { + "operationId": "retrieveDocuments", + "summary": "Retrieve documents from a pipeline", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "pipelineId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RetrieveDocumentsRequest" + }, + "example": { + "question": "example-question", + "numResults": 100, + "rerank": true, + "metadata-filters": [], + "context": { + "messages": [] + } + } + } + } + }, + "responses": { + "200": { + "description": "Documents retrieved successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RetrieveDocumentsResponse" + }, + "example": { + "question": "example-question", + "documents": [], + "average_relevancy": 100, + "ndcg": 100 + } + } + } + }, + 
"400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + 
"type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/pipelines/{pipelineId}/start": { + "post": { + "operationId": "startPipeline", + "summary": "Start a pipeline", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "pipelineId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Pipeline started successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StartPipelineResponse" + }, + "example": { + "message": "Operation completed successfully" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An 
error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": 
"string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/pipelines/{pipelineId}/stop": { + "post": { + "operationId": "stopPipeline", + "summary": "Stop a pipeline", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "pipelineId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Pipeline stopped successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StopPipelineResponse" + }, + "example": { + "message": "Operation completed successfully" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + 
"items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/pipelines/{pipelineId}/deep-research": { + "post": { 
+ "operationId": "startDeepResearch", + "summary": "Start a deep research", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "pipelineId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StartDeepResearchRequest" + }, + "example": { + "query": "example-query", + "webSearch": true, + "schema": "example-schema", + "n8n": { + "account": "example-account", + "webhookPath": "/example/path", + "headers": {} + } + } + } + } + }, + "responses": { + "200": { + "description": "Deep Research started successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StartDeepResearchResponse" + }, + "example": { + "researchId": "c9771f69-5a6f-4739-936d-c283ac250c2a" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + 
"required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/pipelines/{pipelineId}/deep-research/{researchId}": { + "get": { + "operationId": 
"getDeepResearchResult", + "summary": "Get deep research result", + "tags": [ + "Pipelines" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "pipeline", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "researchId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Get Deep Research was successful", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetDeepResearchResponse" + }, + "example": { + "ready": true, + "data": { + "success": true, + "events": [], + "markdown": "example-markdown", + "error": null + } + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + 
"content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/connectors/sources": { + "post": { + "operationId": "createSourceConnector", + "summary": "Create a new source connector", + "description": "Creates a new source connector for data ingestion. 
The specific configuration fields required depend on the connector type selected.", + "tags": [ + "Connectors/Source Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSourceConnectorRequest" + }, + "example": [ + { + "name": "My CreateSourceConnectorRequest", + "type": "AWS_S3", + "config": { + "file-extensions": "pdf", + "idle-time": 300, + "recursive": true, + "path-prefix": "/example/path", + "path-metadata-regex": "/example/path", + "path-regex-group-names": "/example/path" + } + } + ] + } + } + }, + "responses": { + "200": { + "description": "Connector successfully created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSourceConnectorResponse" + }, + "example": { + "message": "Operation completed successfully", + "connectors": [] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + 
"type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "get": { + "operationId": "getSourceConnectors", + "summary": "Get all existing source 
connectors", + "tags": [ + "Connectors/Source Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Get all source connectors", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "sourceConnectors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SourceConnector" + } + } + }, + "required": [ + "sourceConnectors" + ] + }, + "example": { + "sourceConnectors": [ + { + "id": "09446dda-5528-40c7-af4c-27c5dd07db06", + "type": "AWS_S3", + "name": "S3 Documents Bucket", + "createdAt": "2025-06-12T19:49:20.615Z", + "verificationStatus": "verified" + }, + { + "id": "20d08632-842d-44e5-befd-7a70ea158aa5", + "type": "GOOGLE_DRIVE", + "name": "Team Shared Drive", + "createdAt": "2025-06-12T19:49:20.615Z", + "verificationStatus": "verified" + } + ] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + 
"example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/connectors/sources/{sourceConnectorId}": { + "get": { + "operationId": "getSourceConnector", + "summary": "Get a source connector", + 
"tags": [ + "Connectors/Source Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "sourceConnectorId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Get a source connector", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SourceConnector" + }, + "example": { + "id": "7c8b6c1b-b165-4114-9d69-6a8b12c48af2", + "type": "example-type", + "name": "My SourceConnector", + "configDoc": {}, + "createdAt": "example-createdAt", + "createdById": "3656d166-0e5a-4194-bda7-35b560f7968f", + "lastUpdatedById": "bd1fcb97-f39b-4c8a-9e36-e44119dc969d", + "createdByEmail": "user@example.com", + "lastUpdatedByEmail": "user@example.com", + "errorMessage": "Operation completed successfully", + "verificationStatus": "verified" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + 
"error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "patch": { + "operationId": "updateSourceConnector", + "summary": "Update a source connector", + "tags": [ + "Connectors/Source Connectors" + ], + "security": [ + { + 
"bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "sourceConnectorId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateSourceConnectorRequest" + } + } + } + }, + "responses": { + "200": { + "description": "Source connector successfully updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateSourceConnectorResponse" + }, + "example": { + "message": "Operation completed successfully", + "data": { + "updatedConnector": { + "id": "dad1f732-4227-40d9-b819-dff89e093336", + "type": "example-type", + "name": "My SourceConnector", + "configDoc": {}, + "createdAt": "example-createdAt", + "createdById": "8e84ce2f-6ea6-4c40-bb68-482602dfbcc4", + "lastUpdatedById": "1a0a0be2-fc06-4e58-9859-499214c8f662", + "createdByEmail": "user@example.com", + "lastUpdatedByEmail": "user@example.com", + "errorMessage": "Operation completed successfully", + "verificationStatus": "verified" + }, + "pipelineIds": [] + } + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + 
"type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + 
"failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "delete": { + "operationId": "deleteSourceConnector", + "summary": "Delete a source connector", + "tags": [ + "Connectors/Source Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "sourceConnectorId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Source connector successfully deleted", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DeleteSourceConnectorResponse" + }, + "example": { + "message": "Operation completed successfully" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + 
"application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/connectors/destinations": { + "post": { + "operationId": "createDestinationConnector", + "summary": "Create a new destination connector", + "description": "Creates a new destination connector for data storage. 
The specific configuration fields required depend on the connector type selected.", + "tags": [ + "Connectors/Destination Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateDestinationConnectorRequest" + }, + "example": [ + { + "name": "My CreateDestinationConnectorRequest", + "type": "CAPELLA", + "config": { + "bucket": "example-bucket", + "scope": "example-scope", + "collection": "example-collection", + "index": "example-index" + } + } + ] + } + } + }, + "responses": { + "200": { + "description": "Connector successfully created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateDestinationConnectorResponse" + }, + "example": { + "message": "Operation completed successfully", + "connectors": [] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": 
[ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "get": { + "operationId": "getDestinationConnectors", + "summary": "Get all existing destination connectors", + "tags": [ + 
"Connectors/Destination Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Get all destination connectors", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "destinationConnectors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/DestinationConnector" + } + } + }, + "required": [ + "destinationConnectors" + ] + }, + "example": { + "destinationConnectors": [] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": 
"array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/connectors/destinations/{destinationConnectorId}": { + "get": { + "operationId": "getDestinationConnector", + "summary": "Get a destination connector", + "tags": [ + "Connectors/Destination Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "destinationConnectorId", + "in": "path" + } + ], + 
"responses": { + "200": { + "description": "Get a destination connector", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DestinationConnector" + }, + "example": { + "id": "665bddca-2b8d-4b09-b146-2a0fbad8fd1c", + "type": "example-type", + "name": "My DestinationConnector", + "configDoc": {}, + "createdAt": "example-createdAt", + "createdById": "60381ff5-7210-46a3-9502-a5231357b642", + "lastUpdatedById": "2c9413f7-7d26-49d9-babc-4b223dcb38b8", + "createdByEmail": "user@example.com", + "lastUpdatedByEmail": "user@example.com", + "errorMessage": "Operation completed successfully", + "verificationStatus": "verified" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": 
"string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "patch": { + "operationId": "updateDestinationConnector", + "summary": "Update a destination connector", + "tags": [ + "Connectors/Destination Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "destinationConnectorId", + "in": "path" + } + ], + "requestBody": { + "required": true, + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateDestinationConnectorRequest" + } + } + } + }, + "responses": { + "200": { + "description": "Destination connector successfully updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateDestinationConnectorResponse" + }, + "example": { + "message": "Operation completed successfully", + "data": { + "updatedConnector": { + "id": "2b1cedf6-641e-4abf-a4ed-776ff2e17d76", + "type": "example-type", + "name": "My DestinationConnector", + "configDoc": {}, + "createdAt": "example-createdAt", + "createdById": "bdb73dad-4402-4748-bd2c-12038c16bdb5", + "lastUpdatedById": "7597cb79-ba31-4b11-b2ee-903a3dd5ebd4", + "createdByEmail": "user@example.com", + "lastUpdatedByEmail": "user@example.com", + "errorMessage": "Operation completed successfully", + "verificationStatus": "verified" + }, + "pipelineIds": [] + } + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": 
"example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "delete": { + "operationId": "deleteDestinationConnector", + "summary": "Delete a destination connector", + "tags": [ + "Connectors/Destination Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + 
"parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "destinationConnectorId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Destination connector successfully deleted", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DeleteDestinationConnectorResponse" + }, + "example": { + "message": "Operation completed successfully" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": 
"array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/connectors/aiplatforms": { + "post": { + "operationId": "createAIPlatformConnector", + "summary": "Create a new AI platform connector", + "description": "Creates a new AI platform connector for embeddings and processing. 
The specific configuration fields required depend on the platform type selected.", + "tags": [ + "Connectors/AI Platforms" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateAIPlatformConnectorRequest" + }, + "example": [ + { + "name": "My CreateAIPlatformConnectorRequest", + "type": "BEDROCK", + "config": { + "name": "My BEDROCKAuthConfig", + "access-key": "AKIAIOSFODNN7EXAMPLE", + "key": "key_example_123456", + "region": "us-east-1" + } + } + ] + } + } + }, + "responses": { + "200": { + "description": "Connector successfully created", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateAIPlatformConnectorResponse" + }, + "example": { + "message": "Operation completed successfully", + "connectors": [] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + 
"error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "get": { + "operationId": "getAIPlatformConnectors", + "summary": "Get all existing AI Platform connectors", + "tags": [ + "Connectors/AI 
Platforms" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Get all existing AI Platform connectors", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "aiPlatformConnectors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/AIPlatform" + } + } + }, + "required": [ + "aiPlatformConnectors" + ] + }, + "example": { + "aiPlatformConnectors": [] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": 
"string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/connectors/aiplatforms/{aiplatformId}": { + "get": { + "operationId": "getAIPlatformConnector", + "summary": "Get an AI platform connector", + "tags": [ + "Connectors/AI Platforms" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "aiplatformId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Get an AI platform 
connector", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AIPlatform" + }, + "example": { + "id": "e2176580-2423-4753-81a0-63788261dca7", + "type": "example-type", + "name": "My AIPlatform", + "configDoc": {}, + "createdAt": "example-createdAt", + "createdById": "126b440d-a790-4fc2-a891-32f046f2a7d1", + "lastUpdatedById": "970e121f-ff1f-44ff-923f-e428868a919e", + "createdByEmail": "user@example.com", + "lastUpdatedByEmail": "user@example.com", + "errorMessage": "Operation completed successfully", + "verificationStatus": "verified" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + 
} + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "patch": { + "operationId": "updateAIPlatformConnector", + "summary": "Update an AI Platform connector", + "tags": [ + "Connectors/AI Platforms" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "aiplatformId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/UpdateAiplatformConnectorRequest" + } + } + } + }, + "responses": { + "200": { + "description": "AI Platform connector successfully updated", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateAIPlatformConnectorResponse" + }, + "example": { + "message": "Operation completed successfully", + "data": { + "updatedConnector": { + "id": "bcc5f3a2-4536-4b0a-8714-01692496e00f", + "type": "example-type", + "name": "My AIPlatform", + "configDoc": {}, + "createdAt": "example-createdAt", + "createdById": "87f8375a-1d87-40df-883d-94c8ff660970", + "lastUpdatedById": "1d2cfc64-5450-42d3-b407-935325ea03e7", + "createdByEmail": "user@example.com", + "lastUpdatedByEmail": "user@example.com", + "errorMessage": "Operation completed successfully", + "verificationStatus": "verified" + }, + "pipelineIds": [] + } + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + 
} + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "delete": { + "operationId": "deleteAIPlatform", + "summary": "Delete an AI platform connector", + "tags": [ + "Connectors/AI Platforms" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", 
+ "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "aiplatformId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "AI Platform connector successfully deleted", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DeleteAIPlatformConnectorResponse" + }, + "example": { + "message": "Operation completed successfully" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An 
error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/uploads/{connectorId}/files": { + "get": { + "operationId": "getUploadFilesFromConnector", + "summary": "Get uploaded files from a file upload connector", + "tags": [ + "Uploads" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "connectorId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Files retrieved successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetUploadFilesResponse" + }, + "example": { + "message": "Operation completed 
successfully", + "files": [] + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": 
"string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "put": { + "operationId": "startFileUploadToConnector", + "summary": "Upload a file to a file upload connector", + "tags": [ + "Uploads" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "connectorId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StartFileUploadToConnectorRequest" + }, + "example": { + "name": "My StartFileUploadToConnectorRequest", + "contentType": "document", + "metadata": "example-metadata" + } + } + } + }, + "responses": { + "200": { + "description": "File ready to be uploaded", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StartFileUploadToConnectorResponse" + }, + "example": { + "uploadUrl": "https://api.example.com" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + 
"properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": 
"An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "delete": { + "operationId": "deleteFileFromConnector", + "summary": "Delete a file from a file upload connector", + "tags": [ + "Uploads" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "connectorId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "File deleted successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DeleteFileResponse" + }, + "example": { + "message": "Operation completed successfully", + "fileName": "document.pdf" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + 
"description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": 
"array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/extraction": { + "post": { + "operationId": "startExtraction", + "summary": "Start content extraction from a file", + "tags": [ + "Extraction" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StartExtractionRequest" + }, + "example": { + "fileId": "2a53d7fa-748a-4b7f-a35b-e5f73944f444", + "type": "iris", + "chunkingStrategy": "markdown", + "chunkSize": 20, + "metadata": { + "schemas": [], + "inferSchema": true + } + } + } + } + }, + "responses": { + "200": { + "description": "Extraction started successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StartExtractionResponse" + }, + "example": { + "message": "Operation completed successfully", + "extractionId": "54033280-6cbd-493f-8047-1722afc6ae84" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + 
"error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An 
error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/extraction/{extractionId}": { + "get": { + "operationId": "getExtractionResult", + "summary": "Get extraction result", + "tags": [ + "Extraction" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "extractionId", + "in": "path" + } + ], + "responses": { + "200": { + "description": "Extraction started successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExtractionResultResponse" + }, + "example": { + "ready": true, + "data": { + "success": true, + "chunks": [], + "text": "example-text", + "metadata": "example-metadata", + "metadataSchema": "example-metadataSchema", + "chunksMetadata": [], + "chunksSchema": [], + "error": null + } + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + 
"required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/files": { + "post": { + "operationId": "startFileUpload", + "summary": "Upload a generic file to 
the platform", + "tags": [ + "Files" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organizationId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StartFileUploadRequest" + }, + "example": { + "name": "My StartFileUploadRequest", + "contentType": "document" + } + } + } + }, + "responses": { + "200": { + "description": "File upload started successfully", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/StartFileUploadResponse" + }, + "example": { + "fileId": "057d09e1-77a4-4cf3-8af5-ede594a3a2aa", + "uploadUrl": "https://api.example.com" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { 
+ "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + }, + "/org/{organizationId}/connectors/sources/{sourceConnectorId}/users": { + "post": { + "operationId": "addUserToSourceConnector", + "summary": "Add a user to a source connector", + "tags": [ + "Connectors/Source Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": 
"organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": "sourceConnectorId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddUserToSourceConnectorRequest" + }, + "example": { + "userId": "29cc613c-dcb8-429e-88fe-be19dbd8b312", + "selectedFiles": {}, + "refreshToken": "refresh_token_example_123456", + "accessToken": "access_token_example_123456" + } + } + } + }, + "responses": { + "200": { + "description": "User successfully added to the source connector", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AddUserFromSourceConnectorResponse" + }, + "example": { + "message": "Operation completed successfully" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": 
{ + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "patch": { + "operationId": "updateUserInSourceConnector", + "summary": "Update a source connector user", + "tags": [ + "Connectors/Source Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": 
"string" + }, + "required": true, + "name": "sourceConnectorId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateUserInSourceConnectorRequest" + }, + "example": { + "userId": "1dda2405-5b9d-403a-bdf7-01a78cb796da", + "selectedFiles": {}, + "refreshToken": "refresh_token_example_123456", + "accessToken": "access_token_example_123456" + } + } + } + }, + "responses": { + "200": { + "description": "User successfully updated in the source connector", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateUserInSourceConnectorResponse" + }, + "example": { + "message": "Operation completed successfully" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": 
"object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + }, + "delete": { + "operationId": "deleteUserFromSourceConnector", + "summary": "Delete a source connector user", + "tags": [ + "Connectors/Source Connectors" + ], + "security": [ + { + "bearerAuth": [] + } + ], + "parameters": [ + { + "schema": { + "type": "string" + }, + "required": true, + "name": "organization", + "in": "path" + }, + { + "schema": { + "type": "string" + }, + "required": true, + "name": 
"sourceConnectorId", + "in": "path" + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RemoveUserFromSourceConnectorRequest" + }, + "example": { + "userId": "a3703b11-2eba-45e3-87cd-7e5e7c076e3a" + } + } + } + }, + "responses": { + "200": { + "description": "User successfully removed from the source connector", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RemoveUserFromSourceConnectorResponse" + }, + "example": { + "message": "Operation completed successfully" + } + } + } + }, + "400": { + "description": "Invalid request", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "401": { + "description": "Unauthorized", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "403": { + "description": "Forbidden", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": 
"string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "404": { + "description": "Not found", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "error": { + "type": "string" + }, + "details": { + "type": "string" + }, + "failedUpdates": { + "type": "array", + "items": { + "type": "string" + } + }, + "successfulUpdates": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "error" + ] + }, + "example": { + "error": "An error occurred", + "details": "example-details", + "failedUpdates": [], + "successfulUpdates": [] + } + } + } + } + } + } + } +} +} \ No newline at end of file diff --git a/vectorize_client/__init__.py b/vectorize_client/__init__.py new file mode 100644 index 0000000..9d8a8c3 --- /dev/null +++ b/vectorize_client/__init__.py @@ -0,0 +1,635 @@ +# coding: utf-8 + +# flake8: noqa + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +__version__ = "1.0.0" + +# Define package exports +__all__ = [ + "ConnectorsAIPlatformsApi", + "ConnectorsDestinationConnectorsApi", + "ConnectorsSourceConnectorsApi", + "ExtractionApi", + "FilesApi", + "PipelinesApi", + "UploadsApi", + "ApiResponse", + "ApiClient", + "Configuration", + "OpenApiException", + "ApiTypeError", + "ApiValueError", + "ApiKeyError", + "ApiAttributeError", + "ApiException", + "AIPlatform", + "AIPlatformConfigSchema", + "AIPlatformInput", + "AIPlatformSchema", + "AIPlatformType", + "AWSS3AuthConfig", + "AWSS3Config", + "AZUREAISEARCHAuthConfig", + "AZUREAISEARCHConfig", + "AZUREBLOBAuthConfig", + "AZUREBLOBConfig", + "AddUserFromSourceConnectorResponse", + "AddUserToSourceConnectorRequest", + "AddUserToSourceConnectorRequestSelectedFiles", + "AddUserToSourceConnectorRequestSelectedFilesAnyOf", + "AddUserToSourceConnectorRequestSelectedFilesAnyOfValue", + "AdvancedQuery", + "AmazonS3", + "AmazonS31", + "AmazonS32", + "AzureBlobStorage", + "AzureBlobStorage1", + "AzureBlobStorage2", + "Azureaisearch", + "Azureaisearch1", + "Azureaisearch2", + "BEDROCKAuthConfig", + "Bedrock", + "Bedrock1", + "Bedrock2", + "CAPELLAAuthConfig", + "CAPELLAConfig", + "CONFLUENCEAuthConfig", + "CONFLUENCEConfig", + "Capella", + "Capella1", + "Capella2", + "Confluence", + "Confluence1", + "Confluence2", + "CreateAIPlatformConnector", + "CreateAIPlatformConnectorRequestInner", + "CreateAIPlatformConnectorResponse", + "CreateDestinationConnector", + "CreateDestinationConnectorRequestInner", + "CreateDestinationConnectorResponse", + "CreatePipelineResponse", + "CreatePipelineResponseData", + "CreateSourceConnector", + "CreateSourceConnectorRequestInner", + "CreateSourceConnectorResponse", + "CreatedAIPlatformConnector", + "CreatedDestinationConnector", + "CreatedSourceConnector", + "DATASTAXAuthConfig", + "DATASTAXConfig", + "DISCORDAuthConfig", + "DISCORDConfig", + "DROPBOXAuthConfig", + "DROPBOXConfig", + "DROPBOXOAUTHAuthConfig", + 
"DROPBOXOAUTHMULTIAuthConfig", + "DROPBOXOAUTHMULTICUSTOMAuthConfig", + "Datastax", + "Datastax1", + "Datastax2", + "DeepResearchResult", + "DeleteAIPlatformConnectorResponse", + "DeleteDestinationConnectorResponse", + "DeleteFileResponse", + "DeletePipelineResponse", + "DeleteSourceConnectorResponse", + "DestinationConnector", + "DestinationConnectorInput", + "DestinationConnectorInputConfig", + "DestinationConnectorSchema", + "DestinationConnectorType", + "Discord", + "Discord1", + "Discord2", + "Document", + "Dropbox", + "Dropbox1", + "Dropbox2", + "DropboxOauth", + "DropboxOauth1", + "DropboxOauth2", + "DropboxOauthMulti", + "DropboxOauthMulti1", + "DropboxOauthMulti2", + "DropboxOauthMultiCustom", + "DropboxOauthMultiCustom1", + "DropboxOauthMultiCustom2", + "ELASTICAuthConfig", + "ELASTICConfig", + "Elastic", + "Elastic1", + "Elastic2", + "ExtractionChunkingStrategy", + "ExtractionResult", + "ExtractionResultResponse", + "ExtractionType", + "FILEUPLOADAuthConfig", + "FIRECRAWLAuthConfig", + "FIRECRAWLConfig", + "FIREFLIESAuthConfig", + "FIREFLIESConfig", + "FileUpload", + "FileUpload1", + "FileUpload2", + "Firecrawl", + "Firecrawl1", + "Firecrawl2", + "Fireflies", + "Fireflies1", + "Fireflies2", + "GCSAuthConfig", + "GCSConfig", + "GITHUBAuthConfig", + "GITHUBConfig", + "GOOGLEDRIVEAuthConfig", + "GOOGLEDRIVEConfig", + "GOOGLEDRIVEOAUTHAuthConfig", + "GOOGLEDRIVEOAUTHConfig", + "GOOGLEDRIVEOAUTHMULTIAuthConfig", + "GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig", + "GOOGLEDRIVEOAUTHMULTICUSTOMConfig", + "GOOGLEDRIVEOAUTHMULTIConfig", + "GetAIPlatformConnectors200Response", + "GetDeepResearchResponse", + "GetDestinationConnectors200Response", + "GetPipelineEventsResponse", + "GetPipelineMetricsResponse", + "GetPipelineResponse", + "GetPipelines400Response", + "GetPipelinesResponse", + "GetSourceConnectors200Response", + "GetUploadFilesResponse", + "Github", + "Github1", + "Github2", + "GoogleCloudStorage", + "GoogleCloudStorage1", + "GoogleCloudStorage2", + 
"GoogleDrive", + "GoogleDrive1", + "GoogleDrive2", + "GoogleDriveOAuth", + "GoogleDriveOAuth1", + "GoogleDriveOAuth2", + "GoogleDriveOauthMulti", + "GoogleDriveOauthMulti1", + "GoogleDriveOauthMulti2", + "GoogleDriveOauthMultiCustom", + "GoogleDriveOauthMultiCustom1", + "GoogleDriveOauthMultiCustom2", + "INTERCOMAuthConfig", + "INTERCOMConfig", + "Intercom", + "Intercom1", + "Intercom2", + "MILVUSAuthConfig", + "MILVUSConfig", + "MetadataExtractionStrategy", + "MetadataExtractionStrategySchema", + "Milvus", + "Milvus1", + "Milvus2", + "N8NConfig", + "NOTIONAuthConfig", + "NOTIONConfig", + "NOTIONOAUTHMULTIAuthConfig", + "NOTIONOAUTHMULTICUSTOMAuthConfig", + "Notion", + "Notion1", + "Notion2", + "NotionOauthMulti", + "NotionOauthMulti1", + "NotionOauthMulti2", + "NotionOauthMultiCustom", + "NotionOauthMultiCustom1", + "NotionOauthMultiCustom2", + "ONEDRIVEAuthConfig", + "ONEDRIVEConfig", + "OPENAIAuthConfig", + "OneDrive", + "OneDrive1", + "OneDrive2", + "Openai", + "Openai1", + "Openai2", + "PINECONEAuthConfig", + "PINECONEConfig", + "POSTGRESQLAuthConfig", + "POSTGRESQLConfig", + "Pinecone", + "Pinecone1", + "Pinecone2", + "PipelineAIPlatformRequestInner", + "PipelineConfigurationSchema", + "PipelineDestinationConnectorRequestInner", + "PipelineEvents", + "PipelineListSummary", + "PipelineMetrics", + "PipelineSourceConnectorRequestInner", + "PipelineSummary", + "Postgresql", + "Postgresql1", + "Postgresql2", + "QDRANTAuthConfig", + "QDRANTConfig", + "Qdrant", + "Qdrant1", + "Qdrant2", + "RemoveUserFromSourceConnectorRequest", + "RemoveUserFromSourceConnectorResponse", + "RetrieveContext", + "RetrieveContextMessage", + "RetrieveDocumentsRequest", + "RetrieveDocumentsResponse", + "SHAREPOINTAuthConfig", + "SHAREPOINTConfig", + "SINGLESTOREAuthConfig", + "SINGLESTOREConfig", + "SUPABASEAuthConfig", + "SUPABASEConfig", + "ScheduleSchema", + "ScheduleSchemaType", + "Sharepoint", + "Sharepoint1", + "Sharepoint2", + "Singlestore", + "Singlestore1", + "Singlestore2", + 
"SourceConnector", + "SourceConnectorInput", + "SourceConnectorInputConfig", + "SourceConnectorSchema", + "SourceConnectorType", + "StartDeepResearchRequest", + "StartDeepResearchResponse", + "StartExtractionRequest", + "StartExtractionResponse", + "StartFileUploadRequest", + "StartFileUploadResponse", + "StartFileUploadToConnectorRequest", + "StartFileUploadToConnectorResponse", + "StartPipelineResponse", + "StopPipelineResponse", + "Supabase", + "Supabase1", + "Supabase2", + "TURBOPUFFERAuthConfig", + "TURBOPUFFERConfig", + "Turbopuffer", + "Turbopuffer1", + "Turbopuffer2", + "UpdateAIPlatformConnectorRequest", + "UpdateAIPlatformConnectorResponse", + "UpdateAiplatformConnectorRequest", + "UpdateDestinationConnectorRequest", + "UpdateDestinationConnectorResponse", + "UpdateSourceConnectorRequest", + "UpdateSourceConnectorResponse", + "UpdateSourceConnectorResponseData", + "UpdateUserInSourceConnectorRequest", + "UpdateUserInSourceConnectorResponse", + "UpdatedAIPlatformConnectorData", + "UpdatedDestinationConnectorData", + "UploadFile", + "VERTEXAuthConfig", + "VOYAGEAuthConfig", + "Vertex", + "Vertex1", + "Vertex2", + "Voyage", + "Voyage1", + "Voyage2", + "WEAVIATEAuthConfig", + "WEAVIATEConfig", + "WEBCRAWLERAuthConfig", + "WEBCRAWLERConfig", + "Weaviate", + "Weaviate1", + "Weaviate2", + "WebCrawler", + "WebCrawler1", + "WebCrawler2", +] + +# import apis into sdk package +from vectorize_client.api.connectors_ai_platforms_api import ConnectorsAIPlatformsApi as ConnectorsAIPlatformsApi +from vectorize_client.api.connectors_destination_connectors_api import ConnectorsDestinationConnectorsApi as ConnectorsDestinationConnectorsApi +from vectorize_client.api.connectors_source_connectors_api import ConnectorsSourceConnectorsApi as ConnectorsSourceConnectorsApi +from vectorize_client.api.extraction_api import ExtractionApi as ExtractionApi +from vectorize_client.api.files_api import FilesApi as FilesApi +from vectorize_client.api.pipelines_api import PipelinesApi as 
PipelinesApi +from vectorize_client.api.uploads_api import UploadsApi as UploadsApi + +# import ApiClient +from vectorize_client.api_response import ApiResponse as ApiResponse +from vectorize_client.api_client import ApiClient as ApiClient +from vectorize_client.configuration import Configuration as Configuration +from vectorize_client.exceptions import OpenApiException as OpenApiException +from vectorize_client.exceptions import ApiTypeError as ApiTypeError +from vectorize_client.exceptions import ApiValueError as ApiValueError +from vectorize_client.exceptions import ApiKeyError as ApiKeyError +from vectorize_client.exceptions import ApiAttributeError as ApiAttributeError +from vectorize_client.exceptions import ApiException as ApiException + +# import models into sdk package +from vectorize_client.models.ai_platform import AIPlatform as AIPlatform +from vectorize_client.models.ai_platform_config_schema import AIPlatformConfigSchema as AIPlatformConfigSchema +from vectorize_client.models.ai_platform_input import AIPlatformInput as AIPlatformInput +from vectorize_client.models.ai_platform_schema import AIPlatformSchema as AIPlatformSchema +from vectorize_client.models.ai_platform_type import AIPlatformType as AIPlatformType +from vectorize_client.models.awss3_auth_config import AWSS3AuthConfig as AWSS3AuthConfig +from vectorize_client.models.awss3_config import AWSS3Config as AWSS3Config +from vectorize_client.models.azureaisearch_auth_config import AZUREAISEARCHAuthConfig as AZUREAISEARCHAuthConfig +from vectorize_client.models.azureaisearch_config import AZUREAISEARCHConfig as AZUREAISEARCHConfig +from vectorize_client.models.azureblob_auth_config import AZUREBLOBAuthConfig as AZUREBLOBAuthConfig +from vectorize_client.models.azureblob_config import AZUREBLOBConfig as AZUREBLOBConfig +from vectorize_client.models.add_user_from_source_connector_response import AddUserFromSourceConnectorResponse as AddUserFromSourceConnectorResponse +from 
vectorize_client.models.add_user_to_source_connector_request import AddUserToSourceConnectorRequest as AddUserToSourceConnectorRequest +from vectorize_client.models.add_user_to_source_connector_request_selected_files import AddUserToSourceConnectorRequestSelectedFiles as AddUserToSourceConnectorRequestSelectedFiles +from vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of import AddUserToSourceConnectorRequestSelectedFilesAnyOf as AddUserToSourceConnectorRequestSelectedFilesAnyOf +from vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of_value import AddUserToSourceConnectorRequestSelectedFilesAnyOfValue as AddUserToSourceConnectorRequestSelectedFilesAnyOfValue +from vectorize_client.models.advanced_query import AdvancedQuery as AdvancedQuery +from vectorize_client.models.amazon_s3 import AmazonS3 as AmazonS3 +from vectorize_client.models.amazon_s31 import AmazonS31 as AmazonS31 +from vectorize_client.models.amazon_s32 import AmazonS32 as AmazonS32 +from vectorize_client.models.azure_blob_storage import AzureBlobStorage as AzureBlobStorage +from vectorize_client.models.azure_blob_storage1 import AzureBlobStorage1 as AzureBlobStorage1 +from vectorize_client.models.azure_blob_storage2 import AzureBlobStorage2 as AzureBlobStorage2 +from vectorize_client.models.azureaisearch import Azureaisearch as Azureaisearch +from vectorize_client.models.azureaisearch1 import Azureaisearch1 as Azureaisearch1 +from vectorize_client.models.azureaisearch2 import Azureaisearch2 as Azureaisearch2 +from vectorize_client.models.bedrock_auth_config import BEDROCKAuthConfig as BEDROCKAuthConfig +from vectorize_client.models.bedrock import Bedrock as Bedrock +from vectorize_client.models.bedrock1 import Bedrock1 as Bedrock1 +from vectorize_client.models.bedrock2 import Bedrock2 as Bedrock2 +from vectorize_client.models.capella_auth_config import CAPELLAAuthConfig as CAPELLAAuthConfig +from vectorize_client.models.capella_config 
import CAPELLAConfig as CAPELLAConfig +from vectorize_client.models.confluence_auth_config import CONFLUENCEAuthConfig as CONFLUENCEAuthConfig +from vectorize_client.models.confluence_config import CONFLUENCEConfig as CONFLUENCEConfig +from vectorize_client.models.capella import Capella as Capella +from vectorize_client.models.capella1 import Capella1 as Capella1 +from vectorize_client.models.capella2 import Capella2 as Capella2 +from vectorize_client.models.confluence import Confluence as Confluence +from vectorize_client.models.confluence1 import Confluence1 as Confluence1 +from vectorize_client.models.confluence2 import Confluence2 as Confluence2 +from vectorize_client.models.create_ai_platform_connector import CreateAIPlatformConnector as CreateAIPlatformConnector +from vectorize_client.models.create_ai_platform_connector_request_inner import CreateAIPlatformConnectorRequestInner as CreateAIPlatformConnectorRequestInner +from vectorize_client.models.create_ai_platform_connector_response import CreateAIPlatformConnectorResponse as CreateAIPlatformConnectorResponse +from vectorize_client.models.create_destination_connector import CreateDestinationConnector as CreateDestinationConnector +from vectorize_client.models.create_destination_connector_request_inner import CreateDestinationConnectorRequestInner as CreateDestinationConnectorRequestInner +from vectorize_client.models.create_destination_connector_response import CreateDestinationConnectorResponse as CreateDestinationConnectorResponse +from vectorize_client.models.create_pipeline_response import CreatePipelineResponse as CreatePipelineResponse +from vectorize_client.models.create_pipeline_response_data import CreatePipelineResponseData as CreatePipelineResponseData +from vectorize_client.models.create_source_connector import CreateSourceConnector as CreateSourceConnector +from vectorize_client.models.create_source_connector_request_inner import CreateSourceConnectorRequestInner as 
CreateSourceConnectorRequestInner +from vectorize_client.models.create_source_connector_response import CreateSourceConnectorResponse as CreateSourceConnectorResponse +from vectorize_client.models.created_ai_platform_connector import CreatedAIPlatformConnector as CreatedAIPlatformConnector +from vectorize_client.models.created_destination_connector import CreatedDestinationConnector as CreatedDestinationConnector +from vectorize_client.models.created_source_connector import CreatedSourceConnector as CreatedSourceConnector +from vectorize_client.models.datastax_auth_config import DATASTAXAuthConfig as DATASTAXAuthConfig +from vectorize_client.models.datastax_config import DATASTAXConfig as DATASTAXConfig +from vectorize_client.models.discord_auth_config import DISCORDAuthConfig as DISCORDAuthConfig +from vectorize_client.models.discord_config import DISCORDConfig as DISCORDConfig +from vectorize_client.models.dropbox_auth_config import DROPBOXAuthConfig as DROPBOXAuthConfig +from vectorize_client.models.dropbox_config import DROPBOXConfig as DROPBOXConfig +from vectorize_client.models.dropboxoauth_auth_config import DROPBOXOAUTHAuthConfig as DROPBOXOAUTHAuthConfig +from vectorize_client.models.dropboxoauthmulti_auth_config import DROPBOXOAUTHMULTIAuthConfig as DROPBOXOAUTHMULTIAuthConfig +from vectorize_client.models.dropboxoauthmulticustom_auth_config import DROPBOXOAUTHMULTICUSTOMAuthConfig as DROPBOXOAUTHMULTICUSTOMAuthConfig +from vectorize_client.models.datastax import Datastax as Datastax +from vectorize_client.models.datastax1 import Datastax1 as Datastax1 +from vectorize_client.models.datastax2 import Datastax2 as Datastax2 +from vectorize_client.models.deep_research_result import DeepResearchResult as DeepResearchResult +from vectorize_client.models.delete_ai_platform_connector_response import DeleteAIPlatformConnectorResponse as DeleteAIPlatformConnectorResponse +from vectorize_client.models.delete_destination_connector_response import 
DeleteDestinationConnectorResponse as DeleteDestinationConnectorResponse +from vectorize_client.models.delete_file_response import DeleteFileResponse as DeleteFileResponse +from vectorize_client.models.delete_pipeline_response import DeletePipelineResponse as DeletePipelineResponse +from vectorize_client.models.delete_source_connector_response import DeleteSourceConnectorResponse as DeleteSourceConnectorResponse +from vectorize_client.models.destination_connector import DestinationConnector as DestinationConnector +from vectorize_client.models.destination_connector_input import DestinationConnectorInput as DestinationConnectorInput +from vectorize_client.models.destination_connector_input_config import DestinationConnectorInputConfig as DestinationConnectorInputConfig +from vectorize_client.models.destination_connector_schema import DestinationConnectorSchema as DestinationConnectorSchema +from vectorize_client.models.destination_connector_type import DestinationConnectorType as DestinationConnectorType +from vectorize_client.models.discord import Discord as Discord +from vectorize_client.models.discord1 import Discord1 as Discord1 +from vectorize_client.models.discord2 import Discord2 as Discord2 +from vectorize_client.models.document import Document as Document +from vectorize_client.models.dropbox import Dropbox as Dropbox +from vectorize_client.models.dropbox1 import Dropbox1 as Dropbox1 +from vectorize_client.models.dropbox2 import Dropbox2 as Dropbox2 +from vectorize_client.models.dropbox_oauth import DropboxOauth as DropboxOauth +from vectorize_client.models.dropbox_oauth1 import DropboxOauth1 as DropboxOauth1 +from vectorize_client.models.dropbox_oauth2 import DropboxOauth2 as DropboxOauth2 +from vectorize_client.models.dropbox_oauth_multi import DropboxOauthMulti as DropboxOauthMulti +from vectorize_client.models.dropbox_oauth_multi1 import DropboxOauthMulti1 as DropboxOauthMulti1 +from vectorize_client.models.dropbox_oauth_multi2 import DropboxOauthMulti2 
as DropboxOauthMulti2 +from vectorize_client.models.dropbox_oauth_multi_custom import DropboxOauthMultiCustom as DropboxOauthMultiCustom +from vectorize_client.models.dropbox_oauth_multi_custom1 import DropboxOauthMultiCustom1 as DropboxOauthMultiCustom1 +from vectorize_client.models.dropbox_oauth_multi_custom2 import DropboxOauthMultiCustom2 as DropboxOauthMultiCustom2 +from vectorize_client.models.elastic_auth_config import ELASTICAuthConfig as ELASTICAuthConfig +from vectorize_client.models.elastic_config import ELASTICConfig as ELASTICConfig +from vectorize_client.models.elastic import Elastic as Elastic +from vectorize_client.models.elastic1 import Elastic1 as Elastic1 +from vectorize_client.models.elastic2 import Elastic2 as Elastic2 +from vectorize_client.models.extraction_chunking_strategy import ExtractionChunkingStrategy as ExtractionChunkingStrategy +from vectorize_client.models.extraction_result import ExtractionResult as ExtractionResult +from vectorize_client.models.extraction_result_response import ExtractionResultResponse as ExtractionResultResponse +from vectorize_client.models.extraction_type import ExtractionType as ExtractionType +from vectorize_client.models.fileupload_auth_config import FILEUPLOADAuthConfig as FILEUPLOADAuthConfig +from vectorize_client.models.firecrawl_auth_config import FIRECRAWLAuthConfig as FIRECRAWLAuthConfig +from vectorize_client.models.firecrawl_config import FIRECRAWLConfig as FIRECRAWLConfig +from vectorize_client.models.fireflies_auth_config import FIREFLIESAuthConfig as FIREFLIESAuthConfig +from vectorize_client.models.fireflies_config import FIREFLIESConfig as FIREFLIESConfig +from vectorize_client.models.file_upload import FileUpload as FileUpload +from vectorize_client.models.file_upload1 import FileUpload1 as FileUpload1 +from vectorize_client.models.file_upload2 import FileUpload2 as FileUpload2 +from vectorize_client.models.firecrawl import Firecrawl as Firecrawl +from vectorize_client.models.firecrawl1 
import Firecrawl1 as Firecrawl1 +from vectorize_client.models.firecrawl2 import Firecrawl2 as Firecrawl2 +from vectorize_client.models.fireflies import Fireflies as Fireflies +from vectorize_client.models.fireflies1 import Fireflies1 as Fireflies1 +from vectorize_client.models.fireflies2 import Fireflies2 as Fireflies2 +from vectorize_client.models.gcs_auth_config import GCSAuthConfig as GCSAuthConfig +from vectorize_client.models.gcs_config import GCSConfig as GCSConfig +from vectorize_client.models.github_auth_config import GITHUBAuthConfig as GITHUBAuthConfig +from vectorize_client.models.github_config import GITHUBConfig as GITHUBConfig +from vectorize_client.models.googledrive_auth_config import GOOGLEDRIVEAuthConfig as GOOGLEDRIVEAuthConfig +from vectorize_client.models.googledrive_config import GOOGLEDRIVEConfig as GOOGLEDRIVEConfig +from vectorize_client.models.googledriveoauth_auth_config import GOOGLEDRIVEOAUTHAuthConfig as GOOGLEDRIVEOAUTHAuthConfig +from vectorize_client.models.googledriveoauth_config import GOOGLEDRIVEOAUTHConfig as GOOGLEDRIVEOAUTHConfig +from vectorize_client.models.googledriveoauthmulti_auth_config import GOOGLEDRIVEOAUTHMULTIAuthConfig as GOOGLEDRIVEOAUTHMULTIAuthConfig +from vectorize_client.models.googledriveoauthmulticustom_auth_config import GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig as GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig +from vectorize_client.models.googledriveoauthmulticustom_config import GOOGLEDRIVEOAUTHMULTICUSTOMConfig as GOOGLEDRIVEOAUTHMULTICUSTOMConfig +from vectorize_client.models.googledriveoauthmulti_config import GOOGLEDRIVEOAUTHMULTIConfig as GOOGLEDRIVEOAUTHMULTIConfig +from vectorize_client.models.get_ai_platform_connectors200_response import GetAIPlatformConnectors200Response as GetAIPlatformConnectors200Response +from vectorize_client.models.get_deep_research_response import GetDeepResearchResponse as GetDeepResearchResponse +from vectorize_client.models.get_destination_connectors200_response import 
GetDestinationConnectors200Response as GetDestinationConnectors200Response +from vectorize_client.models.get_pipeline_events_response import GetPipelineEventsResponse as GetPipelineEventsResponse +from vectorize_client.models.get_pipeline_metrics_response import GetPipelineMetricsResponse as GetPipelineMetricsResponse +from vectorize_client.models.get_pipeline_response import GetPipelineResponse as GetPipelineResponse +from vectorize_client.models.get_pipelines400_response import GetPipelines400Response as GetPipelines400Response +from vectorize_client.models.get_pipelines_response import GetPipelinesResponse as GetPipelinesResponse +from vectorize_client.models.get_source_connectors200_response import GetSourceConnectors200Response as GetSourceConnectors200Response +from vectorize_client.models.get_upload_files_response import GetUploadFilesResponse as GetUploadFilesResponse +from vectorize_client.models.github import Github as Github +from vectorize_client.models.github1 import Github1 as Github1 +from vectorize_client.models.github2 import Github2 as Github2 +from vectorize_client.models.google_cloud_storage import GoogleCloudStorage as GoogleCloudStorage +from vectorize_client.models.google_cloud_storage1 import GoogleCloudStorage1 as GoogleCloudStorage1 +from vectorize_client.models.google_cloud_storage2 import GoogleCloudStorage2 as GoogleCloudStorage2 +from vectorize_client.models.google_drive import GoogleDrive as GoogleDrive +from vectorize_client.models.google_drive1 import GoogleDrive1 as GoogleDrive1 +from vectorize_client.models.google_drive2 import GoogleDrive2 as GoogleDrive2 +from vectorize_client.models.google_drive_o_auth import GoogleDriveOAuth as GoogleDriveOAuth +from vectorize_client.models.google_drive_o_auth1 import GoogleDriveOAuth1 as GoogleDriveOAuth1 +from vectorize_client.models.google_drive_o_auth2 import GoogleDriveOAuth2 as GoogleDriveOAuth2 +from vectorize_client.models.google_drive_oauth_multi import GoogleDriveOauthMulti as 
GoogleDriveOauthMulti +from vectorize_client.models.google_drive_oauth_multi1 import GoogleDriveOauthMulti1 as GoogleDriveOauthMulti1 +from vectorize_client.models.google_drive_oauth_multi2 import GoogleDriveOauthMulti2 as GoogleDriveOauthMulti2 +from vectorize_client.models.google_drive_oauth_multi_custom import GoogleDriveOauthMultiCustom as GoogleDriveOauthMultiCustom +from vectorize_client.models.google_drive_oauth_multi_custom1 import GoogleDriveOauthMultiCustom1 as GoogleDriveOauthMultiCustom1 +from vectorize_client.models.google_drive_oauth_multi_custom2 import GoogleDriveOauthMultiCustom2 as GoogleDriveOauthMultiCustom2 +from vectorize_client.models.intercom_auth_config import INTERCOMAuthConfig as INTERCOMAuthConfig +from vectorize_client.models.intercom_config import INTERCOMConfig as INTERCOMConfig +from vectorize_client.models.intercom import Intercom as Intercom +from vectorize_client.models.intercom1 import Intercom1 as Intercom1 +from vectorize_client.models.intercom2 import Intercom2 as Intercom2 +from vectorize_client.models.milvus_auth_config import MILVUSAuthConfig as MILVUSAuthConfig +from vectorize_client.models.milvus_config import MILVUSConfig as MILVUSConfig +from vectorize_client.models.metadata_extraction_strategy import MetadataExtractionStrategy as MetadataExtractionStrategy +from vectorize_client.models.metadata_extraction_strategy_schema import MetadataExtractionStrategySchema as MetadataExtractionStrategySchema +from vectorize_client.models.milvus import Milvus as Milvus +from vectorize_client.models.milvus1 import Milvus1 as Milvus1 +from vectorize_client.models.milvus2 import Milvus2 as Milvus2 +from vectorize_client.models.n8_n_config import N8NConfig as N8NConfig +from vectorize_client.models.notion_auth_config import NOTIONAuthConfig as NOTIONAuthConfig +from vectorize_client.models.notion_config import NOTIONConfig as NOTIONConfig +from vectorize_client.models.notionoauthmulti_auth_config import NOTIONOAUTHMULTIAuthConfig as 
NOTIONOAUTHMULTIAuthConfig +from vectorize_client.models.notionoauthmulticustom_auth_config import NOTIONOAUTHMULTICUSTOMAuthConfig as NOTIONOAUTHMULTICUSTOMAuthConfig +from vectorize_client.models.notion import Notion as Notion +from vectorize_client.models.notion1 import Notion1 as Notion1 +from vectorize_client.models.notion2 import Notion2 as Notion2 +from vectorize_client.models.notion_oauth_multi import NotionOauthMulti as NotionOauthMulti +from vectorize_client.models.notion_oauth_multi1 import NotionOauthMulti1 as NotionOauthMulti1 +from vectorize_client.models.notion_oauth_multi2 import NotionOauthMulti2 as NotionOauthMulti2 +from vectorize_client.models.notion_oauth_multi_custom import NotionOauthMultiCustom as NotionOauthMultiCustom +from vectorize_client.models.notion_oauth_multi_custom1 import NotionOauthMultiCustom1 as NotionOauthMultiCustom1 +from vectorize_client.models.notion_oauth_multi_custom2 import NotionOauthMultiCustom2 as NotionOauthMultiCustom2 +from vectorize_client.models.onedrive_auth_config import ONEDRIVEAuthConfig as ONEDRIVEAuthConfig +from vectorize_client.models.onedrive_config import ONEDRIVEConfig as ONEDRIVEConfig +from vectorize_client.models.openai_auth_config import OPENAIAuthConfig as OPENAIAuthConfig +from vectorize_client.models.one_drive import OneDrive as OneDrive +from vectorize_client.models.one_drive1 import OneDrive1 as OneDrive1 +from vectorize_client.models.one_drive2 import OneDrive2 as OneDrive2 +from vectorize_client.models.openai import Openai as Openai +from vectorize_client.models.openai1 import Openai1 as Openai1 +from vectorize_client.models.openai2 import Openai2 as Openai2 +from vectorize_client.models.pinecone_auth_config import PINECONEAuthConfig as PINECONEAuthConfig +from vectorize_client.models.pinecone_config import PINECONEConfig as PINECONEConfig +from vectorize_client.models.postgresql_auth_config import POSTGRESQLAuthConfig as POSTGRESQLAuthConfig +from vectorize_client.models.postgresql_config 
import POSTGRESQLConfig as POSTGRESQLConfig +from vectorize_client.models.pinecone import Pinecone as Pinecone +from vectorize_client.models.pinecone1 import Pinecone1 as Pinecone1 +from vectorize_client.models.pinecone2 import Pinecone2 as Pinecone2 +from vectorize_client.models.pipeline_ai_platform_request_inner import PipelineAIPlatformRequestInner as PipelineAIPlatformRequestInner +from vectorize_client.models.pipeline_configuration_schema import PipelineConfigurationSchema as PipelineConfigurationSchema +from vectorize_client.models.pipeline_destination_connector_request_inner import PipelineDestinationConnectorRequestInner as PipelineDestinationConnectorRequestInner +from vectorize_client.models.pipeline_events import PipelineEvents as PipelineEvents +from vectorize_client.models.pipeline_list_summary import PipelineListSummary as PipelineListSummary +from vectorize_client.models.pipeline_metrics import PipelineMetrics as PipelineMetrics +from vectorize_client.models.pipeline_source_connector_request_inner import PipelineSourceConnectorRequestInner as PipelineSourceConnectorRequestInner +from vectorize_client.models.pipeline_summary import PipelineSummary as PipelineSummary +from vectorize_client.models.postgresql import Postgresql as Postgresql +from vectorize_client.models.postgresql1 import Postgresql1 as Postgresql1 +from vectorize_client.models.postgresql2 import Postgresql2 as Postgresql2 +from vectorize_client.models.qdrant_auth_config import QDRANTAuthConfig as QDRANTAuthConfig +from vectorize_client.models.qdrant_config import QDRANTConfig as QDRANTConfig +from vectorize_client.models.qdrant import Qdrant as Qdrant +from vectorize_client.models.qdrant1 import Qdrant1 as Qdrant1 +from vectorize_client.models.qdrant2 import Qdrant2 as Qdrant2 +from vectorize_client.models.remove_user_from_source_connector_request import RemoveUserFromSourceConnectorRequest as RemoveUserFromSourceConnectorRequest +from 
vectorize_client.models.remove_user_from_source_connector_response import RemoveUserFromSourceConnectorResponse as RemoveUserFromSourceConnectorResponse +from vectorize_client.models.retrieve_context import RetrieveContext as RetrieveContext +from vectorize_client.models.retrieve_context_message import RetrieveContextMessage as RetrieveContextMessage +from vectorize_client.models.retrieve_documents_request import RetrieveDocumentsRequest as RetrieveDocumentsRequest +from vectorize_client.models.retrieve_documents_response import RetrieveDocumentsResponse as RetrieveDocumentsResponse +from vectorize_client.models.sharepoint_auth_config import SHAREPOINTAuthConfig as SHAREPOINTAuthConfig +from vectorize_client.models.sharepoint_config import SHAREPOINTConfig as SHAREPOINTConfig +from vectorize_client.models.singlestore_auth_config import SINGLESTOREAuthConfig as SINGLESTOREAuthConfig +from vectorize_client.models.singlestore_config import SINGLESTOREConfig as SINGLESTOREConfig +from vectorize_client.models.supabase_auth_config import SUPABASEAuthConfig as SUPABASEAuthConfig +from vectorize_client.models.supabase_config import SUPABASEConfig as SUPABASEConfig +from vectorize_client.models.schedule_schema import ScheduleSchema as ScheduleSchema +from vectorize_client.models.schedule_schema_type import ScheduleSchemaType as ScheduleSchemaType +from vectorize_client.models.sharepoint import Sharepoint as Sharepoint +from vectorize_client.models.sharepoint1 import Sharepoint1 as Sharepoint1 +from vectorize_client.models.sharepoint2 import Sharepoint2 as Sharepoint2 +from vectorize_client.models.singlestore import Singlestore as Singlestore +from vectorize_client.models.singlestore1 import Singlestore1 as Singlestore1 +from vectorize_client.models.singlestore2 import Singlestore2 as Singlestore2 +from vectorize_client.models.source_connector import SourceConnector as SourceConnector +from vectorize_client.models.source_connector_input import SourceConnectorInput as 
SourceConnectorInput +from vectorize_client.models.source_connector_input_config import SourceConnectorInputConfig as SourceConnectorInputConfig +from vectorize_client.models.source_connector_schema import SourceConnectorSchema as SourceConnectorSchema +from vectorize_client.models.source_connector_type import SourceConnectorType as SourceConnectorType +from vectorize_client.models.start_deep_research_request import StartDeepResearchRequest as StartDeepResearchRequest +from vectorize_client.models.start_deep_research_response import StartDeepResearchResponse as StartDeepResearchResponse +from vectorize_client.models.start_extraction_request import StartExtractionRequest as StartExtractionRequest +from vectorize_client.models.start_extraction_response import StartExtractionResponse as StartExtractionResponse +from vectorize_client.models.start_file_upload_request import StartFileUploadRequest as StartFileUploadRequest +from vectorize_client.models.start_file_upload_response import StartFileUploadResponse as StartFileUploadResponse +from vectorize_client.models.start_file_upload_to_connector_request import StartFileUploadToConnectorRequest as StartFileUploadToConnectorRequest +from vectorize_client.models.start_file_upload_to_connector_response import StartFileUploadToConnectorResponse as StartFileUploadToConnectorResponse +from vectorize_client.models.start_pipeline_response import StartPipelineResponse as StartPipelineResponse +from vectorize_client.models.stop_pipeline_response import StopPipelineResponse as StopPipelineResponse +from vectorize_client.models.supabase import Supabase as Supabase +from vectorize_client.models.supabase1 import Supabase1 as Supabase1 +from vectorize_client.models.supabase2 import Supabase2 as Supabase2 +from vectorize_client.models.turbopuffer_auth_config import TURBOPUFFERAuthConfig as TURBOPUFFERAuthConfig +from vectorize_client.models.turbopuffer_config import TURBOPUFFERConfig as TURBOPUFFERConfig +from 
vectorize_client.models.turbopuffer import Turbopuffer as Turbopuffer +from vectorize_client.models.turbopuffer1 import Turbopuffer1 as Turbopuffer1 +from vectorize_client.models.turbopuffer2 import Turbopuffer2 as Turbopuffer2 +from vectorize_client.models.update_ai_platform_connector_request import UpdateAIPlatformConnectorRequest as UpdateAIPlatformConnectorRequest +from vectorize_client.models.update_ai_platform_connector_response import UpdateAIPlatformConnectorResponse as UpdateAIPlatformConnectorResponse +from vectorize_client.models.update_aiplatform_connector_request import UpdateAiplatformConnectorRequest as UpdateAiplatformConnectorRequest +from vectorize_client.models.update_destination_connector_request import UpdateDestinationConnectorRequest as UpdateDestinationConnectorRequest +from vectorize_client.models.update_destination_connector_response import UpdateDestinationConnectorResponse as UpdateDestinationConnectorResponse +from vectorize_client.models.update_source_connector_request import UpdateSourceConnectorRequest as UpdateSourceConnectorRequest +from vectorize_client.models.update_source_connector_response import UpdateSourceConnectorResponse as UpdateSourceConnectorResponse +from vectorize_client.models.update_source_connector_response_data import UpdateSourceConnectorResponseData as UpdateSourceConnectorResponseData +from vectorize_client.models.update_user_in_source_connector_request import UpdateUserInSourceConnectorRequest as UpdateUserInSourceConnectorRequest +from vectorize_client.models.update_user_in_source_connector_response import UpdateUserInSourceConnectorResponse as UpdateUserInSourceConnectorResponse +from vectorize_client.models.updated_ai_platform_connector_data import UpdatedAIPlatformConnectorData as UpdatedAIPlatformConnectorData +from vectorize_client.models.updated_destination_connector_data import UpdatedDestinationConnectorData as UpdatedDestinationConnectorData +from vectorize_client.models.upload_file import UploadFile 
as UploadFile +from vectorize_client.models.vertex_auth_config import VERTEXAuthConfig as VERTEXAuthConfig +from vectorize_client.models.voyage_auth_config import VOYAGEAuthConfig as VOYAGEAuthConfig +from vectorize_client.models.vertex import Vertex as Vertex +from vectorize_client.models.vertex1 import Vertex1 as Vertex1 +from vectorize_client.models.vertex2 import Vertex2 as Vertex2 +from vectorize_client.models.voyage import Voyage as Voyage +from vectorize_client.models.voyage1 import Voyage1 as Voyage1 +from vectorize_client.models.voyage2 import Voyage2 as Voyage2 +from vectorize_client.models.weaviate_auth_config import WEAVIATEAuthConfig as WEAVIATEAuthConfig +from vectorize_client.models.weaviate_config import WEAVIATEConfig as WEAVIATEConfig +from vectorize_client.models.webcrawler_auth_config import WEBCRAWLERAuthConfig as WEBCRAWLERAuthConfig +from vectorize_client.models.webcrawler_config import WEBCRAWLERConfig as WEBCRAWLERConfig +from vectorize_client.models.weaviate import Weaviate as Weaviate +from vectorize_client.models.weaviate1 import Weaviate1 as Weaviate1 +from vectorize_client.models.weaviate2 import Weaviate2 as Weaviate2 +from vectorize_client.models.web_crawler import WebCrawler as WebCrawler +from vectorize_client.models.web_crawler1 import WebCrawler1 as WebCrawler1 +from vectorize_client.models.web_crawler2 import WebCrawler2 as WebCrawler2 diff --git a/vectorize_client/api/__init__.py b/vectorize_client/api/__init__.py new file mode 100644 index 0000000..0b296d4 --- /dev/null +++ b/vectorize_client/api/__init__.py @@ -0,0 +1,11 @@ +# flake8: noqa + +# import apis into api package +from vectorize_client.api.connectors_ai_platforms_api import ConnectorsAIPlatformsApi +from vectorize_client.api.connectors_destination_connectors_api import ConnectorsDestinationConnectorsApi +from vectorize_client.api.connectors_source_connectors_api import ConnectorsSourceConnectorsApi +from vectorize_client.api.extraction_api import ExtractionApi +from 
vectorize_client.api.files_api import FilesApi +from vectorize_client.api.pipelines_api import PipelinesApi +from vectorize_client.api.uploads_api import UploadsApi + diff --git a/vectorize_client/api/connectors_ai_platforms_api.py b/vectorize_client/api/connectors_ai_platforms_api.py new file mode 100644 index 0000000..977b19d --- /dev/null +++ b/vectorize_client/api/connectors_ai_platforms_api.py @@ -0,0 +1,1515 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import List +from typing_extensions import Annotated +from vectorize_client.models.ai_platform import AIPlatform +from vectorize_client.models.create_ai_platform_connector_request_inner import CreateAIPlatformConnectorRequestInner +from vectorize_client.models.create_ai_platform_connector_response import CreateAIPlatformConnectorResponse +from vectorize_client.models.delete_ai_platform_connector_response import DeleteAIPlatformConnectorResponse +from vectorize_client.models.get_ai_platform_connectors200_response import GetAIPlatformConnectors200Response +from vectorize_client.models.update_ai_platform_connector_response import UpdateAIPlatformConnectorResponse +from vectorize_client.models.update_aiplatform_connector_request import UpdateAiplatformConnectorRequest + +from vectorize_client.api_client import ApiClient, RequestSerialized +from vectorize_client.api_response import ApiResponse +from vectorize_client.rest import RESTResponseType + + +class ConnectorsAIPlatformsApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: 
https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def create_ai_platform_connector( + self, + organization_id: StrictStr, + create_ai_platform_connector_request_inner: Annotated[List[CreateAIPlatformConnectorRequestInner], Field(min_length=1)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CreateAIPlatformConnectorResponse: + """Create a new AI platform connector + + Creates a new AI platform connector for embeddings and processing. The specific configuration fields required depend on the platform type selected. + + :param organization_id: (required) + :type organization_id: str + :param create_ai_platform_connector_request_inner: (required) + :type create_ai_platform_connector_request_inner: List[CreateAIPlatformConnectorRequestInner] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_ai_platform_connector_serialize( + organization_id=organization_id, + create_ai_platform_connector_request_inner=create_ai_platform_connector_request_inner, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CreateAIPlatformConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_ai_platform_connector_with_http_info( + self, + organization_id: StrictStr, + create_ai_platform_connector_request_inner: Annotated[List[CreateAIPlatformConnectorRequestInner], Field(min_length=1)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CreateAIPlatformConnectorResponse]: + """Create a new AI platform connector + + Creates a new AI platform connector for embeddings and processing. The specific configuration fields required depend on the platform type selected. 
+ + :param organization_id: (required) + :type organization_id: str + :param create_ai_platform_connector_request_inner: (required) + :type create_ai_platform_connector_request_inner: List[CreateAIPlatformConnectorRequestInner] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_ai_platform_connector_serialize( + organization_id=organization_id, + create_ai_platform_connector_request_inner=create_ai_platform_connector_request_inner, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CreateAIPlatformConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_ai_platform_connector_without_preload_content( + self, + organization_id: StrictStr, + create_ai_platform_connector_request_inner: Annotated[List[CreateAIPlatformConnectorRequestInner], Field(min_length=1)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new AI platform connector + + Creates a new AI platform connector for embeddings and processing. The specific configuration fields required depend on the platform type selected. + + :param organization_id: (required) + :type organization_id: str + :param create_ai_platform_connector_request_inner: (required) + :type create_ai_platform_connector_request_inner: List[CreateAIPlatformConnectorRequestInner] + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_ai_platform_connector_serialize( + organization_id=organization_id, + create_ai_platform_connector_request_inner=create_ai_platform_connector_request_inner, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CreateAIPlatformConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_ai_platform_connector_serialize( + self, + organization_id, + create_ai_platform_connector_request_inner, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'CreateAIPlatformConnectorRequestInner': '', + } + + _path_params: Dict[str, str] = {} + _query_params: 
List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization_id is not None: + _path_params['organizationId'] = organization_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if create_ai_platform_connector_request_inner is not None: + _body_params = create_ai_platform_connector_request_inner + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/org/{organizationId}/connectors/aiplatforms', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_ai_platform( + self, + organization: StrictStr, + aiplatform_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: 
Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DeleteAIPlatformConnectorResponse: + """Delete an AI platform connector + + + :param organization: (required) + :type organization: str + :param aiplatform_id: (required) + :type aiplatform_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_ai_platform_serialize( + organization=organization, + aiplatform_id=aiplatform_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DeleteAIPlatformConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_ai_platform_with_http_info( + self, + organization: StrictStr, + aiplatform_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DeleteAIPlatformConnectorResponse]: + """Delete an AI platform connector + + + :param organization: (required) + :type organization: str + :param aiplatform_id: (required) + :type aiplatform_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_ai_platform_serialize( + organization=organization, + aiplatform_id=aiplatform_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DeleteAIPlatformConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_ai_platform_without_preload_content( + self, + organization: StrictStr, + aiplatform_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete an AI platform connector + + + :param organization: (required) + :type organization: str + :param aiplatform_id: (required) + :type 
aiplatform_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_ai_platform_serialize( + organization=organization, + aiplatform_id=aiplatform_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DeleteAIPlatformConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_ai_platform_serialize( + self, + organization, + aiplatform_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + 
_form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if aiplatform_id is not None: + _path_params['aiplatformId'] = aiplatform_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/org/{organizationId}/connectors/aiplatforms/{aiplatformId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_ai_platform_connector( + self, + organization: StrictStr, + aiplatform_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AIPlatform: + """Get an AI platform connector + + + :param organization: (required) + :type organization: str + :param aiplatform_id: (required) + :type aiplatform_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_ai_platform_connector_serialize( + organization=organization, + aiplatform_id=aiplatform_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AIPlatform", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_ai_platform_connector_with_http_info( + self, + organization: StrictStr, + aiplatform_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + 
_headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AIPlatform]: + """Get an AI platform connector + + + :param organization: (required) + :type organization: str + :param aiplatform_id: (required) + :type aiplatform_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_ai_platform_connector_serialize( + organization=organization, + aiplatform_id=aiplatform_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AIPlatform", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_ai_platform_connector_without_preload_content( + self, + organization: StrictStr, + aiplatform_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get an AI platform connector + + + :param organization: (required) + :type organization: str + :param aiplatform_id: (required) + :type aiplatform_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_ai_platform_connector_serialize( + organization=organization, + aiplatform_id=aiplatform_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AIPlatform", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_ai_platform_connector_serialize( + self, + organization, + aiplatform_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if aiplatform_id is not None: + _path_params['aiplatformId'] = aiplatform_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + 
if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/org/{organizationId}/connectors/aiplatforms/{aiplatformId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_ai_platform_connectors( + self, + organization_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetAIPlatformConnectors200Response: + """Get all existing AI Platform connectors + + + :param organization_id: (required) + :type organization_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_ai_platform_connectors_serialize( + organization_id=organization_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetAIPlatformConnectors200Response", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_ai_platform_connectors_with_http_info( + self, + organization_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetAIPlatformConnectors200Response]: + """Get all existing AI Platform connectors + + + :param organization_id: (required) + :type organization_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_ai_platform_connectors_serialize( + organization_id=organization_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetAIPlatformConnectors200Response", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_ai_platform_connectors_without_preload_content( + self, + organization_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, 
Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all existing AI Platform connectors + + + :param organization_id: (required) + :type organization_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_ai_platform_connectors_serialize( + organization_id=organization_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetAIPlatformConnectors200Response", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_ai_platform_connectors_serialize( + self, + organization_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization_id is not None: + _path_params['organizationId'] = organization_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/org/{organizationId}/connectors/aiplatforms', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + 
auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_ai_platform_connector( + self, + organization: StrictStr, + aiplatform_id: StrictStr, + update_aiplatform_connector_request: UpdateAiplatformConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UpdateAIPlatformConnectorResponse: + """Update an AI Platform connector + + + :param organization: (required) + :type organization: str + :param aiplatform_id: (required) + :type aiplatform_id: str + :param update_aiplatform_connector_request: (required) + :type update_aiplatform_connector_request: UpdateAiplatformConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_ai_platform_connector_serialize( + organization=organization, + aiplatform_id=aiplatform_id, + update_aiplatform_connector_request=update_aiplatform_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateAIPlatformConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_ai_platform_connector_with_http_info( + self, + organization: StrictStr, + aiplatform_id: StrictStr, + update_aiplatform_connector_request: UpdateAiplatformConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UpdateAIPlatformConnectorResponse]: + """Update an AI Platform connector + + + :param organization: (required) + :type organization: str + :param aiplatform_id: (required) + :type aiplatform_id: str + :param update_aiplatform_connector_request: (required) + :type update_aiplatform_connector_request: UpdateAiplatformConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_ai_platform_connector_serialize( + organization=organization, + aiplatform_id=aiplatform_id, + update_aiplatform_connector_request=update_aiplatform_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateAIPlatformConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_ai_platform_connector_without_preload_content( + self, + organization: StrictStr, + aiplatform_id: StrictStr, + update_aiplatform_connector_request: UpdateAiplatformConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = 
None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update an AI Platform connector + + + :param organization: (required) + :type organization: str + :param aiplatform_id: (required) + :type aiplatform_id: str + :param update_aiplatform_connector_request: (required) + :type update_aiplatform_connector_request: UpdateAiplatformConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_ai_platform_connector_serialize( + organization=organization, + aiplatform_id=aiplatform_id, + update_aiplatform_connector_request=update_aiplatform_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateAIPlatformConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_ai_platform_connector_serialize( + self, + organization, + aiplatform_id, + update_aiplatform_connector_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if aiplatform_id is not None: + _path_params['aiplatformId'] = aiplatform_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if update_aiplatform_connector_request is not None: + _body_params = update_aiplatform_connector_request + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if 
_content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='PATCH', + resource_path='/org/{organizationId}/connectors/aiplatforms/{aiplatformId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/vectorize_client/api/connectors_destination_connectors_api.py b/vectorize_client/api/connectors_destination_connectors_api.py new file mode 100644 index 0000000..97ee78c --- /dev/null +++ b/vectorize_client/api/connectors_destination_connectors_api.py @@ -0,0 +1,1515 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import List +from typing_extensions import Annotated +from vectorize_client.models.create_destination_connector_request_inner import CreateDestinationConnectorRequestInner +from vectorize_client.models.create_destination_connector_response import CreateDestinationConnectorResponse +from vectorize_client.models.delete_destination_connector_response import DeleteDestinationConnectorResponse +from vectorize_client.models.destination_connector import DestinationConnector +from vectorize_client.models.get_destination_connectors200_response import GetDestinationConnectors200Response +from vectorize_client.models.update_destination_connector_request import UpdateDestinationConnectorRequest +from vectorize_client.models.update_destination_connector_response import UpdateDestinationConnectorResponse + +from vectorize_client.api_client import ApiClient, RequestSerialized +from vectorize_client.api_response import ApiResponse +from vectorize_client.rest import RESTResponseType + + +class ConnectorsDestinationConnectorsApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def create_destination_connector( + self, + organization_id: StrictStr, + create_destination_connector_request_inner: Annotated[List[CreateDestinationConnectorRequestInner], Field(min_length=1)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CreateDestinationConnectorResponse: + """Create a new destination connector + + Creates a new destination connector for data storage. The specific configuration fields required depend on the connector type selected. + + :param organization_id: (required) + :type organization_id: str + :param create_destination_connector_request_inner: (required) + :type create_destination_connector_request_inner: List[CreateDestinationConnectorRequestInner] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_destination_connector_serialize( + organization_id=organization_id, + create_destination_connector_request_inner=create_destination_connector_request_inner, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CreateDestinationConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_destination_connector_with_http_info( + self, + organization_id: StrictStr, + create_destination_connector_request_inner: Annotated[List[CreateDestinationConnectorRequestInner], Field(min_length=1)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CreateDestinationConnectorResponse]: + """Create a new destination connector + + Creates a new destination connector for data storage. The specific configuration fields required depend on the connector type selected. 
+ + :param organization_id: (required) + :type organization_id: str + :param create_destination_connector_request_inner: (required) + :type create_destination_connector_request_inner: List[CreateDestinationConnectorRequestInner] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_destination_connector_serialize( + organization_id=organization_id, + create_destination_connector_request_inner=create_destination_connector_request_inner, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CreateDestinationConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_destination_connector_without_preload_content( + self, + organization_id: StrictStr, + create_destination_connector_request_inner: Annotated[List[CreateDestinationConnectorRequestInner], Field(min_length=1)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new destination connector + + Creates a new destination connector for data storage. The specific configuration fields required depend on the connector type selected. + + :param organization_id: (required) + :type organization_id: str + :param create_destination_connector_request_inner: (required) + :type create_destination_connector_request_inner: List[CreateDestinationConnectorRequestInner] + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_destination_connector_serialize( + organization_id=organization_id, + create_destination_connector_request_inner=create_destination_connector_request_inner, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CreateDestinationConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_destination_connector_serialize( + self, + organization_id, + create_destination_connector_request_inner, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'CreateDestinationConnectorRequestInner': '', + } + + _path_params: Dict[str, str] = {} + _query_params: 
List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization_id is not None: + _path_params['organizationId'] = organization_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if create_destination_connector_request_inner is not None: + _body_params = create_destination_connector_request_inner + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/org/{organizationId}/connectors/destinations', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_destination_connector( + self, + organization: StrictStr, + destination_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + 
_content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DeleteDestinationConnectorResponse: + """Delete a destination connector + + + :param organization: (required) + :type organization: str + :param destination_connector_id: (required) + :type destination_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_destination_connector_serialize( + organization=organization, + destination_connector_id=destination_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DeleteDestinationConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_destination_connector_with_http_info( + self, + organization: StrictStr, + destination_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DeleteDestinationConnectorResponse]: + """Delete a destination connector + + + :param organization: (required) + :type organization: str + :param destination_connector_id: (required) + :type destination_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_destination_connector_serialize( + organization=organization, + destination_connector_id=destination_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DeleteDestinationConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_destination_connector_without_preload_content( + self, + organization: StrictStr, + destination_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete a destination connector + + + :param organization: (required) + :type 
organization: str + :param destination_connector_id: (required) + :type destination_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_destination_connector_serialize( + organization=organization, + destination_connector_id=destination_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DeleteDestinationConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_destination_connector_serialize( + self, + organization, + destination_connector_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if destination_connector_id is not None: + _path_params['destinationConnectorId'] = destination_connector_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='DELETE', + 
resource_path='/org/{organizationId}/connectors/destinations/{destinationConnectorId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_destination_connector( + self, + organization: StrictStr, + destination_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DestinationConnector: + """Get a destination connector + + + :param organization: (required) + :type organization: str + :param destination_connector_id: (required) + :type destination_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_destination_connector_serialize( + organization=organization, + destination_connector_id=destination_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DestinationConnector", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_destination_connector_with_http_info( + self, + organization: StrictStr, + destination_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DestinationConnector]: + """Get a destination connector + + + :param organization: (required) + :type organization: str + :param destination_connector_id: (required) + :type destination_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_destination_connector_serialize( + organization=organization, + destination_connector_id=destination_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DestinationConnector", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_destination_connector_without_preload_content( + self, + organization: StrictStr, + destination_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: 
Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a destination connector + + + :param organization: (required) + :type organization: str + :param destination_connector_id: (required) + :type destination_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_destination_connector_serialize( + organization=organization, + destination_connector_id=destination_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DestinationConnector", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_destination_connector_serialize( + self, + organization, + destination_connector_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if destination_connector_id is not None: + _path_params['destinationConnectorId'] = destination_connector_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + 
resource_path='/org/{organizationId}/connectors/destinations/{destinationConnectorId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_destination_connectors( + self, + organization_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetDestinationConnectors200Response: + """Get all existing destination connectors + + + :param organization_id: (required) + :type organization_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_destination_connectors_serialize( + organization_id=organization_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDestinationConnectors200Response", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_destination_connectors_with_http_info( + self, + organization_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetDestinationConnectors200Response]: + """Get all existing destination connectors + + + :param organization_id: (required) + :type organization_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_destination_connectors_serialize( + organization_id=organization_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDestinationConnectors200Response", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_destination_connectors_without_preload_content( + self, + organization_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all existing destination connectors + + + :param organization_id: (required) + :type organization_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_destination_connectors_serialize( + organization_id=organization_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDestinationConnectors200Response", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_destination_connectors_serialize( + self, + organization_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: 
Optional[bytes] = None + + # process the path parameters + if organization_id is not None: + _path_params['organizationId'] = organization_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/org/{organizationId}/connectors/destinations', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_destination_connector( + self, + organization: StrictStr, + destination_connector_id: StrictStr, + update_destination_connector_request: UpdateDestinationConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UpdateDestinationConnectorResponse: + """Update a destination connector + + + :param organization: (required) + :type organization: str + :param destination_connector_id: (required) + :type destination_connector_id: str + :param update_destination_connector_request: (required) + :type update_destination_connector_request: UpdateDestinationConnectorRequest + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_destination_connector_serialize( + organization=organization, + destination_connector_id=destination_connector_id, + update_destination_connector_request=update_destination_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateDestinationConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_destination_connector_with_http_info( + self, + organization: StrictStr, + destination_connector_id: StrictStr, + update_destination_connector_request: UpdateDestinationConnectorRequest, + 
_request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UpdateDestinationConnectorResponse]: + """Update a destination connector + + + :param organization: (required) + :type organization: str + :param destination_connector_id: (required) + :type destination_connector_id: str + :param update_destination_connector_request: (required) + :type update_destination_connector_request: UpdateDestinationConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_destination_connector_serialize( + organization=organization, + destination_connector_id=destination_connector_id, + update_destination_connector_request=update_destination_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateDestinationConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_destination_connector_without_preload_content( + self, + organization: StrictStr, + destination_connector_id: StrictStr, + update_destination_connector_request: UpdateDestinationConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update a destination connector + + + :param organization: (required) + :type organization: str + :param destination_connector_id: (required) + :type destination_connector_id: str + :param update_destination_connector_request: (required) + :type update_destination_connector_request: UpdateDestinationConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_destination_connector_serialize( + organization=organization, + destination_connector_id=destination_connector_id, + update_destination_connector_request=update_destination_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateDestinationConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_destination_connector_serialize( + self, + organization, + destination_connector_id, + update_destination_connector_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, 
Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if destination_connector_id is not None: + _path_params['destinationConnectorId'] = destination_connector_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if update_destination_connector_request is not None: + _body_params = update_destination_connector_request + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='PATCH', + resource_path='/org/{organizationId}/connectors/destinations/{destinationConnectorId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/vectorize_client/api/connectors_source_connectors_api.py b/vectorize_client/api/connectors_source_connectors_api.py new file mode 100644 index 0000000..6e49ae2 --- /dev/null +++ b/vectorize_client/api/connectors_source_connectors_api.py @@ -0,0 +1,2469 @@ +# coding: utf-8 + +""" + Vectorize API 
(Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import List +from typing_extensions import Annotated +from vectorize_client.models.add_user_from_source_connector_response import AddUserFromSourceConnectorResponse +from vectorize_client.models.add_user_to_source_connector_request import AddUserToSourceConnectorRequest +from vectorize_client.models.create_source_connector_request_inner import CreateSourceConnectorRequestInner +from vectorize_client.models.create_source_connector_response import CreateSourceConnectorResponse +from vectorize_client.models.delete_source_connector_response import DeleteSourceConnectorResponse +from vectorize_client.models.get_source_connectors200_response import GetSourceConnectors200Response +from vectorize_client.models.remove_user_from_source_connector_request import RemoveUserFromSourceConnectorRequest +from vectorize_client.models.remove_user_from_source_connector_response import RemoveUserFromSourceConnectorResponse +from vectorize_client.models.source_connector import SourceConnector +from vectorize_client.models.update_source_connector_request import UpdateSourceConnectorRequest +from vectorize_client.models.update_source_connector_response import UpdateSourceConnectorResponse +from vectorize_client.models.update_user_in_source_connector_request import UpdateUserInSourceConnectorRequest +from vectorize_client.models.update_user_in_source_connector_response import UpdateUserInSourceConnectorResponse + +from vectorize_client.api_client import ApiClient, RequestSerialized +from vectorize_client.api_response import ApiResponse +from 
vectorize_client.rest import RESTResponseType + + +class ConnectorsSourceConnectorsApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_user_to_source_connector( + self, + organization: StrictStr, + source_connector_id: StrictStr, + add_user_to_source_connector_request: AddUserToSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AddUserFromSourceConnectorResponse: + """Add a user to a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param add_user_to_source_connector_request: (required) + :type add_user_to_source_connector_request: AddUserToSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_user_to_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + add_user_to_source_connector_request=add_user_to_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AddUserFromSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_user_to_source_connector_with_http_info( + self, + organization: StrictStr, + source_connector_id: StrictStr, + add_user_to_source_connector_request: AddUserToSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AddUserFromSourceConnectorResponse]: + """Add a user to a source 
connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param add_user_to_source_connector_request: (required) + :type add_user_to_source_connector_request: AddUserToSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_user_to_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + add_user_to_source_connector_request=add_user_to_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AddUserFromSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_user_to_source_connector_without_preload_content( + self, + organization: StrictStr, + source_connector_id: StrictStr, + add_user_to_source_connector_request: AddUserToSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add a user to a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param add_user_to_source_connector_request: (required) + :type add_user_to_source_connector_request: AddUserToSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_user_to_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + add_user_to_source_connector_request=add_user_to_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AddUserFromSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_user_to_source_connector_serialize( + self, + organization, + source_connector_id, + add_user_to_source_connector_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = 
_headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organizationId'] = organization + if source_connector_id is not None: + _path_params['sourceConnectorId'] = source_connector_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if add_user_to_source_connector_request is not None: + _body_params = add_user_to_source_connector_request + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/org/{organizationId}/connectors/sources/{sourceConnectorId}/users', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_source_connector( + self, + organization_id: StrictStr, + create_source_connector_request_inner: Annotated[List[CreateSourceConnectorRequestInner], Field(min_length=1)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + 
Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CreateSourceConnectorResponse: + """Create a new source connector + + Creates a new source connector for data ingestion. The specific configuration fields required depend on the connector type selected. + + :param organization_id: (required) + :type organization_id: str + :param create_source_connector_request_inner: (required) + :type create_source_connector_request_inner: List[CreateSourceConnectorRequestInner] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_source_connector_serialize( + organization_id=organization_id, + create_source_connector_request_inner=create_source_connector_request_inner, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CreateSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_source_connector_with_http_info( + self, + organization_id: StrictStr, + create_source_connector_request_inner: Annotated[List[CreateSourceConnectorRequestInner], Field(min_length=1)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CreateSourceConnectorResponse]: + """Create a new source connector + + Creates a new source connector for data ingestion. The specific configuration fields required depend on the connector type selected. + + :param organization_id: (required) + :type organization_id: str + :param create_source_connector_request_inner: (required) + :type create_source_connector_request_inner: List[CreateSourceConnectorRequestInner] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_source_connector_serialize( + organization_id=organization_id, + create_source_connector_request_inner=create_source_connector_request_inner, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CreateSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_source_connector_without_preload_content( + self, + organization_id: StrictStr, + create_source_connector_request_inner: Annotated[List[CreateSourceConnectorRequestInner], Field(min_length=1)], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + 
Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new source connector + + Creates a new source connector for data ingestion. The specific configuration fields required depend on the connector type selected. + + :param organization_id: (required) + :type organization_id: str + :param create_source_connector_request_inner: (required) + :type create_source_connector_request_inner: List[CreateSourceConnectorRequestInner] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_source_connector_serialize( + organization_id=organization_id, + create_source_connector_request_inner=create_source_connector_request_inner, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CreateSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_source_connector_serialize( + self, + organization_id, + create_source_connector_request_inner, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'CreateSourceConnectorRequestInner': '', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization_id is not None: + _path_params['organizationId'] = organization_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if create_source_connector_request_inner is not None: + _body_params = create_source_connector_request_inner + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + 
_default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/org/{organizationId}/connectors/sources', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_source_connector( + self, + organization: StrictStr, + source_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DeleteSourceConnectorResponse: + """Delete a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DeleteSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_source_connector_with_http_info( + self, + organization: StrictStr, + source_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DeleteSourceConnectorResponse]: + """Delete a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param _request_timeout: 
timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DeleteSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_source_connector_without_preload_content( + self, + organization: StrictStr, + source_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, 
Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DeleteSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_source_connector_serialize( + self, + organization, + source_connector_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if source_connector_id is not None: + _path_params['sourceConnectorId'] = source_connector_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/org/{organizationId}/connectors/sources/{sourceConnectorId}', + path_params=_path_params, + 
query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_user_from_source_connector( + self, + organization: StrictStr, + source_connector_id: StrictStr, + remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RemoveUserFromSourceConnectorResponse: + """Delete a source connector user + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param remove_user_from_source_connector_request: (required) + :type remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_user_from_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + remove_user_from_source_connector_request=remove_user_from_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RemoveUserFromSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_user_from_source_connector_with_http_info( + self, + organization: StrictStr, + source_connector_id: StrictStr, + remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RemoveUserFromSourceConnectorResponse]: + """Delete a source connector user + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str 
+ :param remove_user_from_source_connector_request: (required) + :type remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_user_from_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + remove_user_from_source_connector_request=remove_user_from_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RemoveUserFromSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_user_from_source_connector_without_preload_content( + self, + organization: StrictStr, + source_connector_id: StrictStr, + remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete a source connector user + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param remove_user_from_source_connector_request: (required) + :type remove_user_from_source_connector_request: RemoveUserFromSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_user_from_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + remove_user_from_source_connector_request=remove_user_from_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RemoveUserFromSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_user_from_source_connector_serialize( + self, + organization, + source_connector_id, + remove_user_from_source_connector_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: 
Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if source_connector_id is not None: + _path_params['sourceConnectorId'] = source_connector_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if remove_user_from_source_connector_request is not None: + _body_params = remove_user_from_source_connector_request + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/org/{organizationId}/connectors/sources/{sourceConnectorId}/users', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_source_connector( + self, + organization: StrictStr, + source_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + 
_request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SourceConnector: + """Get a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SourceConnector", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_source_connector_with_http_info( + self, + organization: StrictStr, + source_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SourceConnector]: + """Get a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SourceConnector", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_source_connector_without_preload_content( + self, + organization: StrictStr, + source_connector_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SourceConnector", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_source_connector_serialize( + self, + organization, + source_connector_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, 
bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if source_connector_id is not None: + _path_params['sourceConnectorId'] = source_connector_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/org/{organizationId}/connectors/sources/{sourceConnectorId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_source_connectors( + self, + organization_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetSourceConnectors200Response: + """Get all existing source connectors + + + :param organization_id: (required) + :type organization_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_source_connectors_serialize( + organization_id=organization_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetSourceConnectors200Response", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_source_connectors_with_http_info( + self, + organization_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + 
) -> ApiResponse[GetSourceConnectors200Response]: + """Get all existing source connectors + + + :param organization_id: (required) + :type organization_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_source_connectors_serialize( + organization_id=organization_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetSourceConnectors200Response", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_source_connectors_without_preload_content( + self, + organization_id: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all existing source connectors + + + :param organization_id: (required) + :type organization_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_source_connectors_serialize( + organization_id=organization_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetSourceConnectors200Response", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_source_connectors_serialize( + self, + organization_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization_id is not None: + _path_params['organizationId'] = organization_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = 
self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/org/{organizationId}/connectors/sources', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_source_connector( + self, + organization: StrictStr, + source_connector_id: StrictStr, + update_source_connector_request: UpdateSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UpdateSourceConnectorResponse: + """Update a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param update_source_connector_request: (required) + :type update_source_connector_request: UpdateSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + update_source_connector_request=update_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_source_connector_with_http_info( + self, + organization: StrictStr, + source_connector_id: StrictStr, + update_source_connector_request: UpdateSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UpdateSourceConnectorResponse]: + """Update a source connector + + + :param organization: 
(required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param update_source_connector_request: (required) + :type update_source_connector_request: UpdateSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + update_source_connector_request=update_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_source_connector_without_preload_content( + self, + organization: StrictStr, + source_connector_id: StrictStr, + update_source_connector_request: UpdateSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update a source connector + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param update_source_connector_request: (required) + :type update_source_connector_request: UpdateSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + update_source_connector_request=update_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_source_connector_serialize( + self, + organization, + source_connector_id, + update_source_connector_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, 
List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if source_connector_id is not None: + _path_params['sourceConnectorId'] = source_connector_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if update_source_connector_request is not None: + _body_params = update_source_connector_request + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='PATCH', + resource_path='/org/{organizationId}/connectors/sources/{sourceConnectorId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_user_in_source_connector( + self, + organization: StrictStr, + source_connector_id: StrictStr, + update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: 
Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UpdateUserInSourceConnectorResponse: + """Update a source connector user + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param update_user_in_source_connector_request: (required) + :type update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_user_in_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + update_user_in_source_connector_request=update_user_in_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateUserInSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_user_in_source_connector_with_http_info( + self, + organization: StrictStr, + source_connector_id: StrictStr, + update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UpdateUserInSourceConnectorResponse]: + """Update a source connector user + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param update_user_in_source_connector_request: (required) + :type update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_user_in_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + update_user_in_source_connector_request=update_user_in_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateUserInSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_user_in_source_connector_without_preload_content( + self, + organization: StrictStr, + source_connector_id: StrictStr, + update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest, + _request_timeout: Union[ + None, + Annotated[StrictFloat, 
Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update a source connector user + + + :param organization: (required) + :type organization: str + :param source_connector_id: (required) + :type source_connector_id: str + :param update_user_in_source_connector_request: (required) + :type update_user_in_source_connector_request: UpdateUserInSourceConnectorRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_user_in_source_connector_serialize( + organization=organization, + source_connector_id=source_connector_id, + update_user_in_source_connector_request=update_user_in_source_connector_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UpdateUserInSourceConnectorResponse", + '400': "GetPipelines400Response", + '401': "GetPipelines400Response", + '403': "GetPipelines400Response", + '404': "GetPipelines400Response", + '500': "GetPipelines400Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_user_in_source_connector_serialize( + self, + organization, + source_connector_id, + update_user_in_source_connector_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if organization is not None: + _path_params['organization'] = organization + if source_connector_id is not None: + _path_params['sourceConnectorId'] = source_connector_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if update_user_in_source_connector_request is not None: + _body_params = update_user_in_source_connector_request + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' 
+ ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='PATCH', + resource_path='/org/{organizationId}/connectors/sources/{sourceConnectorId}/users', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/src/python/vectorize_client/api/extraction_api.py b/vectorize_client/api/extraction_api.py similarity index 96% rename from src/python/vectorize_client/api/extraction_api.py rename to vectorize_client/api/extraction_api.py index 6db8185..3b1b56f 100644 --- a/src/python/vectorize_client/api/extraction_api.py +++ b/vectorize_client/api/extraction_api.py @@ -311,7 +311,7 @@ def _get_extraction_result_serialize( return self.api_client.param_serialize( method='GET', - resource_path='/org/{organization}/extraction/{extractionId}', + resource_path='/org/{organizationId}/extraction/{extractionId}', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -330,7 +330,7 @@ def _get_extraction_result_serialize( @validate_call def start_extraction( self, - organization: StrictStr, + organization_id: StrictStr, start_extraction_request: StartExtractionRequest, _request_timeout: Union[ None, @@ -348,8 +348,8 @@ def start_extraction( """Start content extraction from a file - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param 
start_extraction_request: (required) :type start_extraction_request: StartExtractionRequest :param _request_timeout: timeout setting for this request. If one @@ -375,7 +375,7 @@ def start_extraction( """ # noqa: E501 _param = self._start_extraction_serialize( - organization=organization, + organization_id=organization_id, start_extraction_request=start_extraction_request, _request_auth=_request_auth, _content_type=_content_type, @@ -405,7 +405,7 @@ def start_extraction( @validate_call def start_extraction_with_http_info( self, - organization: StrictStr, + organization_id: StrictStr, start_extraction_request: StartExtractionRequest, _request_timeout: Union[ None, @@ -423,8 +423,8 @@ def start_extraction_with_http_info( """Start content extraction from a file - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param start_extraction_request: (required) :type start_extraction_request: StartExtractionRequest :param _request_timeout: timeout setting for this request. 
If one @@ -450,7 +450,7 @@ def start_extraction_with_http_info( """ # noqa: E501 _param = self._start_extraction_serialize( - organization=organization, + organization_id=organization_id, start_extraction_request=start_extraction_request, _request_auth=_request_auth, _content_type=_content_type, @@ -480,7 +480,7 @@ def start_extraction_with_http_info( @validate_call def start_extraction_without_preload_content( self, - organization: StrictStr, + organization_id: StrictStr, start_extraction_request: StartExtractionRequest, _request_timeout: Union[ None, @@ -498,8 +498,8 @@ def start_extraction_without_preload_content( """Start content extraction from a file - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param start_extraction_request: (required) :type start_extraction_request: StartExtractionRequest :param _request_timeout: timeout setting for this request. If one @@ -525,7 +525,7 @@ def start_extraction_without_preload_content( """ # noqa: E501 _param = self._start_extraction_serialize( - organization=organization, + organization_id=organization_id, start_extraction_request=start_extraction_request, _request_auth=_request_auth, _content_type=_content_type, @@ -550,7 +550,7 @@ def start_extraction_without_preload_content( def _start_extraction_serialize( self, - organization, + organization_id, start_extraction_request, _request_auth, _content_type, @@ -573,8 +573,8 @@ def _start_extraction_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization + if organization_id is not None: + _path_params['organizationId'] = organization_id # process the query parameters # process the header parameters # process the form parameters @@ -612,7 +612,7 @@ def _start_extraction_serialize( return self.api_client.param_serialize( method='POST', - resource_path='/org/{organization}/extraction', + 
resource_path='/org/{organizationId}/extraction', path_params=_path_params, query_params=_query_params, header_params=_header_params, diff --git a/src/python/vectorize_client/api/files_api.py b/vectorize_client/api/files_api.py similarity index 94% rename from src/python/vectorize_client/api/files_api.py rename to vectorize_client/api/files_api.py index 9756ee2..e70f311 100644 --- a/src/python/vectorize_client/api/files_api.py +++ b/vectorize_client/api/files_api.py @@ -41,7 +41,7 @@ def __init__(self, api_client=None) -> None: @validate_call def start_file_upload( self, - organization: StrictStr, + organization_id: StrictStr, start_file_upload_request: StartFileUploadRequest, _request_timeout: Union[ None, @@ -59,8 +59,8 @@ def start_file_upload( """Upload a generic file to the platform - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param start_file_upload_request: (required) :type start_file_upload_request: StartFileUploadRequest :param _request_timeout: timeout setting for this request. If one @@ -86,7 +86,7 @@ def start_file_upload( """ # noqa: E501 _param = self._start_file_upload_serialize( - organization=organization, + organization_id=organization_id, start_file_upload_request=start_file_upload_request, _request_auth=_request_auth, _content_type=_content_type, @@ -116,7 +116,7 @@ def start_file_upload( @validate_call def start_file_upload_with_http_info( self, - organization: StrictStr, + organization_id: StrictStr, start_file_upload_request: StartFileUploadRequest, _request_timeout: Union[ None, @@ -134,8 +134,8 @@ def start_file_upload_with_http_info( """Upload a generic file to the platform - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param start_file_upload_request: (required) :type start_file_upload_request: StartFileUploadRequest :param _request_timeout: timeout setting for this request. 
If one @@ -161,7 +161,7 @@ def start_file_upload_with_http_info( """ # noqa: E501 _param = self._start_file_upload_serialize( - organization=organization, + organization_id=organization_id, start_file_upload_request=start_file_upload_request, _request_auth=_request_auth, _content_type=_content_type, @@ -191,7 +191,7 @@ def start_file_upload_with_http_info( @validate_call def start_file_upload_without_preload_content( self, - organization: StrictStr, + organization_id: StrictStr, start_file_upload_request: StartFileUploadRequest, _request_timeout: Union[ None, @@ -209,8 +209,8 @@ def start_file_upload_without_preload_content( """Upload a generic file to the platform - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param start_file_upload_request: (required) :type start_file_upload_request: StartFileUploadRequest :param _request_timeout: timeout setting for this request. If one @@ -236,7 +236,7 @@ def start_file_upload_without_preload_content( """ # noqa: E501 _param = self._start_file_upload_serialize( - organization=organization, + organization_id=organization_id, start_file_upload_request=start_file_upload_request, _request_auth=_request_auth, _content_type=_content_type, @@ -261,7 +261,7 @@ def start_file_upload_without_preload_content( def _start_file_upload_serialize( self, - organization, + organization_id, start_file_upload_request, _request_auth, _content_type, @@ -284,8 +284,8 @@ def _start_file_upload_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization + if organization_id is not None: + _path_params['organizationId'] = organization_id # process the query parameters # process the header parameters # process the form parameters @@ -323,7 +323,7 @@ def _start_file_upload_serialize( return self.api_client.param_serialize( method='POST', - resource_path='/org/{organization}/files', 
+ resource_path='/org/{organizationId}/files', path_params=_path_params, query_params=_query_params, header_params=_header_params, diff --git a/src/python/vectorize_client/api/pipelines_api.py b/vectorize_client/api/pipelines_api.py similarity index 85% rename from src/python/vectorize_client/api/pipelines_api.py rename to vectorize_client/api/pipelines_api.py index edbfe60..27a64e4 100644 --- a/src/python/vectorize_client/api/pipelines_api.py +++ b/vectorize_client/api/pipelines_api.py @@ -54,7 +54,7 @@ def __init__(self, api_client=None) -> None: @validate_call def create_pipeline( self, - organization: StrictStr, + organization_id: StrictStr, pipeline_configuration_schema: PipelineConfigurationSchema, _request_timeout: Union[ None, @@ -69,11 +69,12 @@ def create_pipeline( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> CreatePipelineResponse: - """Create a new source pipeline. Config fields for sources: Amazon S3 (AWS_S3): Check for updates every (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array oftext) | Azure Blob Storage (AZURE_BLOB): Polling Interval (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array oftext) | Confluence (CONFLUENCE): Spaces (spaces): array oftext, Root Parents (root-parents): array oftext) | Discord (DISCORD): Emoji Filter (emoji): array oftext, Author Filter (author): array oftext, Ignore Author Filter (ignore-author): array oftext, Limit (limit): number) | Dropbox (DROPBOX): Read from these folders (optional) (path-prefix): array oftext) | Google Drive OAuth (GOOGLE_DRIVE_OAUTH): Polling Interval (seconds) (idle-time): number) | Google Drive (Service Account) (GOOGLE_DRIVE): Restrict ingest to these folder URLs (optional) (root-parents): array 
oftext, Polling Interval (seconds) (idle-time): number) | Google Drive Multi-User (Vectorize) (GOOGLE_DRIVE_OAUTH_MULTI): Polling Interval (seconds) (idle-time): number) | Google Drive Multi-User (White Label) (GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM): Polling Interval (seconds) (idle-time): number) | Firecrawl (FIRECRAWL): ) | GCP Cloud Storage (GCS): Check for updates every (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array oftext) | Intercom (INTERCOM): Reindex Interval (seconds) (reindexIntervalSeconds): number, Limit (limit): number, Tags (tags): array oftext) | OneDrive (ONE_DRIVE): Read starting from this folder (optional) (path-prefix): text) | SharePoint (SHAREPOINT): Site Name(s) (sites): array oftext) | Web Crawler (WEB_CRAWLER): Additional Allowed URLs or prefix(es) (allowed-domains-opt): array ofurl, Forbidden Paths (forbidden-paths): array oftext, Throttle (ms) (min-time-between-requests): number, Max Error Count (max-error-count): number, Max URLs (max-urls): number, Max Depth (max-depth): number, Reindex Interval (seconds) (reindex-interval-seconds): number) | File Upload (FILE_UPLOAD): ). 
Config fields for destinations: Couchbase Capella (CAPELLA): Bucket Name (bucket): text, Scope Name (scope): text, Collection Name (collection): text, Search Index Name (index): text) | DataStax Astra (DATASTAX): Collection Name (collection): text) | Elasticsearch (ELASTIC): Index Name (index): text) | Pinecone (PINECONE): Index Name (index): text, Namespace (namespace): text) | SingleStore (SINGLESTORE): Table Name (table): text) | Milvus (MILVUS): Collection Name (collection): text) | PostgreSQL (POSTGRESQL): Table Name (table): text) | Qdrant (QDRANT): Collection Name (collection): text) | Supabase (SUPABASE): Table Name (table): text) | Weaviate (WEAVIATE): Collection Name (collection): text) | Azure AI Search (AZUREAISEARCH): Index Name (index): text) | Built-in (VECTORIZE): ) | Chroma (CHROMA): Index Name (index): text) | MongoDB (MONGODB): Index Name (index): text). Config fields for AI platforms: + """Create a new pipeline + Creates a new pipeline with source connectors, destination connector, and AI platform configuration. The specific configuration fields required depend on the connector types selected. - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param pipeline_configuration_schema: (required) :type pipeline_configuration_schema: PipelineConfigurationSchema :param _request_timeout: timeout setting for this request. 
If one @@ -99,7 +100,7 @@ def create_pipeline( """ # noqa: E501 _param = self._create_pipeline_serialize( - organization=organization, + organization_id=organization_id, pipeline_configuration_schema=pipeline_configuration_schema, _request_auth=_request_auth, _content_type=_content_type, @@ -129,7 +130,7 @@ def create_pipeline( @validate_call def create_pipeline_with_http_info( self, - organization: StrictStr, + organization_id: StrictStr, pipeline_configuration_schema: PipelineConfigurationSchema, _request_timeout: Union[ None, @@ -144,11 +145,12 @@ def create_pipeline_with_http_info( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> ApiResponse[CreatePipelineResponse]: - """Create a new source pipeline. Config fields for sources: Amazon S3 (AWS_S3): Check for updates every (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array oftext) | Azure Blob Storage (AZURE_BLOB): Polling Interval (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array oftext) | Confluence (CONFLUENCE): Spaces (spaces): array oftext, Root Parents (root-parents): array oftext) | Discord (DISCORD): Emoji Filter (emoji): array oftext, Author Filter (author): array oftext, Ignore Author Filter (ignore-author): array oftext, Limit (limit): number) | Dropbox (DROPBOX): Read from these folders (optional) (path-prefix): array oftext) | Google Drive OAuth (GOOGLE_DRIVE_OAUTH): Polling Interval (seconds) (idle-time): number) | Google Drive (Service Account) (GOOGLE_DRIVE): Restrict ingest to these folder URLs (optional) (root-parents): array oftext, Polling Interval (seconds) (idle-time): number) | Google Drive Multi-User (Vectorize) (GOOGLE_DRIVE_OAUTH_MULTI): Polling Interval (seconds) (idle-time): number) | Google 
Drive Multi-User (White Label) (GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM): Polling Interval (seconds) (idle-time): number) | Firecrawl (FIRECRAWL): ) | GCP Cloud Storage (GCS): Check for updates every (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array oftext) | Intercom (INTERCOM): Reindex Interval (seconds) (reindexIntervalSeconds): number, Limit (limit): number, Tags (tags): array oftext) | OneDrive (ONE_DRIVE): Read starting from this folder (optional) (path-prefix): text) | SharePoint (SHAREPOINT): Site Name(s) (sites): array oftext) | Web Crawler (WEB_CRAWLER): Additional Allowed URLs or prefix(es) (allowed-domains-opt): array ofurl, Forbidden Paths (forbidden-paths): array oftext, Throttle (ms) (min-time-between-requests): number, Max Error Count (max-error-count): number, Max URLs (max-urls): number, Max Depth (max-depth): number, Reindex Interval (seconds) (reindex-interval-seconds): number) | File Upload (FILE_UPLOAD): ). Config fields for destinations: Couchbase Capella (CAPELLA): Bucket Name (bucket): text, Scope Name (scope): text, Collection Name (collection): text, Search Index Name (index): text) | DataStax Astra (DATASTAX): Collection Name (collection): text) | Elasticsearch (ELASTIC): Index Name (index): text) | Pinecone (PINECONE): Index Name (index): text, Namespace (namespace): text) | SingleStore (SINGLESTORE): Table Name (table): text) | Milvus (MILVUS): Collection Name (collection): text) | PostgreSQL (POSTGRESQL): Table Name (table): text) | Qdrant (QDRANT): Collection Name (collection): text) | Supabase (SUPABASE): Table Name (table): text) | Weaviate (WEAVIATE): Collection Name (collection): text) | Azure AI Search (AZUREAISEARCH): Index Name (index): text) | Built-in (VECTORIZE): ) | Chroma (CHROMA): Index Name (index): text) | MongoDB (MONGODB): Index Name (index): text). 
Config fields for AI platforms: + """Create a new pipeline + Creates a new pipeline with source connectors, destination connector, and AI platform configuration. The specific configuration fields required depend on the connector types selected. - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param pipeline_configuration_schema: (required) :type pipeline_configuration_schema: PipelineConfigurationSchema :param _request_timeout: timeout setting for this request. If one @@ -174,7 +176,7 @@ def create_pipeline_with_http_info( """ # noqa: E501 _param = self._create_pipeline_serialize( - organization=organization, + organization_id=organization_id, pipeline_configuration_schema=pipeline_configuration_schema, _request_auth=_request_auth, _content_type=_content_type, @@ -204,7 +206,7 @@ def create_pipeline_with_http_info( @validate_call def create_pipeline_without_preload_content( self, - organization: StrictStr, + organization_id: StrictStr, pipeline_configuration_schema: PipelineConfigurationSchema, _request_timeout: Union[ None, @@ -219,11 +221,12 @@ def create_pipeline_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """Create a new source pipeline. 
Config fields for sources: Amazon S3 (AWS_S3): Check for updates every (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array oftext) | Azure Blob Storage (AZURE_BLOB): Polling Interval (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array oftext) | Confluence (CONFLUENCE): Spaces (spaces): array oftext, Root Parents (root-parents): array oftext) | Discord (DISCORD): Emoji Filter (emoji): array oftext, Author Filter (author): array oftext, Ignore Author Filter (ignore-author): array oftext, Limit (limit): number) | Dropbox (DROPBOX): Read from these folders (optional) (path-prefix): array oftext) | Google Drive OAuth (GOOGLE_DRIVE_OAUTH): Polling Interval (seconds) (idle-time): number) | Google Drive (Service Account) (GOOGLE_DRIVE): Restrict ingest to these folder URLs (optional) (root-parents): array oftext, Polling Interval (seconds) (idle-time): number) | Google Drive Multi-User (Vectorize) (GOOGLE_DRIVE_OAUTH_MULTI): Polling Interval (seconds) (idle-time): number) | Google Drive Multi-User (White Label) (GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM): Polling Interval (seconds) (idle-time): number) | Firecrawl (FIRECRAWL): ) | GCP Cloud Storage (GCS): Check for updates every (seconds) (idle-time): number, Path Prefix (path-prefix): text, Path Metadata Regex (path-metadata-regex): text, Path Regex Group Names (path-regex-group-names): array oftext) | Intercom (INTERCOM): Reindex Interval (seconds) (reindexIntervalSeconds): number, Limit (limit): number, Tags (tags): array oftext) | OneDrive (ONE_DRIVE): Read starting from this folder (optional) (path-prefix): text) | SharePoint (SHAREPOINT): Site Name(s) (sites): array oftext) | Web Crawler (WEB_CRAWLER): Additional Allowed URLs or prefix(es) (allowed-domains-opt): array ofurl, Forbidden Paths 
(forbidden-paths): array oftext, Throttle (ms) (min-time-between-requests): number, Max Error Count (max-error-count): number, Max URLs (max-urls): number, Max Depth (max-depth): number, Reindex Interval (seconds) (reindex-interval-seconds): number) | File Upload (FILE_UPLOAD): ). Config fields for destinations: Couchbase Capella (CAPELLA): Bucket Name (bucket): text, Scope Name (scope): text, Collection Name (collection): text, Search Index Name (index): text) | DataStax Astra (DATASTAX): Collection Name (collection): text) | Elasticsearch (ELASTIC): Index Name (index): text) | Pinecone (PINECONE): Index Name (index): text, Namespace (namespace): text) | SingleStore (SINGLESTORE): Table Name (table): text) | Milvus (MILVUS): Collection Name (collection): text) | PostgreSQL (POSTGRESQL): Table Name (table): text) | Qdrant (QDRANT): Collection Name (collection): text) | Supabase (SUPABASE): Table Name (table): text) | Weaviate (WEAVIATE): Collection Name (collection): text) | Azure AI Search (AZUREAISEARCH): Index Name (index): text) | Built-in (VECTORIZE): ) | Chroma (CHROMA): Index Name (index): text) | MongoDB (MONGODB): Index Name (index): text). Config fields for AI platforms: + """Create a new pipeline + Creates a new pipeline with source connectors, destination connector, and AI platform configuration. The specific configuration fields required depend on the connector types selected. - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param pipeline_configuration_schema: (required) :type pipeline_configuration_schema: PipelineConfigurationSchema :param _request_timeout: timeout setting for this request. 
If one @@ -249,7 +252,7 @@ def create_pipeline_without_preload_content( """ # noqa: E501 _param = self._create_pipeline_serialize( - organization=organization, + organization_id=organization_id, pipeline_configuration_schema=pipeline_configuration_schema, _request_auth=_request_auth, _content_type=_content_type, @@ -274,7 +277,7 @@ def create_pipeline_without_preload_content( def _create_pipeline_serialize( self, - organization, + organization_id, pipeline_configuration_schema, _request_auth, _content_type, @@ -297,8 +300,8 @@ def _create_pipeline_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization + if organization_id is not None: + _path_params['organizationId'] = organization_id # process the query parameters # process the header parameters # process the form parameters @@ -336,7 +339,7 @@ def _create_pipeline_serialize( return self.api_client.param_serialize( method='POST', - resource_path='/org/{organization}/pipelines', + resource_path='/org/{organizationId}/pipelines', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -355,8 +358,8 @@ def _create_pipeline_serialize( @validate_call def delete_pipeline( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -373,10 +376,10 @@ def delete_pipeline( """Delete a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -400,8 +403,8 @@ def delete_pipeline( """ # noqa: E501 _param = self._delete_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -430,8 +433,8 @@ def delete_pipeline( @validate_call def delete_pipeline_with_http_info( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -448,10 +451,10 @@ def delete_pipeline_with_http_info( """Delete a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -475,8 +478,8 @@ def delete_pipeline_with_http_info( """ # noqa: E501 _param = self._delete_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -505,8 +508,8 @@ def delete_pipeline_with_http_info( @validate_call def delete_pipeline_without_preload_content( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -523,10 +526,10 @@ def delete_pipeline_without_preload_content( """Delete a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -550,8 +553,8 @@ def delete_pipeline_without_preload_content( """ # noqa: E501 _param = self._delete_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -575,8 +578,8 @@ def delete_pipeline_without_preload_content( def _delete_pipeline_serialize( self, - organization, - pipeline, + organization_id, + pipeline_id, _request_auth, _content_type, _headers, @@ -598,10 +601,10 @@ def _delete_pipeline_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if pipeline is not None: - _path_params['pipeline'] = pipeline + if organization_id is not None: + _path_params['organizationId'] = organization_id + if pipeline_id is not None: + _path_params['pipelineId'] = pipeline_id # process the query parameters # process the header parameters # process the form parameters @@ -624,7 +627,7 @@ def _delete_pipeline_serialize( return self.api_client.param_serialize( method='DELETE', - resource_path='/org/{organization}/pipelines/{pipeline}', + resource_path='/org/{organizationId}/pipelines/{pipelineId}', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -927,7 +930,7 @@ def _get_deep_research_result_serialize( return self.api_client.param_serialize( method='GET', - resource_path='/org/{organization}/pipelines/{pipeline}/deep-research/{researchId}', + resource_path='/org/{organizationId}/pipelines/{pipelineId}/deep-research/{researchId}', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -946,8 +949,8 @@ def _get_deep_research_result_serialize( @validate_call def get_pipeline( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, 
Annotated[StrictFloat, Field(gt=0)], @@ -964,10 +967,10 @@ def get_pipeline( """Get a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -991,8 +994,8 @@ def get_pipeline( """ # noqa: E501 _param = self._get_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1021,8 +1024,8 @@ def get_pipeline( @validate_call def get_pipeline_with_http_info( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1039,10 +1042,10 @@ def get_pipeline_with_http_info( """Get a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1066,8 +1069,8 @@ def get_pipeline_with_http_info( """ # noqa: E501 _param = self._get_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1096,8 +1099,8 @@ def get_pipeline_with_http_info( @validate_call def get_pipeline_without_preload_content( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1114,10 +1117,10 @@ def get_pipeline_without_preload_content( """Get a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1141,8 +1144,8 @@ def get_pipeline_without_preload_content( """ # noqa: E501 _param = self._get_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1166,8 +1169,8 @@ def get_pipeline_without_preload_content( def _get_pipeline_serialize( self, - organization, - pipeline, + organization_id, + pipeline_id, _request_auth, _content_type, _headers, @@ -1189,10 +1192,10 @@ def _get_pipeline_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if pipeline is not None: - _path_params['pipeline'] = pipeline + if organization_id is not None: + _path_params['organizationId'] = organization_id + if pipeline_id is not None: + _path_params['pipelineId'] = pipeline_id # process the query parameters # process the header parameters # process the form parameters @@ -1215,7 +1218,7 @@ def _get_pipeline_serialize( return self.api_client.param_serialize( method='GET', - resource_path='/org/{organization}/pipelines/{pipeline}', + resource_path='/org/{organizationId}/pipelines/{pipelineId}', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -1234,8 +1237,8 @@ def _get_pipeline_serialize( @validate_call def get_pipeline_events( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, next_token: Optional[StrictStr] = None, _request_timeout: Union[ None, @@ -1253,10 +1256,10 @@ def get_pipeline_events( """Get pipeline events - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param next_token: :type next_token: str :param 
_request_timeout: timeout setting for this request. If one @@ -1282,8 +1285,8 @@ def get_pipeline_events( """ # noqa: E501 _param = self._get_pipeline_events_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, next_token=next_token, _request_auth=_request_auth, _content_type=_content_type, @@ -1313,8 +1316,8 @@ def get_pipeline_events( @validate_call def get_pipeline_events_with_http_info( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, next_token: Optional[StrictStr] = None, _request_timeout: Union[ None, @@ -1332,10 +1335,10 @@ def get_pipeline_events_with_http_info( """Get pipeline events - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param next_token: :type next_token: str :param _request_timeout: timeout setting for this request. 
If one @@ -1361,8 +1364,8 @@ def get_pipeline_events_with_http_info( """ # noqa: E501 _param = self._get_pipeline_events_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, next_token=next_token, _request_auth=_request_auth, _content_type=_content_type, @@ -1392,8 +1395,8 @@ def get_pipeline_events_with_http_info( @validate_call def get_pipeline_events_without_preload_content( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, next_token: Optional[StrictStr] = None, _request_timeout: Union[ None, @@ -1411,10 +1414,10 @@ def get_pipeline_events_without_preload_content( """Get pipeline events - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param next_token: :type next_token: str :param _request_timeout: timeout setting for this request. 
If one @@ -1440,8 +1443,8 @@ def get_pipeline_events_without_preload_content( """ # noqa: E501 _param = self._get_pipeline_events_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, next_token=next_token, _request_auth=_request_auth, _content_type=_content_type, @@ -1466,8 +1469,8 @@ def get_pipeline_events_without_preload_content( def _get_pipeline_events_serialize( self, - organization, - pipeline, + organization_id, + pipeline_id, next_token, _request_auth, _content_type, @@ -1490,10 +1493,10 @@ def _get_pipeline_events_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if pipeline is not None: - _path_params['pipeline'] = pipeline + if organization_id is not None: + _path_params['organizationId'] = organization_id + if pipeline_id is not None: + _path_params['pipelineId'] = pipeline_id # process the query parameters if next_token is not None: @@ -1520,7 +1523,7 @@ def _get_pipeline_events_serialize( return self.api_client.param_serialize( method='GET', - resource_path='/org/{organization}/pipelines/{pipeline}/events', + resource_path='/org/{organizationId}/pipelines/{pipelineId}/events', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -1539,8 +1542,8 @@ def _get_pipeline_events_serialize( @validate_call def get_pipeline_metrics( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1557,10 +1560,10 @@ def get_pipeline_metrics( """Get pipeline metrics - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting 
for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1584,8 +1587,8 @@ def get_pipeline_metrics( """ # noqa: E501 _param = self._get_pipeline_metrics_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1614,8 +1617,8 @@ def get_pipeline_metrics( @validate_call def get_pipeline_metrics_with_http_info( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1632,10 +1635,10 @@ def get_pipeline_metrics_with_http_info( """Get pipeline metrics - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1659,8 +1662,8 @@ def get_pipeline_metrics_with_http_info( """ # noqa: E501 _param = self._get_pipeline_metrics_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1689,8 +1692,8 @@ def get_pipeline_metrics_with_http_info( @validate_call def get_pipeline_metrics_without_preload_content( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1707,10 +1710,10 @@ def get_pipeline_metrics_without_preload_content( """Get pipeline metrics - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1734,8 +1737,8 @@ def get_pipeline_metrics_without_preload_content( """ # noqa: E501 _param = self._get_pipeline_metrics_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1759,8 +1762,8 @@ def get_pipeline_metrics_without_preload_content( def _get_pipeline_metrics_serialize( self, - organization, - pipeline, + organization_id, + pipeline_id, _request_auth, _content_type, _headers, @@ -1782,10 +1785,10 @@ def _get_pipeline_metrics_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if pipeline is not None: - _path_params['pipeline'] = pipeline + if organization_id is not None: + _path_params['organizationId'] = organization_id + if pipeline_id is not None: + _path_params['pipelineId'] = pipeline_id # process the query parameters # process the header parameters # process the form parameters @@ -1808,7 +1811,7 @@ def _get_pipeline_metrics_serialize( return self.api_client.param_serialize( method='GET', - resource_path='/org/{organization}/pipelines/{pipeline}/metrics', + resource_path='/org/{organizationId}/pipelines/{pipelineId}/metrics', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -1827,7 +1830,7 @@ def _get_pipeline_metrics_serialize( @validate_call def get_pipelines( self, - organization: StrictStr, + organization_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1841,11 +1844,12 @@ def get_pipelines( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> GetPipelinesResponse: - """Get all existing pipelines + """Get all pipelines + Returns a list of all pipelines in the organization - :param organization: (required) - :type organization: 
str + :param organization_id: (required) + :type organization_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1869,7 +1873,7 @@ def get_pipelines( """ # noqa: E501 _param = self._get_pipelines_serialize( - organization=organization, + organization_id=organization_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1898,7 +1902,7 @@ def get_pipelines( @validate_call def get_pipelines_with_http_info( self, - organization: StrictStr, + organization_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1912,11 +1916,12 @@ def get_pipelines_with_http_info( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> ApiResponse[GetPipelinesResponse]: - """Get all existing pipelines + """Get all pipelines + Returns a list of all pipelines in the organization - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1940,7 +1945,7 @@ def get_pipelines_with_http_info( """ # noqa: E501 _param = self._get_pipelines_serialize( - organization=organization, + organization_id=organization_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1969,7 +1974,7 @@ def get_pipelines_with_http_info( @validate_call def get_pipelines_without_preload_content( self, - organization: StrictStr, + organization_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1983,11 +1988,12 @@ def get_pipelines_without_preload_content( _headers: Optional[Dict[StrictStr, Any]] = None, _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, ) -> RESTResponseType: - """Get all existing pipelines + """Get all pipelines + Returns a list of all pipelines in the organization - :param organization: (required) - :type organization: str + :param organization_id: (required) + :type organization_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -2011,7 +2017,7 @@ def get_pipelines_without_preload_content( """ # noqa: E501 _param = self._get_pipelines_serialize( - organization=organization, + organization_id=organization_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2035,7 +2041,7 @@ def get_pipelines_without_preload_content( def _get_pipelines_serialize( self, - organization, + organization_id, _request_auth, _content_type, _headers, @@ -2057,8 +2063,8 @@ def _get_pipelines_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization + if organization_id is not None: + _path_params['organizationId'] = organization_id # process the query parameters # process the header parameters # process the form parameters @@ -2081,7 +2087,7 @@ def _get_pipelines_serialize( return self.api_client.param_serialize( method='GET', - resource_path='/org/{organization}/pipelines', + resource_path='/org/{organizationId}/pipelines', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -2100,8 +2106,8 @@ def _get_pipelines_serialize( @validate_call def retrieve_documents( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, retrieve_documents_request: RetrieveDocumentsRequest, _request_timeout: Union[ None, @@ -2119,10 +2125,10 @@ def retrieve_documents( """Retrieve documents from a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param retrieve_documents_request: (required) :type retrieve_documents_request: RetrieveDocumentsRequest :param _request_timeout: timeout setting for this request. 
If one @@ -2148,8 +2154,8 @@ def retrieve_documents( """ # noqa: E501 _param = self._retrieve_documents_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, retrieve_documents_request=retrieve_documents_request, _request_auth=_request_auth, _content_type=_content_type, @@ -2179,8 +2185,8 @@ def retrieve_documents( @validate_call def retrieve_documents_with_http_info( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, retrieve_documents_request: RetrieveDocumentsRequest, _request_timeout: Union[ None, @@ -2198,10 +2204,10 @@ def retrieve_documents_with_http_info( """Retrieve documents from a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param retrieve_documents_request: (required) :type retrieve_documents_request: RetrieveDocumentsRequest :param _request_timeout: timeout setting for this request. 
If one @@ -2227,8 +2233,8 @@ def retrieve_documents_with_http_info( """ # noqa: E501 _param = self._retrieve_documents_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, retrieve_documents_request=retrieve_documents_request, _request_auth=_request_auth, _content_type=_content_type, @@ -2258,8 +2264,8 @@ def retrieve_documents_with_http_info( @validate_call def retrieve_documents_without_preload_content( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, retrieve_documents_request: RetrieveDocumentsRequest, _request_timeout: Union[ None, @@ -2277,10 +2283,10 @@ def retrieve_documents_without_preload_content( """Retrieve documents from a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param retrieve_documents_request: (required) :type retrieve_documents_request: RetrieveDocumentsRequest :param _request_timeout: timeout setting for this request. 
If one @@ -2306,8 +2312,8 @@ def retrieve_documents_without_preload_content( """ # noqa: E501 _param = self._retrieve_documents_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, retrieve_documents_request=retrieve_documents_request, _request_auth=_request_auth, _content_type=_content_type, @@ -2332,8 +2338,8 @@ def retrieve_documents_without_preload_content( def _retrieve_documents_serialize( self, - organization, - pipeline, + organization_id, + pipeline_id, retrieve_documents_request, _request_auth, _content_type, @@ -2356,10 +2362,10 @@ def _retrieve_documents_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if pipeline is not None: - _path_params['pipeline'] = pipeline + if organization_id is not None: + _path_params['organizationId'] = organization_id + if pipeline_id is not None: + _path_params['pipelineId'] = pipeline_id # process the query parameters # process the header parameters # process the form parameters @@ -2397,7 +2403,7 @@ def _retrieve_documents_serialize( return self.api_client.param_serialize( method='POST', - resource_path='/org/{organization}/pipelines/{pipeline}/retrieval', + resource_path='/org/{organizationId}/pipelines/{pipelineId}/retrieval', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -2416,8 +2422,8 @@ def _retrieve_documents_serialize( @validate_call def start_deep_research( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, start_deep_research_request: StartDeepResearchRequest, _request_timeout: Union[ None, @@ -2435,10 +2441,10 @@ def start_deep_research( """Start a deep research - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: 
str + :param pipeline_id: (required) + :type pipeline_id: str :param start_deep_research_request: (required) :type start_deep_research_request: StartDeepResearchRequest :param _request_timeout: timeout setting for this request. If one @@ -2464,8 +2470,8 @@ def start_deep_research( """ # noqa: E501 _param = self._start_deep_research_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, start_deep_research_request=start_deep_research_request, _request_auth=_request_auth, _content_type=_content_type, @@ -2495,8 +2501,8 @@ def start_deep_research( @validate_call def start_deep_research_with_http_info( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, start_deep_research_request: StartDeepResearchRequest, _request_timeout: Union[ None, @@ -2514,10 +2520,10 @@ def start_deep_research_with_http_info( """Start a deep research - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param start_deep_research_request: (required) :type start_deep_research_request: StartDeepResearchRequest :param _request_timeout: timeout setting for this request. 
If one @@ -2543,8 +2549,8 @@ def start_deep_research_with_http_info( """ # noqa: E501 _param = self._start_deep_research_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, start_deep_research_request=start_deep_research_request, _request_auth=_request_auth, _content_type=_content_type, @@ -2574,8 +2580,8 @@ def start_deep_research_with_http_info( @validate_call def start_deep_research_without_preload_content( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, start_deep_research_request: StartDeepResearchRequest, _request_timeout: Union[ None, @@ -2593,10 +2599,10 @@ def start_deep_research_without_preload_content( """Start a deep research - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param start_deep_research_request: (required) :type start_deep_research_request: StartDeepResearchRequest :param _request_timeout: timeout setting for this request. 
If one @@ -2622,8 +2628,8 @@ def start_deep_research_without_preload_content( """ # noqa: E501 _param = self._start_deep_research_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, start_deep_research_request=start_deep_research_request, _request_auth=_request_auth, _content_type=_content_type, @@ -2648,8 +2654,8 @@ def start_deep_research_without_preload_content( def _start_deep_research_serialize( self, - organization, - pipeline, + organization_id, + pipeline_id, start_deep_research_request, _request_auth, _content_type, @@ -2672,10 +2678,10 @@ def _start_deep_research_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if pipeline is not None: - _path_params['pipeline'] = pipeline + if organization_id is not None: + _path_params['organizationId'] = organization_id + if pipeline_id is not None: + _path_params['pipelineId'] = pipeline_id # process the query parameters # process the header parameters # process the form parameters @@ -2713,7 +2719,7 @@ def _start_deep_research_serialize( return self.api_client.param_serialize( method='POST', - resource_path='/org/{organization}/pipelines/{pipeline}/deep-research', + resource_path='/org/{organizationId}/pipelines/{pipelineId}/deep-research', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -2732,8 +2738,8 @@ def _start_deep_research_serialize( @validate_call def start_pipeline( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2750,10 +2756,10 @@ def start_pipeline( """Start a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param 
pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -2777,8 +2783,8 @@ def start_pipeline( """ # noqa: E501 _param = self._start_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2807,8 +2813,8 @@ def start_pipeline( @validate_call def start_pipeline_with_http_info( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2825,10 +2831,10 @@ def start_pipeline_with_http_info( """Start a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -2852,8 +2858,8 @@ def start_pipeline_with_http_info( """ # noqa: E501 _param = self._start_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2882,8 +2888,8 @@ def start_pipeline_with_http_info( @validate_call def start_pipeline_without_preload_content( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -2900,10 +2906,10 @@ def start_pipeline_without_preload_content( """Start a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -2927,8 +2933,8 @@ def start_pipeline_without_preload_content( """ # noqa: E501 _param = self._start_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -2952,8 +2958,8 @@ def start_pipeline_without_preload_content( def _start_pipeline_serialize( self, - organization, - pipeline, + organization_id, + pipeline_id, _request_auth, _content_type, _headers, @@ -2975,10 +2981,10 @@ def _start_pipeline_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if pipeline is not None: - _path_params['pipeline'] = pipeline + if organization_id is not None: + _path_params['organizationId'] = organization_id + if pipeline_id is not None: + _path_params['pipelineId'] = pipeline_id # process the query parameters # process the header parameters # process the form parameters @@ -3001,7 +3007,7 @@ def _start_pipeline_serialize( return self.api_client.param_serialize( method='POST', - resource_path='/org/{organization}/pipelines/{pipeline}/start', + resource_path='/org/{organizationId}/pipelines/{pipelineId}/start', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -3020,8 +3026,8 @@ def _start_pipeline_serialize( @validate_call def stop_pipeline( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -3038,10 +3044,10 @@ def stop_pipeline( """Stop a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting 
for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -3065,8 +3071,8 @@ def stop_pipeline( """ # noqa: E501 _param = self._stop_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3095,8 +3101,8 @@ def stop_pipeline( @validate_call def stop_pipeline_with_http_info( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -3113,10 +3119,10 @@ def stop_pipeline_with_http_info( """Stop a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -3140,8 +3146,8 @@ def stop_pipeline_with_http_info( """ # noqa: E501 _param = self._stop_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3170,8 +3176,8 @@ def stop_pipeline_with_http_info( @validate_call def stop_pipeline_without_preload_content( self, - organization: StrictStr, - pipeline: StrictStr, + organization_id: StrictStr, + pipeline_id: StrictStr, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -3188,10 +3194,10 @@ def stop_pipeline_without_preload_content( """Stop a pipeline - :param organization: (required) - :type organization: str - :param pipeline: (required) - :type pipeline: str + :param organization_id: (required) + :type organization_id: str + :param pipeline_id: (required) + :type pipeline_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -3215,8 +3221,8 @@ def stop_pipeline_without_preload_content( """ # noqa: E501 _param = self._stop_pipeline_serialize( - organization=organization, - pipeline=pipeline, + organization_id=organization_id, + pipeline_id=pipeline_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -3240,8 +3246,8 @@ def stop_pipeline_without_preload_content( def _stop_pipeline_serialize( self, - organization, - pipeline, + organization_id, + pipeline_id, _request_auth, _content_type, _headers, @@ -3263,10 +3269,10 @@ def _stop_pipeline_serialize( _body_params: Optional[bytes] = None # process the path parameters - if organization is not None: - _path_params['organization'] = organization - if pipeline is not None: - _path_params['pipeline'] = pipeline + if organization_id is not None: + _path_params['organizationId'] = organization_id + if pipeline_id is not None: + _path_params['pipelineId'] = pipeline_id # process the query parameters # process the header parameters # process the form parameters @@ -3289,7 +3295,7 @@ def _stop_pipeline_serialize( return self.api_client.param_serialize( method='POST', - resource_path='/org/{organization}/pipelines/{pipeline}/stop', + resource_path='/org/{organizationId}/pipelines/{pipelineId}/stop', path_params=_path_params, query_params=_query_params, header_params=_header_params, diff --git a/src/python/vectorize_client/api/uploads_api.py b/vectorize_client/api/uploads_api.py similarity index 99% rename from src/python/vectorize_client/api/uploads_api.py rename to vectorize_client/api/uploads_api.py index 4372dde..8c89e04 100644 --- a/src/python/vectorize_client/api/uploads_api.py +++ b/vectorize_client/api/uploads_api.py @@ -312,7 +312,7 @@ def _delete_file_from_connector_serialize( return self.api_client.param_serialize( method='DELETE', - resource_path='/org/{organization}/uploads/{connectorId}/files', + resource_path='/org/{organizationId}/uploads/{connectorId}/files', 
path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -600,7 +600,7 @@ def _get_upload_files_from_connector_serialize( return self.api_client.param_serialize( method='GET', - resource_path='/org/{organization}/uploads/{connectorId}/files', + resource_path='/org/{organizationId}/uploads/{connectorId}/files', path_params=_path_params, query_params=_query_params, header_params=_header_params, @@ -916,7 +916,7 @@ def _start_file_upload_to_connector_serialize( return self.api_client.param_serialize( method='PUT', - resource_path='/org/{organization}/uploads/{connectorId}/files', + resource_path='/org/{organizationId}/uploads/{connectorId}/files', path_params=_path_params, query_params=_query_params, header_params=_header_params, diff --git a/src/python/vectorize_client/api_client.py b/vectorize_client/api_client.py similarity index 99% rename from src/python/vectorize_client/api_client.py rename to vectorize_client/api_client.py index 6d9ed35..b7ca4c8 100644 --- a/src/python/vectorize_client/api_client.py +++ b/vectorize_client/api_client.py @@ -382,6 +382,10 @@ def sanitize_for_serialization(self, obj): else: obj_dict = obj.__dict__ + if isinstance(obj_dict, list): + # here we handle instances that can either be a list or something else, and only became a real list by calling to_dict() + return self.sanitize_for_serialization(obj_dict) + return { key: self.sanitize_for_serialization(val) for key, val in obj_dict.items() diff --git a/src/python/vectorize_client/api_response.py b/vectorize_client/api_response.py similarity index 100% rename from src/python/vectorize_client/api_response.py rename to vectorize_client/api_response.py diff --git a/src/python/vectorize_client/configuration.py b/vectorize_client/configuration.py similarity index 97% rename from src/python/vectorize_client/configuration.py rename to vectorize_client/configuration.py index e3166ce..ab09b9a 100644 --- a/src/python/vectorize_client/configuration.py +++ 
b/vectorize_client/configuration.py @@ -18,7 +18,7 @@ from logging import FileHandler import multiprocessing import sys -from typing import Any, ClassVar, Dict, List, Literal, Optional, TypedDict +from typing import Any, ClassVar, Dict, List, Literal, Optional, TypedDict, Union from typing_extensions import NotRequired, Self import urllib3 @@ -161,6 +161,8 @@ class Configuration: :param ssl_ca_cert: str - the path to a file of concatenated CA certificates in PEM format. :param retries: Number of retries for API requests. + :param ca_cert_data: verify the peer using concatenated CA certificate data + in PEM (str) or DER (bytes) format. :Example: """ @@ -175,13 +177,14 @@ def __init__( username: Optional[str]=None, password: Optional[str]=None, access_token: Optional[str]=None, - server_index: Optional[int]=None, + server_index: Optional[int]=None, server_variables: Optional[ServerVariablesT]=None, server_operation_index: Optional[Dict[int, int]]=None, server_operation_variables: Optional[Dict[int, ServerVariablesT]]=None, ignore_operation_servers: bool=False, ssl_ca_cert: Optional[str]=None, retries: Optional[int] = None, + ca_cert_data: Optional[Union[str, bytes]] = None, *, debug: Optional[bool] = None, ) -> None: @@ -259,6 +262,10 @@ def __init__( self.ssl_ca_cert = ssl_ca_cert """Set this to customize the certificate file to verify the peer. """ + self.ca_cert_data = ca_cert_data + """Set this to verify the peer using PEM (str) or DER (bytes) + certificate data. 
+ """ self.cert_file = None """client certificate file """ diff --git a/src/python/vectorize_client/exceptions.py b/vectorize_client/exceptions.py similarity index 100% rename from src/python/vectorize_client/exceptions.py rename to vectorize_client/exceptions.py diff --git a/vectorize_client/models/__init__.py b/vectorize_client/models/__init__.py new file mode 100644 index 0000000..a3de4d4 --- /dev/null +++ b/vectorize_client/models/__init__.py @@ -0,0 +1,304 @@ +# coding: utf-8 + +# flake8: noqa +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +# import models into model package +from vectorize_client.models.ai_platform import AIPlatform +from vectorize_client.models.ai_platform_config_schema import AIPlatformConfigSchema +from vectorize_client.models.ai_platform_input import AIPlatformInput +from vectorize_client.models.ai_platform_schema import AIPlatformSchema +from vectorize_client.models.ai_platform_type import AIPlatformType +from vectorize_client.models.awss3_auth_config import AWSS3AuthConfig +from vectorize_client.models.awss3_config import AWSS3Config +from vectorize_client.models.azureaisearch_auth_config import AZUREAISEARCHAuthConfig +from vectorize_client.models.azureaisearch_config import AZUREAISEARCHConfig +from vectorize_client.models.azureblob_auth_config import AZUREBLOBAuthConfig +from vectorize_client.models.azureblob_config import AZUREBLOBConfig +from vectorize_client.models.add_user_from_source_connector_response import AddUserFromSourceConnectorResponse +from vectorize_client.models.add_user_to_source_connector_request import AddUserToSourceConnectorRequest +from vectorize_client.models.add_user_to_source_connector_request_selected_files import AddUserToSourceConnectorRequestSelectedFiles +from 
vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of import AddUserToSourceConnectorRequestSelectedFilesAnyOf +from vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of_value import AddUserToSourceConnectorRequestSelectedFilesAnyOfValue +from vectorize_client.models.advanced_query import AdvancedQuery +from vectorize_client.models.amazon_s3 import AmazonS3 +from vectorize_client.models.amazon_s31 import AmazonS31 +from vectorize_client.models.amazon_s32 import AmazonS32 +from vectorize_client.models.azure_blob_storage import AzureBlobStorage +from vectorize_client.models.azure_blob_storage1 import AzureBlobStorage1 +from vectorize_client.models.azure_blob_storage2 import AzureBlobStorage2 +from vectorize_client.models.azureaisearch import Azureaisearch +from vectorize_client.models.azureaisearch1 import Azureaisearch1 +from vectorize_client.models.azureaisearch2 import Azureaisearch2 +from vectorize_client.models.bedrock_auth_config import BEDROCKAuthConfig +from vectorize_client.models.bedrock import Bedrock +from vectorize_client.models.bedrock1 import Bedrock1 +from vectorize_client.models.bedrock2 import Bedrock2 +from vectorize_client.models.capella_auth_config import CAPELLAAuthConfig +from vectorize_client.models.capella_config import CAPELLAConfig +from vectorize_client.models.confluence_auth_config import CONFLUENCEAuthConfig +from vectorize_client.models.confluence_config import CONFLUENCEConfig +from vectorize_client.models.capella import Capella +from vectorize_client.models.capella1 import Capella1 +from vectorize_client.models.capella2 import Capella2 +from vectorize_client.models.confluence import Confluence +from vectorize_client.models.confluence1 import Confluence1 +from vectorize_client.models.confluence2 import Confluence2 +from vectorize_client.models.create_ai_platform_connector import CreateAIPlatformConnector +from vectorize_client.models.create_ai_platform_connector_request_inner 
import CreateAIPlatformConnectorRequestInner +from vectorize_client.models.create_ai_platform_connector_response import CreateAIPlatformConnectorResponse +from vectorize_client.models.create_destination_connector import CreateDestinationConnector +from vectorize_client.models.create_destination_connector_request_inner import CreateDestinationConnectorRequestInner +from vectorize_client.models.create_destination_connector_response import CreateDestinationConnectorResponse +from vectorize_client.models.create_pipeline_response import CreatePipelineResponse +from vectorize_client.models.create_pipeline_response_data import CreatePipelineResponseData +from vectorize_client.models.create_source_connector import CreateSourceConnector +from vectorize_client.models.create_source_connector_request_inner import CreateSourceConnectorRequestInner +from vectorize_client.models.create_source_connector_response import CreateSourceConnectorResponse +from vectorize_client.models.created_ai_platform_connector import CreatedAIPlatformConnector +from vectorize_client.models.created_destination_connector import CreatedDestinationConnector +from vectorize_client.models.created_source_connector import CreatedSourceConnector +from vectorize_client.models.datastax_auth_config import DATASTAXAuthConfig +from vectorize_client.models.datastax_config import DATASTAXConfig +from vectorize_client.models.discord_auth_config import DISCORDAuthConfig +from vectorize_client.models.discord_config import DISCORDConfig +from vectorize_client.models.dropbox_auth_config import DROPBOXAuthConfig +from vectorize_client.models.dropbox_config import DROPBOXConfig +from vectorize_client.models.dropboxoauth_auth_config import DROPBOXOAUTHAuthConfig +from vectorize_client.models.dropboxoauthmulti_auth_config import DROPBOXOAUTHMULTIAuthConfig +from vectorize_client.models.dropboxoauthmulticustom_auth_config import DROPBOXOAUTHMULTICUSTOMAuthConfig +from vectorize_client.models.datastax import Datastax +from 
vectorize_client.models.datastax1 import Datastax1 +from vectorize_client.models.datastax2 import Datastax2 +from vectorize_client.models.deep_research_result import DeepResearchResult +from vectorize_client.models.delete_ai_platform_connector_response import DeleteAIPlatformConnectorResponse +from vectorize_client.models.delete_destination_connector_response import DeleteDestinationConnectorResponse +from vectorize_client.models.delete_file_response import DeleteFileResponse +from vectorize_client.models.delete_pipeline_response import DeletePipelineResponse +from vectorize_client.models.delete_source_connector_response import DeleteSourceConnectorResponse +from vectorize_client.models.destination_connector import DestinationConnector +from vectorize_client.models.destination_connector_input import DestinationConnectorInput +from vectorize_client.models.destination_connector_input_config import DestinationConnectorInputConfig +from vectorize_client.models.destination_connector_schema import DestinationConnectorSchema +from vectorize_client.models.destination_connector_type import DestinationConnectorType +from vectorize_client.models.discord import Discord +from vectorize_client.models.discord1 import Discord1 +from vectorize_client.models.discord2 import Discord2 +from vectorize_client.models.document import Document +from vectorize_client.models.dropbox import Dropbox +from vectorize_client.models.dropbox1 import Dropbox1 +from vectorize_client.models.dropbox2 import Dropbox2 +from vectorize_client.models.dropbox_oauth import DropboxOauth +from vectorize_client.models.dropbox_oauth1 import DropboxOauth1 +from vectorize_client.models.dropbox_oauth2 import DropboxOauth2 +from vectorize_client.models.dropbox_oauth_multi import DropboxOauthMulti +from vectorize_client.models.dropbox_oauth_multi1 import DropboxOauthMulti1 +from vectorize_client.models.dropbox_oauth_multi2 import DropboxOauthMulti2 +from vectorize_client.models.dropbox_oauth_multi_custom import 
DropboxOauthMultiCustom +from vectorize_client.models.dropbox_oauth_multi_custom1 import DropboxOauthMultiCustom1 +from vectorize_client.models.dropbox_oauth_multi_custom2 import DropboxOauthMultiCustom2 +from vectorize_client.models.elastic_auth_config import ELASTICAuthConfig +from vectorize_client.models.elastic_config import ELASTICConfig +from vectorize_client.models.elastic import Elastic +from vectorize_client.models.elastic1 import Elastic1 +from vectorize_client.models.elastic2 import Elastic2 +from vectorize_client.models.extraction_chunking_strategy import ExtractionChunkingStrategy +from vectorize_client.models.extraction_result import ExtractionResult +from vectorize_client.models.extraction_result_response import ExtractionResultResponse +from vectorize_client.models.extraction_type import ExtractionType +from vectorize_client.models.fileupload_auth_config import FILEUPLOADAuthConfig +from vectorize_client.models.firecrawl_auth_config import FIRECRAWLAuthConfig +from vectorize_client.models.firecrawl_config import FIRECRAWLConfig +from vectorize_client.models.fireflies_auth_config import FIREFLIESAuthConfig +from vectorize_client.models.fireflies_config import FIREFLIESConfig +from vectorize_client.models.file_upload import FileUpload +from vectorize_client.models.file_upload1 import FileUpload1 +from vectorize_client.models.file_upload2 import FileUpload2 +from vectorize_client.models.firecrawl import Firecrawl +from vectorize_client.models.firecrawl1 import Firecrawl1 +from vectorize_client.models.firecrawl2 import Firecrawl2 +from vectorize_client.models.fireflies import Fireflies +from vectorize_client.models.fireflies1 import Fireflies1 +from vectorize_client.models.fireflies2 import Fireflies2 +from vectorize_client.models.gcs_auth_config import GCSAuthConfig +from vectorize_client.models.gcs_config import GCSConfig +from vectorize_client.models.github_auth_config import GITHUBAuthConfig +from vectorize_client.models.github_config import 
GITHUBConfig +from vectorize_client.models.googledrive_auth_config import GOOGLEDRIVEAuthConfig +from vectorize_client.models.googledrive_config import GOOGLEDRIVEConfig +from vectorize_client.models.googledriveoauth_auth_config import GOOGLEDRIVEOAUTHAuthConfig +from vectorize_client.models.googledriveoauth_config import GOOGLEDRIVEOAUTHConfig +from vectorize_client.models.googledriveoauthmulti_auth_config import GOOGLEDRIVEOAUTHMULTIAuthConfig +from vectorize_client.models.googledriveoauthmulticustom_auth_config import GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig +from vectorize_client.models.googledriveoauthmulticustom_config import GOOGLEDRIVEOAUTHMULTICUSTOMConfig +from vectorize_client.models.googledriveoauthmulti_config import GOOGLEDRIVEOAUTHMULTIConfig +from vectorize_client.models.get_ai_platform_connectors200_response import GetAIPlatformConnectors200Response +from vectorize_client.models.get_deep_research_response import GetDeepResearchResponse +from vectorize_client.models.get_destination_connectors200_response import GetDestinationConnectors200Response +from vectorize_client.models.get_pipeline_events_response import GetPipelineEventsResponse +from vectorize_client.models.get_pipeline_metrics_response import GetPipelineMetricsResponse +from vectorize_client.models.get_pipeline_response import GetPipelineResponse +from vectorize_client.models.get_pipelines400_response import GetPipelines400Response +from vectorize_client.models.get_pipelines_response import GetPipelinesResponse +from vectorize_client.models.get_source_connectors200_response import GetSourceConnectors200Response +from vectorize_client.models.get_upload_files_response import GetUploadFilesResponse +from vectorize_client.models.github import Github +from vectorize_client.models.github1 import Github1 +from vectorize_client.models.github2 import Github2 +from vectorize_client.models.google_cloud_storage import GoogleCloudStorage +from vectorize_client.models.google_cloud_storage1 import 
GoogleCloudStorage1 +from vectorize_client.models.google_cloud_storage2 import GoogleCloudStorage2 +from vectorize_client.models.google_drive import GoogleDrive +from vectorize_client.models.google_drive1 import GoogleDrive1 +from vectorize_client.models.google_drive2 import GoogleDrive2 +from vectorize_client.models.google_drive_o_auth import GoogleDriveOAuth +from vectorize_client.models.google_drive_o_auth1 import GoogleDriveOAuth1 +from vectorize_client.models.google_drive_o_auth2 import GoogleDriveOAuth2 +from vectorize_client.models.google_drive_oauth_multi import GoogleDriveOauthMulti +from vectorize_client.models.google_drive_oauth_multi1 import GoogleDriveOauthMulti1 +from vectorize_client.models.google_drive_oauth_multi2 import GoogleDriveOauthMulti2 +from vectorize_client.models.google_drive_oauth_multi_custom import GoogleDriveOauthMultiCustom +from vectorize_client.models.google_drive_oauth_multi_custom1 import GoogleDriveOauthMultiCustom1 +from vectorize_client.models.google_drive_oauth_multi_custom2 import GoogleDriveOauthMultiCustom2 +from vectorize_client.models.intercom_auth_config import INTERCOMAuthConfig +from vectorize_client.models.intercom_config import INTERCOMConfig +from vectorize_client.models.intercom import Intercom +from vectorize_client.models.intercom1 import Intercom1 +from vectorize_client.models.intercom2 import Intercom2 +from vectorize_client.models.milvus_auth_config import MILVUSAuthConfig +from vectorize_client.models.milvus_config import MILVUSConfig +from vectorize_client.models.metadata_extraction_strategy import MetadataExtractionStrategy +from vectorize_client.models.metadata_extraction_strategy_schema import MetadataExtractionStrategySchema +from vectorize_client.models.milvus import Milvus +from vectorize_client.models.milvus1 import Milvus1 +from vectorize_client.models.milvus2 import Milvus2 +from vectorize_client.models.n8_n_config import N8NConfig +from vectorize_client.models.notion_auth_config import 
NOTIONAuthConfig +from vectorize_client.models.notion_config import NOTIONConfig +from vectorize_client.models.notionoauthmulti_auth_config import NOTIONOAUTHMULTIAuthConfig +from vectorize_client.models.notionoauthmulticustom_auth_config import NOTIONOAUTHMULTICUSTOMAuthConfig +from vectorize_client.models.notion import Notion +from vectorize_client.models.notion1 import Notion1 +from vectorize_client.models.notion2 import Notion2 +from vectorize_client.models.notion_oauth_multi import NotionOauthMulti +from vectorize_client.models.notion_oauth_multi1 import NotionOauthMulti1 +from vectorize_client.models.notion_oauth_multi2 import NotionOauthMulti2 +from vectorize_client.models.notion_oauth_multi_custom import NotionOauthMultiCustom +from vectorize_client.models.notion_oauth_multi_custom1 import NotionOauthMultiCustom1 +from vectorize_client.models.notion_oauth_multi_custom2 import NotionOauthMultiCustom2 +from vectorize_client.models.onedrive_auth_config import ONEDRIVEAuthConfig +from vectorize_client.models.onedrive_config import ONEDRIVEConfig +from vectorize_client.models.openai_auth_config import OPENAIAuthConfig +from vectorize_client.models.one_drive import OneDrive +from vectorize_client.models.one_drive1 import OneDrive1 +from vectorize_client.models.one_drive2 import OneDrive2 +from vectorize_client.models.openai import Openai +from vectorize_client.models.openai1 import Openai1 +from vectorize_client.models.openai2 import Openai2 +from vectorize_client.models.pinecone_auth_config import PINECONEAuthConfig +from vectorize_client.models.pinecone_config import PINECONEConfig +from vectorize_client.models.postgresql_auth_config import POSTGRESQLAuthConfig +from vectorize_client.models.postgresql_config import POSTGRESQLConfig +from vectorize_client.models.pinecone import Pinecone +from vectorize_client.models.pinecone1 import Pinecone1 +from vectorize_client.models.pinecone2 import Pinecone2 +from vectorize_client.models.pipeline_ai_platform_request_inner 
import PipelineAIPlatformRequestInner +from vectorize_client.models.pipeline_configuration_schema import PipelineConfigurationSchema +from vectorize_client.models.pipeline_destination_connector_request_inner import PipelineDestinationConnectorRequestInner +from vectorize_client.models.pipeline_events import PipelineEvents +from vectorize_client.models.pipeline_list_summary import PipelineListSummary +from vectorize_client.models.pipeline_metrics import PipelineMetrics +from vectorize_client.models.pipeline_source_connector_request_inner import PipelineSourceConnectorRequestInner +from vectorize_client.models.pipeline_summary import PipelineSummary +from vectorize_client.models.postgresql import Postgresql +from vectorize_client.models.postgresql1 import Postgresql1 +from vectorize_client.models.postgresql2 import Postgresql2 +from vectorize_client.models.qdrant_auth_config import QDRANTAuthConfig +from vectorize_client.models.qdrant_config import QDRANTConfig +from vectorize_client.models.qdrant import Qdrant +from vectorize_client.models.qdrant1 import Qdrant1 +from vectorize_client.models.qdrant2 import Qdrant2 +from vectorize_client.models.remove_user_from_source_connector_request import RemoveUserFromSourceConnectorRequest +from vectorize_client.models.remove_user_from_source_connector_response import RemoveUserFromSourceConnectorResponse +from vectorize_client.models.retrieve_context import RetrieveContext +from vectorize_client.models.retrieve_context_message import RetrieveContextMessage +from vectorize_client.models.retrieve_documents_request import RetrieveDocumentsRequest +from vectorize_client.models.retrieve_documents_response import RetrieveDocumentsResponse +from vectorize_client.models.sharepoint_auth_config import SHAREPOINTAuthConfig +from vectorize_client.models.sharepoint_config import SHAREPOINTConfig +from vectorize_client.models.singlestore_auth_config import SINGLESTOREAuthConfig +from vectorize_client.models.singlestore_config import 
SINGLESTOREConfig +from vectorize_client.models.supabase_auth_config import SUPABASEAuthConfig +from vectorize_client.models.supabase_config import SUPABASEConfig +from vectorize_client.models.schedule_schema import ScheduleSchema +from vectorize_client.models.schedule_schema_type import ScheduleSchemaType +from vectorize_client.models.sharepoint import Sharepoint +from vectorize_client.models.sharepoint1 import Sharepoint1 +from vectorize_client.models.sharepoint2 import Sharepoint2 +from vectorize_client.models.singlestore import Singlestore +from vectorize_client.models.singlestore1 import Singlestore1 +from vectorize_client.models.singlestore2 import Singlestore2 +from vectorize_client.models.source_connector import SourceConnector +from vectorize_client.models.source_connector_input import SourceConnectorInput +from vectorize_client.models.source_connector_input_config import SourceConnectorInputConfig +from vectorize_client.models.source_connector_schema import SourceConnectorSchema +from vectorize_client.models.source_connector_type import SourceConnectorType +from vectorize_client.models.start_deep_research_request import StartDeepResearchRequest +from vectorize_client.models.start_deep_research_response import StartDeepResearchResponse +from vectorize_client.models.start_extraction_request import StartExtractionRequest +from vectorize_client.models.start_extraction_response import StartExtractionResponse +from vectorize_client.models.start_file_upload_request import StartFileUploadRequest +from vectorize_client.models.start_file_upload_response import StartFileUploadResponse +from vectorize_client.models.start_file_upload_to_connector_request import StartFileUploadToConnectorRequest +from vectorize_client.models.start_file_upload_to_connector_response import StartFileUploadToConnectorResponse +from vectorize_client.models.start_pipeline_response import StartPipelineResponse +from vectorize_client.models.stop_pipeline_response import StopPipelineResponse 
+from vectorize_client.models.supabase import Supabase +from vectorize_client.models.supabase1 import Supabase1 +from vectorize_client.models.supabase2 import Supabase2 +from vectorize_client.models.turbopuffer_auth_config import TURBOPUFFERAuthConfig +from vectorize_client.models.turbopuffer_config import TURBOPUFFERConfig +from vectorize_client.models.turbopuffer import Turbopuffer +from vectorize_client.models.turbopuffer1 import Turbopuffer1 +from vectorize_client.models.turbopuffer2 import Turbopuffer2 +from vectorize_client.models.update_ai_platform_connector_request import UpdateAIPlatformConnectorRequest +from vectorize_client.models.update_ai_platform_connector_response import UpdateAIPlatformConnectorResponse +from vectorize_client.models.update_aiplatform_connector_request import UpdateAiplatformConnectorRequest +from vectorize_client.models.update_destination_connector_request import UpdateDestinationConnectorRequest +from vectorize_client.models.update_destination_connector_response import UpdateDestinationConnectorResponse +from vectorize_client.models.update_source_connector_request import UpdateSourceConnectorRequest +from vectorize_client.models.update_source_connector_response import UpdateSourceConnectorResponse +from vectorize_client.models.update_source_connector_response_data import UpdateSourceConnectorResponseData +from vectorize_client.models.update_user_in_source_connector_request import UpdateUserInSourceConnectorRequest +from vectorize_client.models.update_user_in_source_connector_response import UpdateUserInSourceConnectorResponse +from vectorize_client.models.updated_ai_platform_connector_data import UpdatedAIPlatformConnectorData +from vectorize_client.models.updated_destination_connector_data import UpdatedDestinationConnectorData +from vectorize_client.models.upload_file import UploadFile +from vectorize_client.models.vertex_auth_config import VERTEXAuthConfig +from vectorize_client.models.voyage_auth_config import VOYAGEAuthConfig 
+from vectorize_client.models.vertex import Vertex +from vectorize_client.models.vertex1 import Vertex1 +from vectorize_client.models.vertex2 import Vertex2 +from vectorize_client.models.voyage import Voyage +from vectorize_client.models.voyage1 import Voyage1 +from vectorize_client.models.voyage2 import Voyage2 +from vectorize_client.models.weaviate_auth_config import WEAVIATEAuthConfig +from vectorize_client.models.weaviate_config import WEAVIATEConfig +from vectorize_client.models.webcrawler_auth_config import WEBCRAWLERAuthConfig +from vectorize_client.models.webcrawler_config import WEBCRAWLERConfig +from vectorize_client.models.weaviate import Weaviate +from vectorize_client.models.weaviate1 import Weaviate1 +from vectorize_client.models.weaviate2 import Weaviate2 +from vectorize_client.models.web_crawler import WebCrawler +from vectorize_client.models.web_crawler1 import WebCrawler1 +from vectorize_client.models.web_crawler2 import WebCrawler2 diff --git a/src/python/vectorize_client/models/add_user_from_source_connector_response.py b/vectorize_client/models/add_user_from_source_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/add_user_from_source_connector_response.py rename to vectorize_client/models/add_user_from_source_connector_response.py diff --git a/src/python/vectorize_client/models/add_user_to_source_connector_request.py b/vectorize_client/models/add_user_to_source_connector_request.py similarity index 72% rename from src/python/vectorize_client/models/add_user_to_source_connector_request.py rename to vectorize_client/models/add_user_to_source_connector_request.py index ab54109..5ab517b 100644 --- a/src/python/vectorize_client/models/add_user_to_source_connector_request.py +++ b/vectorize_client/models/add_user_to_source_connector_request.py @@ -18,8 +18,8 @@ import json from pydantic import BaseModel, ConfigDict, Field, StrictStr -from typing import Any, ClassVar, Dict, List -from 
vectorize_client.models.add_user_to_source_connector_request_selected_files_value import AddUserToSourceConnectorRequestSelectedFilesValue +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.add_user_to_source_connector_request_selected_files import AddUserToSourceConnectorRequestSelectedFiles from typing import Optional, Set from typing_extensions import Self @@ -28,9 +28,10 @@ class AddUserToSourceConnectorRequest(BaseModel): AddUserToSourceConnectorRequest """ # noqa: E501 user_id: StrictStr = Field(alias="userId") - selected_files: Dict[str, AddUserToSourceConnectorRequestSelectedFilesValue] = Field(alias="selectedFiles") - refresh_token: StrictStr = Field(alias="refreshToken") - __properties: ClassVar[List[str]] = ["userId", "selectedFiles", "refreshToken"] + selected_files: AddUserToSourceConnectorRequestSelectedFiles = Field(alias="selectedFiles") + refresh_token: Optional[StrictStr] = Field(default=None, alias="refreshToken") + access_token: Optional[StrictStr] = Field(default=None, alias="accessToken") + __properties: ClassVar[List[str]] = ["userId", "selectedFiles", "refreshToken", "accessToken"] model_config = ConfigDict( populate_by_name=True, @@ -71,13 +72,9 @@ def to_dict(self) -> Dict[str, Any]: exclude=excluded_fields, exclude_none=True, ) - # override the default output from pydantic by calling `to_dict()` of each value in selected_files (dict) - _field_dict = {} + # override the default output from pydantic by calling `to_dict()` of selected_files if self.selected_files: - for _key_selected_files in self.selected_files: - if self.selected_files[_key_selected_files]: - _field_dict[_key_selected_files] = self.selected_files[_key_selected_files].to_dict() - _dict['selectedFiles'] = _field_dict + _dict['selectedFiles'] = self.selected_files.to_dict() return _dict @classmethod @@ -91,13 +88,9 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: _obj = cls.model_validate({ "userId": obj.get("userId"), 
- "selectedFiles": dict( - (_k, AddUserToSourceConnectorRequestSelectedFilesValue.from_dict(_v)) - for _k, _v in obj["selectedFiles"].items() - ) - if obj.get("selectedFiles") is not None - else None, - "refreshToken": obj.get("refreshToken") + "selectedFiles": AddUserToSourceConnectorRequestSelectedFiles.from_dict(obj["selectedFiles"]) if obj.get("selectedFiles") is not None else None, + "refreshToken": obj.get("refreshToken"), + "accessToken": obj.get("accessToken") }) return _obj diff --git a/vectorize_client/models/add_user_to_source_connector_request_selected_files.py b/vectorize_client/models/add_user_to_source_connector_request_selected_files.py new file mode 100644 index 0000000..426bc0e --- /dev/null +++ b/vectorize_client/models/add_user_to_source_connector_request_selected_files.py @@ -0,0 +1,137 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +from inspect import getfullargspec +import json +import pprint +import re # noqa: F401 +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Dict, Optional +from vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of import AddUserToSourceConnectorRequestSelectedFilesAnyOf +from vectorize_client.models.add_user_to_source_connector_request_selected_files_any_of_value import AddUserToSourceConnectorRequestSelectedFilesAnyOfValue +from typing import Union, Any, List, Set, TYPE_CHECKING, Optional, Dict +from typing_extensions import Literal, Self +from pydantic import Field + +ADDUSERTOSOURCECONNECTORREQUESTSELECTEDFILES_ANY_OF_SCHEMAS = ["AddUserToSourceConnectorRequestSelectedFilesAnyOf", "Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue]"] + +class AddUserToSourceConnectorRequestSelectedFiles(BaseModel): + """ + AddUserToSourceConnectorRequestSelectedFiles + """ + + # data type: Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue] + anyof_schema_1_validator: Optional[Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue]] = None + # data type: AddUserToSourceConnectorRequestSelectedFilesAnyOf + anyof_schema_2_validator: Optional[AddUserToSourceConnectorRequestSelectedFilesAnyOf] = None + if TYPE_CHECKING: + actual_instance: Optional[Union[AddUserToSourceConnectorRequestSelectedFilesAnyOf, Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue]]] = None + else: + actual_instance: Any = None + any_of_schemas: Set[str] = { "AddUserToSourceConnectorRequestSelectedFilesAnyOf", "Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue]" } + + model_config = { + "validate_assignment": True, + "protected_namespaces": (), + } + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is 
allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_anyof(cls, v): + instance = AddUserToSourceConnectorRequestSelectedFiles.model_construct() + error_messages = [] + # validate data type: Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue] + try: + instance.anyof_schema_1_validator = v + return v + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: AddUserToSourceConnectorRequestSelectedFilesAnyOf + if not isinstance(v, AddUserToSourceConnectorRequestSelectedFilesAnyOf): + error_messages.append(f"Error! Input type `{type(v)}` is not `AddUserToSourceConnectorRequestSelectedFilesAnyOf`") + else: + return v + + if error_messages: + # no match + raise ValueError("No match found when setting the actual_instance in AddUserToSourceConnectorRequestSelectedFiles with anyOf schemas: AddUserToSourceConnectorRequestSelectedFilesAnyOf, Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue]. 
Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Dict[str, Any]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + # deserialize data into Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue] + try: + # validation + instance.anyof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.anyof_schema_1_validator + return instance + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # anyof_schema_2_validator: Optional[AddUserToSourceConnectorRequestSelectedFilesAnyOf] = None + try: + instance.actual_instance = AddUserToSourceConnectorRequestSelectedFilesAnyOf.from_json(json_str) + return instance + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if error_messages: + # no match + raise ValueError("No match found when deserializing the JSON string into AddUserToSourceConnectorRequestSelectedFiles with anyOf schemas: AddUserToSourceConnectorRequestSelectedFilesAnyOf, Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue]. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], AddUserToSourceConnectorRequestSelectedFilesAnyOf, Dict[str, AddUserToSourceConnectorRequestSelectedFilesAnyOfValue]]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/vectorize_client/models/add_user_to_source_connector_request_selected_files_any_of.py b/vectorize_client/models/add_user_to_source_connector_request_selected_files_any_of.py new file mode 100644 index 0000000..2f8a725 --- /dev/null +++ b/vectorize_client/models/add_user_to_source_connector_request_selected_files_any_of.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AddUserToSourceConnectorRequestSelectedFilesAnyOf(BaseModel): + """ + AddUserToSourceConnectorRequestSelectedFilesAnyOf + """ # noqa: E501 + page_ids: Optional[List[StrictStr]] = Field(default=None, alias="pageIds") + database_ids: Optional[List[StrictStr]] = Field(default=None, alias="databaseIds") + __properties: ClassVar[List[str]] = ["pageIds", "databaseIds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AddUserToSourceConnectorRequestSelectedFilesAnyOf from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AddUserToSourceConnectorRequestSelectedFilesAnyOf from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "pageIds": obj.get("pageIds"), + "databaseIds": obj.get("databaseIds") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/add_user_to_source_connector_request_selected_files_value.py b/vectorize_client/models/add_user_to_source_connector_request_selected_files_any_of_value.py similarity index 91% rename from src/python/vectorize_client/models/add_user_to_source_connector_request_selected_files_value.py rename to vectorize_client/models/add_user_to_source_connector_request_selected_files_any_of_value.py index c640930..5d3cbae 100644 --- a/src/python/vectorize_client/models/add_user_to_source_connector_request_selected_files_value.py +++ b/vectorize_client/models/add_user_to_source_connector_request_selected_files_any_of_value.py @@ -22,9 +22,9 @@ from typing import Optional, Set from typing_extensions import Self -class AddUserToSourceConnectorRequestSelectedFilesValue(BaseModel): +class AddUserToSourceConnectorRequestSelectedFilesAnyOfValue(BaseModel): """ - AddUserToSourceConnectorRequestSelectedFilesValue + AddUserToSourceConnectorRequestSelectedFilesAnyOfValue """ # noqa: E501 name: StrictStr mime_type: StrictStr = Field(alias="mimeType") @@ -48,7 +48,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of AddUserToSourceConnectorRequestSelectedFilesValue from a JSON string""" + """Create an instance of AddUserToSourceConnectorRequestSelectedFilesAnyOfValue from a JSON string""" return 
cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -73,7 +73,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of AddUserToSourceConnectorRequestSelectedFilesValue from a dict""" + """Create an instance of AddUserToSourceConnectorRequestSelectedFilesAnyOfValue from a dict""" if obj is None: return None diff --git a/vectorize_client/models/advanced_query.py b/vectorize_client/models/advanced_query.py new file mode 100644 index 0000000..9c13b28 --- /dev/null +++ b/vectorize_client/models/advanced_query.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class AdvancedQuery(BaseModel): + """ + Advanced query parameters for enhanced search capabilities. + """ # noqa: E501 + mode: Optional[StrictStr] = Field(default=None, description="Search mode: 'text', 'vector', or 'hybrid'. 
Defaults to 'vector' if not specified.") + text_fields: Optional[List[StrictStr]] = Field(default=None, description="Fields to perform text search on.", alias="text-fields") + match_type: Optional[StrictStr] = Field(default=None, description="Type of text match to perform.", alias="match-type") + text_boost: Optional[Union[StrictFloat, StrictInt]] = Field(default=None, description="Multiplier for text search scores.", alias="text-boost") + filters: Optional[Dict[str, Any]] = Field(default=None, description="Elasticsearch-compatible filter object.") + __properties: ClassVar[List[str]] = ["mode", "text-fields", "match-type", "text-boost", "filters"] + + @field_validator('mode') + def mode_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['text', 'vector', 'hybrid']): + raise ValueError("must be one of enum values ('text', 'vector', 'hybrid')") + return value + + @field_validator('match_type') + def match_type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['match', 'match_phrase', 'multi_match']): + raise ValueError("must be one of enum values ('match', 'match_phrase', 'multi_match')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AdvancedQuery from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of 
the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AdvancedQuery from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "mode": obj.get("mode"), + "text-fields": obj.get("text-fields"), + "match-type": obj.get("match-type"), + "text-boost": obj.get("text-boost"), + "filters": obj.get("filters") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/ai_platform.py b/vectorize_client/models/ai_platform.py similarity index 100% rename from src/python/vectorize_client/models/ai_platform.py rename to vectorize_client/models/ai_platform.py diff --git a/src/python/vectorize_client/models/ai_platform_config_schema.py b/vectorize_client/models/ai_platform_config_schema.py similarity index 100% rename from src/python/vectorize_client/models/ai_platform_config_schema.py rename to vectorize_client/models/ai_platform_config_schema.py diff --git a/vectorize_client/models/ai_platform_input.py b/vectorize_client/models/ai_platform_input.py new file mode 100644 index 0000000..336afe8 --- /dev/null +++ b/vectorize_client/models/ai_platform_input.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AIPlatformInput(BaseModel): + """ + AI platform configuration + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the AI platform") + type: StrictStr = Field(description="Type of AI platform") + config: Optional[Any] = Field(description="Configuration specific to the AI platform") + __properties: ClassVar[List[str]] = ["id", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['BEDROCK', 'VERTEX', 'OPENAI', 'VOYAGE']): + raise ValueError("must be one of enum values ('BEDROCK', 'VERTEX', 'OPENAI', 'VOYAGE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AIPlatformInput from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # set to None if config (nullable) is None + # and model_fields_set contains the field + if self.config is None and "config" in self.model_fields_set: + _dict['config'] = None + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AIPlatformInput from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type"), + "config": obj.get("config") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/ai_platform_schema.py b/vectorize_client/models/ai_platform_schema.py similarity index 100% rename from src/python/vectorize_client/models/ai_platform_schema.py rename to vectorize_client/models/ai_platform_schema.py diff --git a/src/python/vectorize_client/models/ai_platform_type.py b/vectorize_client/models/ai_platform_type.py similarity index 96% rename from src/python/vectorize_client/models/ai_platform_type.py rename to vectorize_client/models/ai_platform_type.py index e60cf2e..6a91fd8 100644 --- a/src/python/vectorize_client/models/ai_platform_type.py +++ b/vectorize_client/models/ai_platform_type.py @@ -30,7 +30,6 @@ class AIPlatformType(str, Enum): VERTEX = 'VERTEX' OPENAI = 'OPENAI' VOYAGE = 'VOYAGE' - VECTORIZE = 'VECTORIZE' @classmethod def from_json(cls, json_str: str) -> Self: diff --git a/vectorize_client/models/amazon_s3.py b/vectorize_client/models/amazon_s3.py new file mode 100644 index 0000000..8901d4f --- /dev/null +++ b/vectorize_client/models/amazon_s3.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class 
manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.awss3_config import AWSS3Config +from typing import Optional, Set +from typing_extensions import Self + +class AmazonS3(BaseModel): + """ + AmazonS3 + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"AWS_S3\")") + config: AWSS3Config + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['AWS_S3']): + raise ValueError("must be one of enum values ('AWS_S3')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AmazonS3 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AmazonS3 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": AWSS3Config.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/amazon_s31.py b/vectorize_client/models/amazon_s31.py new file mode 100644 index 0000000..2158d83 --- /dev/null +++ b/vectorize_client/models/amazon_s31.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.awss3_config import AWSS3Config +from typing import Optional, Set +from typing_extensions import Self + +class AmazonS31(BaseModel): + """ + AmazonS31 + """ # noqa: E501 + config: Optional[AWSS3Config] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AmazonS31 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AmazonS31 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": AWSS3Config.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/amazon_s32.py b/vectorize_client/models/amazon_s32.py new file mode 100644 index 0000000..c661969 --- /dev/null +++ b/vectorize_client/models/amazon_s32.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class AmazonS32(BaseModel): + """ + AmazonS32 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"AWS_S3\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['AWS_S3']): + raise ValueError("must be one of enum values ('AWS_S3')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AmazonS32 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AmazonS32 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/awss3_auth_config.py b/vectorize_client/models/awss3_auth_config.py new file mode 100644 index 0000000..4aac3c9 --- /dev/null +++ b/vectorize_client/models/awss3_auth_config.py @@ -0,0 +1,114 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class AWSS3AuthConfig(BaseModel): + """ + Authentication configuration for Amazon S3 + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + access_key: Annotated[str, Field(strict=True)] = Field(description="Access Key. Example: Enter Access Key", alias="access-key") + secret_key: Annotated[str, Field(strict=True)] = Field(description="Secret Key. Example: Enter Secret Key", alias="secret-key") + bucket_name: StrictStr = Field(description="Bucket Name. Example: Enter your S3 Bucket Name", alias="bucket-name") + endpoint: Optional[StrictStr] = Field(default=None, description="Endpoint. 
Example: Enter Endpoint URL") + region: Optional[StrictStr] = Field(default=None, description="Region. Example: Region Name") + archiver: StrictBool = Field(description="Allow as archive destination") + __properties: ClassVar[List[str]] = ["name", "access-key", "secret-key", "bucket-name", "endpoint", "region", "archiver"] + + @field_validator('access_key') + def access_key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + @field_validator('secret_key') + def secret_key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AWSS3AuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AWSS3AuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "access-key": obj.get("access-key"), + "secret-key": obj.get("secret-key"), + "bucket-name": obj.get("bucket-name"), + "endpoint": obj.get("endpoint"), + "region": obj.get("region"), + "archiver": obj.get("archiver") if obj.get("archiver") is not None else False + }) + return _obj + + diff --git a/vectorize_client/models/awss3_config.py b/vectorize_client/models/awss3_config.py new file mode 100644 index 0000000..4fa0ba2 --- /dev/null +++ b/vectorize_client/models/awss3_config.py @@ -0,0 +1,106 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class AWSS3Config(BaseModel): + """ + Configuration for Amazon S3 connector + """ # noqa: E501 + file_extensions: List[StrictStr] = Field(description="File Extensions", alias="file-extensions") + idle_time: Union[Annotated[float, Field(strict=True, ge=1)], Annotated[int, Field(strict=True, ge=1)]] = Field(description="Check for updates every (seconds)", alias="idle-time") + recursive: Optional[StrictBool] = Field(default=None, description="Recursively scan all folders in the bucket") + path_prefix: Optional[StrictStr] = Field(default=None, description="Path Prefix", alias="path-prefix") + path_metadata_regex: Optional[StrictStr] = Field(default=None, description="Path Metadata Regex", alias="path-metadata-regex") + path_regex_group_names: Optional[StrictStr] = Field(default=None, description="Path Regex Group Names. 
Example: Enter Group Name", alias="path-regex-group-names") + __properties: ClassVar[List[str]] = ["file-extensions", "idle-time", "recursive", "path-prefix", "path-metadata-regex", "path-regex-group-names"] + + @field_validator('file_extensions') + def file_extensions_validate_enum(cls, value): + """Validates the enum""" + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AWSS3Config from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AWSS3Config from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file-extensions": obj.get("file-extensions"), + "idle-time": obj.get("idle-time") if obj.get("idle-time") is not None else 5, + "recursive": obj.get("recursive"), + "path-prefix": obj.get("path-prefix"), + "path-metadata-regex": obj.get("path-metadata-regex"), + "path-regex-group-names": obj.get("path-regex-group-names") + }) + return _obj + + diff --git a/vectorize_client/models/azure_blob_storage.py b/vectorize_client/models/azure_blob_storage.py new file mode 100644 index 0000000..d77dae7 --- /dev/null +++ b/vectorize_client/models/azure_blob_storage.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.azureblob_config import AZUREBLOBConfig +from typing import Optional, Set +from typing_extensions import Self + +class AzureBlobStorage(BaseModel): + """ + AzureBlobStorage + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"AZURE_BLOB\")") + config: AZUREBLOBConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['AZURE_BLOB']): + raise ValueError("must be one of enum values ('AZURE_BLOB')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AzureBlobStorage from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AzureBlobStorage from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": AZUREBLOBConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/azure_blob_storage1.py b/vectorize_client/models/azure_blob_storage1.py new file mode 100644 index 0000000..4624842 --- /dev/null +++ b/vectorize_client/models/azure_blob_storage1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.azureblob_config import AZUREBLOBConfig +from typing import Optional, Set +from typing_extensions import Self + +class AzureBlobStorage1(BaseModel): + """ + AzureBlobStorage1 + """ # noqa: E501 + config: Optional[AZUREBLOBConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AzureBlobStorage1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AzureBlobStorage1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": AZUREBLOBConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/azure_blob_storage2.py b/vectorize_client/models/azure_blob_storage2.py new file mode 100644 index 0000000..486f1ba --- /dev/null +++ b/vectorize_client/models/azure_blob_storage2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class AzureBlobStorage2(BaseModel): + """ + AzureBlobStorage2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"AZURE_BLOB\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['AZURE_BLOB']): + raise ValueError("must be one of enum values ('AZURE_BLOB')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AzureBlobStorage2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AzureBlobStorage2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/azureaisearch.py b/vectorize_client/models/azureaisearch.py new file mode 100644 index 0000000..aaadead --- /dev/null +++ b/vectorize_client/models/azureaisearch.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.azureaisearch_config import AZUREAISEARCHConfig +from typing import Optional, Set +from typing_extensions import Self + +class Azureaisearch(BaseModel): + """ + Azureaisearch + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"AZUREAISEARCH\")") + config: AZUREAISEARCHConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['AZUREAISEARCH']): + raise ValueError("must be one of enum values ('AZUREAISEARCH')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + 
def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Azureaisearch from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Azureaisearch from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": AZUREAISEARCHConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/azureaisearch1.py b/vectorize_client/models/azureaisearch1.py new file mode 100644 index 0000000..bd000c5 --- /dev/null +++ b/vectorize_client/models/azureaisearch1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 
0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.azureaisearch_config import AZUREAISEARCHConfig +from typing import Optional, Set +from typing_extensions import Self + +class Azureaisearch1(BaseModel): + """ + Azureaisearch1 + """ # noqa: E501 + config: Optional[AZUREAISEARCHConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Azureaisearch1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Azureaisearch1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": AZUREAISEARCHConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/azureaisearch2.py b/vectorize_client/models/azureaisearch2.py new file mode 100644 index 0000000..518e179 --- /dev/null +++ b/vectorize_client/models/azureaisearch2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Azureaisearch2(BaseModel): + """ + Azureaisearch2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"AZUREAISEARCH\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['AZUREAISEARCH']): + raise ValueError("must be one of enum values ('AZUREAISEARCH')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Azureaisearch2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Azureaisearch2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/azureaisearch_auth_config.py b/vectorize_client/models/azureaisearch_auth_config.py new file mode 100644 index 0000000..1f80237 --- /dev/null +++ b/vectorize_client/models/azureaisearch_auth_config.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class AZUREAISEARCHAuthConfig(BaseModel): + """ + Authentication configuration for Azure AI Search + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Azure AI Search integration") + service_name: StrictStr = Field(description="Azure AI Search Service Name. Example: Enter your Azure AI Search service name", alias="service-name") + api_key: Annotated[str, Field(strict=True)] = Field(description="API Key. 
Example: Enter your API key", alias="api-key") + __properties: ClassVar[List[str]] = ["name", "service-name", "api-key"] + + @field_validator('api_key') + def api_key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AZUREAISEARCHAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AZUREAISEARCHAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "service-name": obj.get("service-name"), + "api-key": obj.get("api-key") + }) + return _obj + + diff --git a/vectorize_client/models/azureaisearch_config.py b/vectorize_client/models/azureaisearch_config.py new file mode 100644 index 0000000..a4197a9 --- /dev/null +++ b/vectorize_client/models/azureaisearch_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class AZUREAISEARCHConfig(BaseModel): + """ + Configuration for Azure AI Search connector + """ # noqa: E501 + index: Annotated[str, Field(strict=True)] = Field(description="Index Name. 
Example: Enter index name") + __properties: ClassVar[List[str]] = ["index"] + + @field_validator('index') + def index_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^[a-z0-9][a-z0-9-]*[a-z0-9]$", value): + raise ValueError(r"must validate the regular expression /^[a-z0-9][a-z0-9-]*[a-z0-9]$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AZUREAISEARCHConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AZUREAISEARCHConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "index": obj.get("index") + }) + return _obj + + diff --git a/vectorize_client/models/azureblob_auth_config.py b/vectorize_client/models/azureblob_auth_config.py new file mode 100644 index 0000000..9a02590 --- /dev/null +++ b/vectorize_client/models/azureblob_auth_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AZUREBLOBAuthConfig(BaseModel): + """ + Authentication configuration for Azure Blob Storage + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + storage_account_name: StrictStr = Field(description="Storage Account Name. Example: Enter Storage Account Name", alias="storage-account-name") + storage_account_key: StrictStr = Field(description="Storage Account Key. Example: Enter Storage Account Key", alias="storage-account-key") + container: StrictStr = Field(description="Container. Example: Enter Container Name") + endpoint: Optional[StrictStr] = Field(default=None, description="Endpoint. 
Example: Enter Endpoint URL") + __properties: ClassVar[List[str]] = ["name", "storage-account-name", "storage-account-key", "container", "endpoint"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AZUREBLOBAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AZUREBLOBAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "storage-account-name": obj.get("storage-account-name"), + "storage-account-key": obj.get("storage-account-key"), + "container": obj.get("container"), + "endpoint": obj.get("endpoint") + }) + return _obj + + diff --git a/vectorize_client/models/azureblob_config.py b/vectorize_client/models/azureblob_config.py new file mode 100644 index 0000000..26e1be0 --- /dev/null +++ b/vectorize_client/models/azureblob_config.py @@ -0,0 +1,106 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class AZUREBLOBConfig(BaseModel): + """ + Configuration for Azure Blob Storage connector + """ # noqa: E501 + file_extensions: List[StrictStr] = Field(description="File Extensions", alias="file-extensions") + idle_time: Union[Annotated[float, Field(strict=True, ge=1)], Annotated[int, Field(strict=True, ge=1)]] = Field(description="Polling Interval (seconds)", alias="idle-time") + recursive: Optional[StrictBool] = Field(default=None, description="Recursively scan all folders in the bucket") + path_prefix: Optional[StrictStr] = Field(default=None, description="Path Prefix", alias="path-prefix") + path_metadata_regex: Optional[StrictStr] = Field(default=None, description="Path Metadata Regex", alias="path-metadata-regex") + path_regex_group_names: Optional[StrictStr] = Field(default=None, description="Path Regex Group Names. 
Example: Enter Group Name", alias="path-regex-group-names") + __properties: ClassVar[List[str]] = ["file-extensions", "idle-time", "recursive", "path-prefix", "path-metadata-regex", "path-regex-group-names"] + + @field_validator('file_extensions') + def file_extensions_validate_enum(cls, value): + """Validates the enum""" + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AZUREBLOBConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AZUREBLOBConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file-extensions": obj.get("file-extensions"), + "idle-time": obj.get("idle-time") if obj.get("idle-time") is not None else 5, + "recursive": obj.get("recursive"), + "path-prefix": obj.get("path-prefix"), + "path-metadata-regex": obj.get("path-metadata-regex"), + "path-regex-group-names": obj.get("path-regex-group-names") + }) + return _obj + + diff --git a/vectorize_client/models/bedrock.py b/vectorize_client/models/bedrock.py new file mode 100644 index 0000000..5151e94 --- /dev/null +++ b/vectorize_client/models/bedrock.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.bedrock_auth_config import BEDROCKAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class Bedrock(BaseModel): + """ + Bedrock + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"BEDROCK\")") + config: BEDROCKAuthConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['BEDROCK']): + raise ValueError("must be one of enum values ('BEDROCK')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Bedrock from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Bedrock from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": BEDROCKAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/bedrock1.py b/vectorize_client/models/bedrock1.py new file mode 100644 index 0000000..81d0314 --- /dev/null +++ b/vectorize_client/models/bedrock1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.bedrock_auth_config import BEDROCKAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class Bedrock1(BaseModel): + """ + Bedrock1 + """ # noqa: E501 + config: Optional[BEDROCKAuthConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Bedrock1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Bedrock1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": BEDROCKAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/bedrock2.py b/vectorize_client/models/bedrock2.py new file mode 100644 index 0000000..359a55d --- /dev/null +++ b/vectorize_client/models/bedrock2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Bedrock2(BaseModel): + """ + Bedrock2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"BEDROCK\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['BEDROCK']): + raise ValueError("must be one of enum values ('BEDROCK')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Bedrock2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Bedrock2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/bedrock_auth_config.py b/vectorize_client/models/bedrock_auth_config.py new file mode 100644 index 0000000..cb8c8d5 --- /dev/null +++ b/vectorize_client/models/bedrock_auth_config.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class BEDROCKAuthConfig(BaseModel): + """ + Authentication configuration for Amazon Bedrock + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Amazon Bedrock integration") + access_key: Annotated[str, Field(strict=True)] = Field(description="Access Key. Example: Enter your Amazon Bedrock Access Key", alias="access-key") + key: Annotated[str, Field(strict=True)] = Field(description="Secret Key. Example: Enter your Amazon Bedrock Secret Key") + region: StrictStr = Field(description="Region. 
Example: Region Name") + __properties: ClassVar[List[str]] = ["name", "access-key", "key", "region"] + + @field_validator('access_key') + def access_key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + @field_validator('key') + def key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of BEDROCKAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of BEDROCKAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "access-key": obj.get("access-key"), + "key": obj.get("key"), + "region": obj.get("region") + }) + return _obj + + diff --git a/vectorize_client/models/capella.py b/vectorize_client/models/capella.py new file mode 100644 index 0000000..a635d69 --- /dev/null +++ b/vectorize_client/models/capella.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.capella_config import CAPELLAConfig +from typing import Optional, Set +from typing_extensions import Self + +class Capella(BaseModel): + """ + Capella + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"CAPELLA\")") + config: CAPELLAConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['CAPELLA']): + raise ValueError("must be one of enum values ('CAPELLA')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Capella from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Capella from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": CAPELLAConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/capella1.py b/vectorize_client/models/capella1.py new file mode 100644 index 0000000..288f2e5 --- /dev/null +++ b/vectorize_client/models/capella1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.capella_config import CAPELLAConfig +from typing import Optional, Set +from typing_extensions import Self + +class Capella1(BaseModel): + """ + Capella1 + """ # noqa: E501 + config: Optional[CAPELLAConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Capella1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Capella1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": CAPELLAConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/capella2.py b/vectorize_client/models/capella2.py new file mode 100644 index 0000000..77eea69 --- /dev/null +++ b/vectorize_client/models/capella2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Capella2(BaseModel): + """ + Capella2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"CAPELLA\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['CAPELLA']): + raise ValueError("must be one of enum values ('CAPELLA')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Capella2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Capella2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/capella_auth_config.py b/vectorize_client/models/capella_auth_config.py new file mode 100644 index 0000000..d50a71a --- /dev/null +++ b/vectorize_client/models/capella_auth_config.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class CAPELLAAuthConfig(BaseModel): + """ + Authentication configuration for Couchbase Capella + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Capella integration") + username: StrictStr = Field(description="Cluster Access Name. Example: Enter your cluster access name") + password: StrictStr = Field(description="Cluster Access Password. Example: Enter your cluster access password") + connection_string: StrictStr = Field(description="Connection String. 
Example: Enter your connection string", alias="connection-string") + __properties: ClassVar[List[str]] = ["name", "username", "password", "connection-string"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CAPELLAAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CAPELLAAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "username": obj.get("username"), + "password": obj.get("password"), + "connection-string": obj.get("connection-string") + }) + return _obj + + diff --git a/vectorize_client/models/capella_config.py b/vectorize_client/models/capella_config.py new file mode 100644 index 0000000..8d12e54 --- /dev/null +++ b/vectorize_client/models/capella_config.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class CAPELLAConfig(BaseModel): + """ + Configuration for Couchbase Capella connector + """ # noqa: E501 + bucket: StrictStr = Field(description="Bucket Name. Example: Enter bucket name") + scope: StrictStr = Field(description="Scope Name. Example: Enter scope name") + collection: StrictStr = Field(description="Collection Name. Example: Enter collection name") + index: Annotated[str, Field(strict=True, max_length=255)] = Field(description="Search Index Name. 
Example: Enter search index name") + __properties: ClassVar[List[str]] = ["bucket", "scope", "collection", "index"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CAPELLAConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CAPELLAConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "bucket": obj.get("bucket"), + "scope": obj.get("scope"), + "collection": obj.get("collection"), + "index": obj.get("index") + }) + return _obj + + diff --git a/vectorize_client/models/confluence.py b/vectorize_client/models/confluence.py new file mode 100644 index 0000000..ba4b17d --- /dev/null +++ b/vectorize_client/models/confluence.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.confluence_config import CONFLUENCEConfig +from typing import Optional, Set +from typing_extensions import Self + +class Confluence(BaseModel): + """ + Confluence + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"CONFLUENCE\")") + config: CONFLUENCEConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['CONFLUENCE']): + raise ValueError("must be one of enum values ('CONFLUENCE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Confluence from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Confluence from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": CONFLUENCEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/confluence1.py b/vectorize_client/models/confluence1.py new file mode 100644 index 0000000..c908a5c --- /dev/null +++ b/vectorize_client/models/confluence1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.confluence_config import CONFLUENCEConfig +from typing import Optional, Set +from typing_extensions import Self + +class Confluence1(BaseModel): + """ + Confluence1 + """ # noqa: E501 + config: Optional[CONFLUENCEConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Confluence1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Confluence1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": CONFLUENCEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/confluence2.py b/vectorize_client/models/confluence2.py new file mode 100644 index 0000000..36a95bf --- /dev/null +++ b/vectorize_client/models/confluence2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Confluence2(BaseModel): + """ + Confluence2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"CONFLUENCE\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['CONFLUENCE']): + raise ValueError("must be one of enum values ('CONFLUENCE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Confluence2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Confluence2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/confluence_auth_config.py b/vectorize_client/models/confluence_auth_config.py new file mode 100644 index 0000000..2d76be1 --- /dev/null +++ b/vectorize_client/models/confluence_auth_config.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class CONFLUENCEAuthConfig(BaseModel): + """ + Authentication configuration for Confluence + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + username: StrictStr = Field(description="Username. Example: Enter your Confluence username") + api_token: Annotated[str, Field(strict=True)] = Field(description="API Token. Example: Enter your Confluence API token", alias="api-token") + domain: StrictStr = Field(description="Domain. Example: Enter your Confluence domain (e.g. 
my-domain.atlassian.net or confluence..com)") + __properties: ClassVar[List[str]] = ["name", "username", "api-token", "domain"] + + @field_validator('api_token') + def api_token_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CONFLUENCEAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CONFLUENCEAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "username": obj.get("username"), + "api-token": obj.get("api-token"), + "domain": obj.get("domain") + }) + return _obj + + diff --git a/vectorize_client/models/confluence_config.py b/vectorize_client/models/confluence_config.py new file mode 100644 index 0000000..db9bb6b --- /dev/null +++ b/vectorize_client/models/confluence_config.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class CONFLUENCEConfig(BaseModel): + """ + Configuration for Confluence connector + """ # noqa: E501 + spaces: StrictStr = Field(description="Spaces. Example: Spaces to include (name, key or id)") + root_parents: Optional[StrictStr] = Field(default=None, description="Root Parents. 
Example: Enter root parent pages", alias="root-parents") + __properties: ClassVar[List[str]] = ["spaces", "root-parents"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CONFLUENCEConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CONFLUENCEConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "spaces": obj.get("spaces"), + "root-parents": obj.get("root-parents") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/create_ai_platform_connector.py b/vectorize_client/models/create_ai_platform_connector.py similarity index 100% rename from src/python/vectorize_client/models/create_ai_platform_connector.py rename to vectorize_client/models/create_ai_platform_connector.py diff --git a/vectorize_client/models/create_ai_platform_connector_request_inner.py b/vectorize_client/models/create_ai_platform_connector_request_inner.py new file mode 100644 index 0000000..1aa5d7f --- /dev/null +++ b/vectorize_client/models/create_ai_platform_connector_request_inner.py @@ -0,0 +1,165 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
""" # noqa: E501


from __future__ import annotations
import json
import pprint
from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator
from typing import Any, List, Optional
from vectorize_client.models.bedrock import Bedrock
from vectorize_client.models.openai import Openai
from vectorize_client.models.vertex import Vertex
from vectorize_client.models.voyage import Voyage
from pydantic import StrictStr, Field
from typing import Union, List, Set, Optional, Dict
from typing_extensions import Literal, Self

CREATEAIPLATFORMCONNECTORREQUESTINNER_ONE_OF_SCHEMAS = ["Bedrock", "Openai", "Vertex", "Voyage"]

class CreateAIPlatformConnectorRequestInner(BaseModel):
    """oneOf wrapper: holds exactly one of Bedrock, Openai, Vertex, Voyage.

    The concrete value lives in `actual_instance`; the `oneof_schema_*`
    fields exist only to declare the candidate types to pydantic.
    """
    # data type: Bedrock
    oneof_schema_1_validator: Optional[Bedrock] = None
    # data type: Vertex
    oneof_schema_2_validator: Optional[Vertex] = None
    # data type: Openai
    oneof_schema_3_validator: Optional[Openai] = None
    # data type: Voyage
    oneof_schema_4_validator: Optional[Voyage] = None
    actual_instance: Optional[Union[Bedrock, Openai, Vertex, Voyage]] = None
    one_of_schemas: Set[str] = { "Bedrock", "Openai", "Vertex", "Voyage" }

    model_config = ConfigDict(
        validate_assignment=True,
        protected_namespaces=(),
    )


    def __init__(self, *args, **kwargs) -> None:
        """Accept either one positional value (becomes `actual_instance`)
        or keyword arguments -- never both."""
        if args:
            if len(args) > 1:
                raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`")
            if kwargs:
                raise ValueError("If a position argument is used, keyword arguments cannot be used.")
            super().__init__(actual_instance=args[0])
        else:
            super().__init__(**kwargs)

    @field_validator('actual_instance')
    def actual_instance_must_validate_oneof(cls, v):
        """Require `v` to be an instance of exactly one oneOf schema.

        Raises ValueError when zero or more than one of the candidate
        types matches; the collected per-type messages are appended to
        the error text.
        """
        # NOTE(review): `instance` is never used below; kept as generated.
        instance = CreateAIPlatformConnectorRequestInner.model_construct()
        error_messages = []
        match = 0
        # validate data type: Bedrock
        if not isinstance(v, Bedrock):
            error_messages.append(f"Error! Input type `{type(v)}` is not `Bedrock`")
        else:
            match += 1
        # validate data type: Vertex
        if not isinstance(v, Vertex):
            error_messages.append(f"Error! Input type `{type(v)}` is not `Vertex`")
        else:
            match += 1
        # validate data type: Openai
        if not isinstance(v, Openai):
            error_messages.append(f"Error! Input type `{type(v)}` is not `Openai`")
        else:
            match += 1
        # validate data type: Voyage
        if not isinstance(v, Voyage):
            error_messages.append(f"Error! Input type `{type(v)}` is not `Voyage`")
        else:
            match += 1
        if match > 1:
            # more than 1 match
            raise ValueError("Multiple matches found when setting `actual_instance` in CreateAIPlatformConnectorRequestInner with oneOf schemas: Bedrock, Openai, Vertex, Voyage. Details: " + ", ".join(error_messages))
        elif match == 0:
            # no match
            raise ValueError("No match found when setting `actual_instance` in CreateAIPlatformConnectorRequestInner with oneOf schemas: Bedrock, Openai, Vertex, Voyage. Details: " + ", ".join(error_messages))
        else:
            return v

    @classmethod
    def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self:
        """Build an instance from a dict by round-tripping through JSON."""
        return cls.from_dict(json.dumps(obj)) if False else cls.from_json(json.dumps(obj))

    @classmethod
    def from_json(cls, json_str: str) -> Self:
        """Returns the object represented by the json string"""
        instance = cls.model_construct()
        error_messages = []
        match = 0

        # Each candidate schema is tried in turn; the LAST successful
        # parse wins, and every failure's message is retained for the
        # combined error below.
        # deserialize data into Bedrock
        try:
            instance.actual_instance = Bedrock.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        # deserialize data into Vertex
        try:
            instance.actual_instance = Vertex.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        # deserialize data into Openai
        try:
            instance.actual_instance = Openai.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        # deserialize data into Voyage
        try:
            instance.actual_instance = Voyage.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))

        if match > 1:
            # more than 1 match
            raise ValueError("Multiple matches found when deserializing the JSON string into CreateAIPlatformConnectorRequestInner with oneOf schemas: Bedrock, Openai, Vertex, Voyage. Details: " + ", ".join(error_messages))
        elif match == 0:
            # no match
            raise ValueError("No match found when deserializing the JSON string into CreateAIPlatformConnectorRequestInner with oneOf schemas: Bedrock, Openai, Vertex, Voyage. Details: " + ", ".join(error_messages))
        else:
            return instance

    def to_json(self) -> str:
        """Returns the JSON representation of the actual instance"""
        if self.actual_instance is None:
            return "null"

        if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json):
            return self.actual_instance.to_json()
        else:
            # primitive fallback: plain JSON encoding
            return json.dumps(self.actual_instance)

    def to_dict(self) -> Optional[Union[Dict[str, Any], Bedrock, Openai, Vertex, Voyage]]:
        """Returns the dict representation of the actual instance"""
        if self.actual_instance is None:
            return None

        if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict):
            return self.actual_instance.to_dict()
        else:
            # primitive type
            return self.actual_instance

    def to_str(self) -> str:
        """Returns the string representation of the actual instance"""
        return pprint.pformat(self.model_dump())
rename from src/python/vectorize_client/models/create_destination_connector.py rename to vectorize_client/models/create_destination_connector.py diff --git a/vectorize_client/models/create_destination_connector_request_inner.py b/vectorize_client/models/create_destination_connector_request_inner.py new file mode 100644 index 0000000..888c8ed --- /dev/null +++ b/vectorize_client/models/create_destination_connector_request_inner.py @@ -0,0 +1,277 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.azureaisearch import Azureaisearch +from vectorize_client.models.capella import Capella +from vectorize_client.models.datastax import Datastax +from vectorize_client.models.elastic import Elastic +from vectorize_client.models.milvus import Milvus +from vectorize_client.models.pinecone import Pinecone +from vectorize_client.models.postgresql import Postgresql +from vectorize_client.models.qdrant import Qdrant +from vectorize_client.models.singlestore import Singlestore +from vectorize_client.models.supabase import Supabase +from vectorize_client.models.turbopuffer import Turbopuffer +from vectorize_client.models.weaviate import Weaviate +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +CREATEDESTINATIONCONNECTORREQUESTINNER_ONE_OF_SCHEMAS = ["Azureaisearch", "Capella", "Datastax", "Elastic", "Milvus", "Pinecone", "Postgresql", "Qdrant", "Singlestore", "Supabase", "Turbopuffer", "Weaviate"] + +class CreateDestinationConnectorRequestInner(BaseModel): + """ + 
CreateDestinationConnectorRequestInner + """ + # data type: Capella + oneof_schema_1_validator: Optional[Capella] = None + # data type: Datastax + oneof_schema_2_validator: Optional[Datastax] = None + # data type: Elastic + oneof_schema_3_validator: Optional[Elastic] = None + # data type: Pinecone + oneof_schema_4_validator: Optional[Pinecone] = None + # data type: Singlestore + oneof_schema_5_validator: Optional[Singlestore] = None + # data type: Milvus + oneof_schema_6_validator: Optional[Milvus] = None + # data type: Postgresql + oneof_schema_7_validator: Optional[Postgresql] = None + # data type: Qdrant + oneof_schema_8_validator: Optional[Qdrant] = None + # data type: Supabase + oneof_schema_9_validator: Optional[Supabase] = None + # data type: Weaviate + oneof_schema_10_validator: Optional[Weaviate] = None + # data type: Azureaisearch + oneof_schema_11_validator: Optional[Azureaisearch] = None + # data type: Turbopuffer + oneof_schema_12_validator: Optional[Turbopuffer] = None + actual_instance: Optional[Union[Azureaisearch, Capella, Datastax, Elastic, Milvus, Pinecone, Postgresql, Qdrant, Singlestore, Supabase, Turbopuffer, Weaviate]] = None + one_of_schemas: Set[str] = { "Azureaisearch", "Capella", "Datastax", "Elastic", "Milvus", "Pinecone", "Postgresql", "Qdrant", "Singlestore", "Supabase", "Turbopuffer", "Weaviate" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = CreateDestinationConnectorRequestInner.model_construct() + error_messages = [] + match = 0 + # 
validate data type: Capella + if not isinstance(v, Capella): + error_messages.append(f"Error! Input type `{type(v)}` is not `Capella`") + else: + match += 1 + # validate data type: Datastax + if not isinstance(v, Datastax): + error_messages.append(f"Error! Input type `{type(v)}` is not `Datastax`") + else: + match += 1 + # validate data type: Elastic + if not isinstance(v, Elastic): + error_messages.append(f"Error! Input type `{type(v)}` is not `Elastic`") + else: + match += 1 + # validate data type: Pinecone + if not isinstance(v, Pinecone): + error_messages.append(f"Error! Input type `{type(v)}` is not `Pinecone`") + else: + match += 1 + # validate data type: Singlestore + if not isinstance(v, Singlestore): + error_messages.append(f"Error! Input type `{type(v)}` is not `Singlestore`") + else: + match += 1 + # validate data type: Milvus + if not isinstance(v, Milvus): + error_messages.append(f"Error! Input type `{type(v)}` is not `Milvus`") + else: + match += 1 + # validate data type: Postgresql + if not isinstance(v, Postgresql): + error_messages.append(f"Error! Input type `{type(v)}` is not `Postgresql`") + else: + match += 1 + # validate data type: Qdrant + if not isinstance(v, Qdrant): + error_messages.append(f"Error! Input type `{type(v)}` is not `Qdrant`") + else: + match += 1 + # validate data type: Supabase + if not isinstance(v, Supabase): + error_messages.append(f"Error! Input type `{type(v)}` is not `Supabase`") + else: + match += 1 + # validate data type: Weaviate + if not isinstance(v, Weaviate): + error_messages.append(f"Error! Input type `{type(v)}` is not `Weaviate`") + else: + match += 1 + # validate data type: Azureaisearch + if not isinstance(v, Azureaisearch): + error_messages.append(f"Error! Input type `{type(v)}` is not `Azureaisearch`") + else: + match += 1 + # validate data type: Turbopuffer + if not isinstance(v, Turbopuffer): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `Turbopuffer`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in CreateDestinationConnectorRequestInner with oneOf schemas: Azureaisearch, Capella, Datastax, Elastic, Milvus, Pinecone, Postgresql, Qdrant, Singlestore, Supabase, Turbopuffer, Weaviate. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in CreateDestinationConnectorRequestInner with oneOf schemas: Azureaisearch, Capella, Datastax, Elastic, Milvus, Pinecone, Postgresql, Qdrant, Singlestore, Supabase, Turbopuffer, Weaviate. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into Capella + try: + instance.actual_instance = Capella.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Datastax + try: + instance.actual_instance = Datastax.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Elastic + try: + instance.actual_instance = Elastic.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Pinecone + try: + instance.actual_instance = Pinecone.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Singlestore + try: + instance.actual_instance = Singlestore.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + 
error_messages.append(str(e)) + # deserialize data into Milvus + try: + instance.actual_instance = Milvus.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Postgresql + try: + instance.actual_instance = Postgresql.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Qdrant + try: + instance.actual_instance = Qdrant.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Supabase + try: + instance.actual_instance = Supabase.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Weaviate + try: + instance.actual_instance = Weaviate.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Azureaisearch + try: + instance.actual_instance = Azureaisearch.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Turbopuffer + try: + instance.actual_instance = Turbopuffer.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into CreateDestinationConnectorRequestInner with oneOf schemas: Azureaisearch, Capella, Datastax, Elastic, Milvus, Pinecone, Postgresql, Qdrant, Singlestore, Supabase, Turbopuffer, Weaviate. 
Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into CreateDestinationConnectorRequestInner with oneOf schemas: Azureaisearch, Capella, Datastax, Elastic, Milvus, Pinecone, Postgresql, Qdrant, Singlestore, Supabase, Turbopuffer, Weaviate. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Azureaisearch, Capella, Datastax, Elastic, Milvus, Pinecone, Postgresql, Qdrant, Singlestore, Supabase, Turbopuffer, Weaviate]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/src/python/vectorize_client/models/create_destination_connector_response.py b/vectorize_client/models/create_destination_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/create_destination_connector_response.py rename to vectorize_client/models/create_destination_connector_response.py diff --git a/src/python/vectorize_client/models/create_pipeline_response.py b/vectorize_client/models/create_pipeline_response.py similarity index 100% rename from src/python/vectorize_client/models/create_pipeline_response.py rename to vectorize_client/models/create_pipeline_response.py diff --git 
a/src/python/vectorize_client/models/create_pipeline_response_data.py b/vectorize_client/models/create_pipeline_response_data.py similarity index 100% rename from src/python/vectorize_client/models/create_pipeline_response_data.py rename to vectorize_client/models/create_pipeline_response_data.py diff --git a/src/python/vectorize_client/models/create_source_connector.py b/vectorize_client/models/create_source_connector.py similarity index 100% rename from src/python/vectorize_client/models/create_source_connector.py rename to vectorize_client/models/create_source_connector.py diff --git a/vectorize_client/models/create_source_connector_request_inner.py b/vectorize_client/models/create_source_connector_request_inner.py new file mode 100644 index 0000000..d3ac177 --- /dev/null +++ b/vectorize_client/models/create_source_connector_request_inner.py @@ -0,0 +1,445 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.amazon_s3 import AmazonS3 +from vectorize_client.models.azure_blob_storage import AzureBlobStorage +from vectorize_client.models.confluence import Confluence +from vectorize_client.models.discord import Discord +from vectorize_client.models.dropbox import Dropbox +from vectorize_client.models.dropbox_oauth import DropboxOauth +from vectorize_client.models.dropbox_oauth_multi import DropboxOauthMulti +from vectorize_client.models.dropbox_oauth_multi_custom import DropboxOauthMultiCustom +from vectorize_client.models.file_upload import FileUpload +from vectorize_client.models.firecrawl import Firecrawl +from vectorize_client.models.fireflies import Fireflies +from vectorize_client.models.github import Github +from vectorize_client.models.google_cloud_storage import GoogleCloudStorage +from vectorize_client.models.google_drive import GoogleDrive +from vectorize_client.models.google_drive_o_auth import GoogleDriveOAuth +from vectorize_client.models.google_drive_oauth_multi import GoogleDriveOauthMulti +from vectorize_client.models.google_drive_oauth_multi_custom import GoogleDriveOauthMultiCustom +from vectorize_client.models.intercom import Intercom +from vectorize_client.models.notion import Notion +from vectorize_client.models.notion_oauth_multi import NotionOauthMulti +from vectorize_client.models.notion_oauth_multi_custom import NotionOauthMultiCustom +from vectorize_client.models.one_drive import OneDrive +from vectorize_client.models.sharepoint import Sharepoint +from vectorize_client.models.web_crawler import WebCrawler +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +CREATESOURCECONNECTORREQUESTINNER_ONE_OF_SCHEMAS = 
["AmazonS3", "AzureBlobStorage", "Confluence", "Discord", "Dropbox", "DropboxOauth", "DropboxOauthMulti", "DropboxOauthMultiCustom", "FileUpload", "Firecrawl", "Fireflies", "Github", "GoogleCloudStorage", "GoogleDrive", "GoogleDriveOAuth", "GoogleDriveOauthMulti", "GoogleDriveOauthMultiCustom", "Intercom", "Notion", "NotionOauthMulti", "NotionOauthMultiCustom", "OneDrive", "Sharepoint", "WebCrawler"] + +class CreateSourceConnectorRequestInner(BaseModel): + """ + CreateSourceConnectorRequestInner + """ + # data type: AmazonS3 + oneof_schema_1_validator: Optional[AmazonS3] = None + # data type: AzureBlobStorage + oneof_schema_2_validator: Optional[AzureBlobStorage] = None + # data type: Confluence + oneof_schema_3_validator: Optional[Confluence] = None + # data type: Discord + oneof_schema_4_validator: Optional[Discord] = None + # data type: Dropbox + oneof_schema_5_validator: Optional[Dropbox] = None + # data type: DropboxOauth + oneof_schema_6_validator: Optional[DropboxOauth] = None + # data type: DropboxOauthMulti + oneof_schema_7_validator: Optional[DropboxOauthMulti] = None + # data type: DropboxOauthMultiCustom + oneof_schema_8_validator: Optional[DropboxOauthMultiCustom] = None + # data type: GoogleDriveOAuth + oneof_schema_9_validator: Optional[GoogleDriveOAuth] = None + # data type: GoogleDrive + oneof_schema_10_validator: Optional[GoogleDrive] = None + # data type: GoogleDriveOauthMulti + oneof_schema_11_validator: Optional[GoogleDriveOauthMulti] = None + # data type: GoogleDriveOauthMultiCustom + oneof_schema_12_validator: Optional[GoogleDriveOauthMultiCustom] = None + # data type: Firecrawl + oneof_schema_13_validator: Optional[Firecrawl] = None + # data type: GoogleCloudStorage + oneof_schema_14_validator: Optional[GoogleCloudStorage] = None + # data type: Intercom + oneof_schema_15_validator: Optional[Intercom] = None + # data type: Notion + oneof_schema_16_validator: Optional[Notion] = None + # data type: NotionOauthMulti + oneof_schema_17_validator: 
Optional[NotionOauthMulti] = None + # data type: NotionOauthMultiCustom + oneof_schema_18_validator: Optional[NotionOauthMultiCustom] = None + # data type: OneDrive + oneof_schema_19_validator: Optional[OneDrive] = None + # data type: Sharepoint + oneof_schema_20_validator: Optional[Sharepoint] = None + # data type: WebCrawler + oneof_schema_21_validator: Optional[WebCrawler] = None + # data type: FileUpload + oneof_schema_22_validator: Optional[FileUpload] = None + # data type: Github + oneof_schema_23_validator: Optional[Github] = None + # data type: Fireflies + oneof_schema_24_validator: Optional[Fireflies] = None + actual_instance: Optional[Union[AmazonS3, AzureBlobStorage, Confluence, Discord, Dropbox, DropboxOauth, DropboxOauthMulti, DropboxOauthMultiCustom, FileUpload, Firecrawl, Fireflies, Github, GoogleCloudStorage, GoogleDrive, GoogleDriveOAuth, GoogleDriveOauthMulti, GoogleDriveOauthMultiCustom, Intercom, Notion, NotionOauthMulti, NotionOauthMultiCustom, OneDrive, Sharepoint, WebCrawler]] = None + one_of_schemas: Set[str] = { "AmazonS3", "AzureBlobStorage", "Confluence", "Discord", "Dropbox", "DropboxOauth", "DropboxOauthMulti", "DropboxOauthMultiCustom", "FileUpload", "Firecrawl", "Fireflies", "Github", "GoogleCloudStorage", "GoogleDrive", "GoogleDriveOAuth", "GoogleDriveOauthMulti", "GoogleDriveOauthMultiCustom", "Intercom", "Notion", "NotionOauthMulti", "NotionOauthMultiCustom", "OneDrive", "Sharepoint", "WebCrawler" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): 
+ instance = CreateSourceConnectorRequestInner.model_construct() + error_messages = [] + match = 0 + # validate data type: AmazonS3 + if not isinstance(v, AmazonS3): + error_messages.append(f"Error! Input type `{type(v)}` is not `AmazonS3`") + else: + match += 1 + # validate data type: AzureBlobStorage + if not isinstance(v, AzureBlobStorage): + error_messages.append(f"Error! Input type `{type(v)}` is not `AzureBlobStorage`") + else: + match += 1 + # validate data type: Confluence + if not isinstance(v, Confluence): + error_messages.append(f"Error! Input type `{type(v)}` is not `Confluence`") + else: + match += 1 + # validate data type: Discord + if not isinstance(v, Discord): + error_messages.append(f"Error! Input type `{type(v)}` is not `Discord`") + else: + match += 1 + # validate data type: Dropbox + if not isinstance(v, Dropbox): + error_messages.append(f"Error! Input type `{type(v)}` is not `Dropbox`") + else: + match += 1 + # validate data type: DropboxOauth + if not isinstance(v, DropboxOauth): + error_messages.append(f"Error! Input type `{type(v)}` is not `DropboxOauth`") + else: + match += 1 + # validate data type: DropboxOauthMulti + if not isinstance(v, DropboxOauthMulti): + error_messages.append(f"Error! Input type `{type(v)}` is not `DropboxOauthMulti`") + else: + match += 1 + # validate data type: DropboxOauthMultiCustom + if not isinstance(v, DropboxOauthMultiCustom): + error_messages.append(f"Error! Input type `{type(v)}` is not `DropboxOauthMultiCustom`") + else: + match += 1 + # validate data type: GoogleDriveOAuth + if not isinstance(v, GoogleDriveOAuth): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDriveOAuth`") + else: + match += 1 + # validate data type: GoogleDrive + if not isinstance(v, GoogleDrive): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `GoogleDrive`") + else: + match += 1 + # validate data type: GoogleDriveOauthMulti + if not isinstance(v, GoogleDriveOauthMulti): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDriveOauthMulti`") + else: + match += 1 + # validate data type: GoogleDriveOauthMultiCustom + if not isinstance(v, GoogleDriveOauthMultiCustom): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDriveOauthMultiCustom`") + else: + match += 1 + # validate data type: Firecrawl + if not isinstance(v, Firecrawl): + error_messages.append(f"Error! Input type `{type(v)}` is not `Firecrawl`") + else: + match += 1 + # validate data type: GoogleCloudStorage + if not isinstance(v, GoogleCloudStorage): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleCloudStorage`") + else: + match += 1 + # validate data type: Intercom + if not isinstance(v, Intercom): + error_messages.append(f"Error! Input type `{type(v)}` is not `Intercom`") + else: + match += 1 + # validate data type: Notion + if not isinstance(v, Notion): + error_messages.append(f"Error! Input type `{type(v)}` is not `Notion`") + else: + match += 1 + # validate data type: NotionOauthMulti + if not isinstance(v, NotionOauthMulti): + error_messages.append(f"Error! Input type `{type(v)}` is not `NotionOauthMulti`") + else: + match += 1 + # validate data type: NotionOauthMultiCustom + if not isinstance(v, NotionOauthMultiCustom): + error_messages.append(f"Error! Input type `{type(v)}` is not `NotionOauthMultiCustom`") + else: + match += 1 + # validate data type: OneDrive + if not isinstance(v, OneDrive): + error_messages.append(f"Error! Input type `{type(v)}` is not `OneDrive`") + else: + match += 1 + # validate data type: Sharepoint + if not isinstance(v, Sharepoint): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `Sharepoint`") + else: + match += 1 + # validate data type: WebCrawler + if not isinstance(v, WebCrawler): + error_messages.append(f"Error! Input type `{type(v)}` is not `WebCrawler`") + else: + match += 1 + # validate data type: FileUpload + if not isinstance(v, FileUpload): + error_messages.append(f"Error! Input type `{type(v)}` is not `FileUpload`") + else: + match += 1 + # validate data type: Github + if not isinstance(v, Github): + error_messages.append(f"Error! Input type `{type(v)}` is not `Github`") + else: + match += 1 + # validate data type: Fireflies + if not isinstance(v, Fireflies): + error_messages.append(f"Error! Input type `{type(v)}` is not `Fireflies`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in CreateSourceConnectorRequestInner with oneOf schemas: AmazonS3, AzureBlobStorage, Confluence, Discord, Dropbox, DropboxOauth, DropboxOauthMulti, DropboxOauthMultiCustom, FileUpload, Firecrawl, Fireflies, Github, GoogleCloudStorage, GoogleDrive, GoogleDriveOAuth, GoogleDriveOauthMulti, GoogleDriveOauthMultiCustom, Intercom, Notion, NotionOauthMulti, NotionOauthMultiCustom, OneDrive, Sharepoint, WebCrawler. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in CreateSourceConnectorRequestInner with oneOf schemas: AmazonS3, AzureBlobStorage, Confluence, Discord, Dropbox, DropboxOauth, DropboxOauthMulti, DropboxOauthMultiCustom, FileUpload, Firecrawl, Fireflies, Github, GoogleCloudStorage, GoogleDrive, GoogleDriveOAuth, GoogleDriveOauthMulti, GoogleDriveOauthMultiCustom, Intercom, Notion, NotionOauthMulti, NotionOauthMultiCustom, OneDrive, Sharepoint, WebCrawler. 
Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into AmazonS3 + try: + instance.actual_instance = AmazonS3.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into AzureBlobStorage + try: + instance.actual_instance = AzureBlobStorage.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Confluence + try: + instance.actual_instance = Confluence.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Discord + try: + instance.actual_instance = Discord.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Dropbox + try: + instance.actual_instance = Dropbox.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DropboxOauth + try: + instance.actual_instance = DropboxOauth.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DropboxOauthMulti + try: + instance.actual_instance = DropboxOauthMulti.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DropboxOauthMultiCustom + try: + instance.actual_instance = DropboxOauthMultiCustom.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into 
GoogleDriveOAuth + try: + instance.actual_instance = GoogleDriveOAuth.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDrive + try: + instance.actual_instance = GoogleDrive.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDriveOauthMulti + try: + instance.actual_instance = GoogleDriveOauthMulti.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDriveOauthMultiCustom + try: + instance.actual_instance = GoogleDriveOauthMultiCustom.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Firecrawl + try: + instance.actual_instance = Firecrawl.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleCloudStorage + try: + instance.actual_instance = GoogleCloudStorage.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Intercom + try: + instance.actual_instance = Intercom.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Notion + try: + instance.actual_instance = Notion.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into NotionOauthMulti + try: + instance.actual_instance = NotionOauthMulti.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into NotionOauthMultiCustom + try: + instance.actual_instance = NotionOauthMultiCustom.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + 
error_messages.append(str(e)) + # deserialize data into OneDrive + try: + instance.actual_instance = OneDrive.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Sharepoint + try: + instance.actual_instance = Sharepoint.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into WebCrawler + try: + instance.actual_instance = WebCrawler.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into FileUpload + try: + instance.actual_instance = FileUpload.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Github + try: + instance.actual_instance = Github.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Fireflies + try: + instance.actual_instance = Fireflies.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into CreateSourceConnectorRequestInner with oneOf schemas: AmazonS3, AzureBlobStorage, Confluence, Discord, Dropbox, DropboxOauth, DropboxOauthMulti, DropboxOauthMultiCustom, FileUpload, Firecrawl, Fireflies, Github, GoogleCloudStorage, GoogleDrive, GoogleDriveOAuth, GoogleDriveOauthMulti, GoogleDriveOauthMultiCustom, Intercom, Notion, NotionOauthMulti, NotionOauthMultiCustom, OneDrive, Sharepoint, WebCrawler. 
Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into CreateSourceConnectorRequestInner with oneOf schemas: AmazonS3, AzureBlobStorage, Confluence, Discord, Dropbox, DropboxOauth, DropboxOauthMulti, DropboxOauthMultiCustom, FileUpload, Firecrawl, Fireflies, Github, GoogleCloudStorage, GoogleDrive, GoogleDriveOAuth, GoogleDriveOauthMulti, GoogleDriveOauthMultiCustom, Intercom, Notion, NotionOauthMulti, NotionOauthMultiCustom, OneDrive, Sharepoint, WebCrawler. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], AmazonS3, AzureBlobStorage, Confluence, Discord, Dropbox, DropboxOauth, DropboxOauthMulti, DropboxOauthMultiCustom, FileUpload, Firecrawl, Fireflies, Github, GoogleCloudStorage, GoogleDrive, GoogleDriveOAuth, GoogleDriveOauthMulti, GoogleDriveOauthMultiCustom, Intercom, Notion, NotionOauthMulti, NotionOauthMultiCustom, OneDrive, Sharepoint, WebCrawler]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/src/python/vectorize_client/models/create_source_connector_response.py b/vectorize_client/models/create_source_connector_response.py similarity index 100% rename from 
src/python/vectorize_client/models/create_source_connector_response.py rename to vectorize_client/models/create_source_connector_response.py diff --git a/src/python/vectorize_client/models/created_ai_platform_connector.py b/vectorize_client/models/created_ai_platform_connector.py similarity index 100% rename from src/python/vectorize_client/models/created_ai_platform_connector.py rename to vectorize_client/models/created_ai_platform_connector.py diff --git a/src/python/vectorize_client/models/created_destination_connector.py b/vectorize_client/models/created_destination_connector.py similarity index 100% rename from src/python/vectorize_client/models/created_destination_connector.py rename to vectorize_client/models/created_destination_connector.py diff --git a/src/python/vectorize_client/models/created_source_connector.py b/vectorize_client/models/created_source_connector.py similarity index 100% rename from src/python/vectorize_client/models/created_source_connector.py rename to vectorize_client/models/created_source_connector.py diff --git a/vectorize_client/models/datastax.py b/vectorize_client/models/datastax.py new file mode 100644 index 0000000..3765cc1 --- /dev/null +++ b/vectorize_client/models/datastax.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.datastax_config import DATASTAXConfig +from typing import Optional, Set +from typing_extensions import Self + +class Datastax(BaseModel): + """ + Datastax + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"DATASTAX\")") + config: DATASTAXConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['DATASTAX']): + raise ValueError("must be one of enum values ('DATASTAX')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Datastax from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Datastax from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": DATASTAXConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/datastax1.py b/vectorize_client/models/datastax1.py new file mode 100644 index 0000000..6e3f41b --- /dev/null +++ b/vectorize_client/models/datastax1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.datastax_config import DATASTAXConfig +from typing import Optional, Set +from typing_extensions import Self + +class Datastax1(BaseModel): + """ + Datastax1 + """ # noqa: E501 + config: Optional[DATASTAXConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Datastax1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Datastax1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": DATASTAXConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/datastax2.py b/vectorize_client/models/datastax2.py new file mode 100644 index 0000000..f11a0ad --- /dev/null +++ b/vectorize_client/models/datastax2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Datastax2(BaseModel): + """ + Datastax2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"DATASTAX\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['DATASTAX']): + raise ValueError("must be one of enum values ('DATASTAX')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Datastax2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Datastax2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/datastax_auth_config.py b/vectorize_client/models/datastax_auth_config.py new file mode 100644 index 0000000..215dd47 --- /dev/null +++ b/vectorize_client/models/datastax_auth_config.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class DATASTAXAuthConfig(BaseModel): + """ + Authentication configuration for DataStax Astra + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your DataStax integration") + endpoint_secret: StrictStr = Field(description="API Endpoint. Example: Enter your API endpoint") + token: Annotated[str, Field(strict=True)] = Field(description="Application Token. 
Example: Enter your application token") + __properties: ClassVar[List[str]] = ["name", "endpoint_secret", "token"] + + @field_validator('token') + def token_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DATASTAXAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DATASTAXAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "endpoint_secret": obj.get("endpoint_secret"), + "token": obj.get("token") + }) + return _obj + + diff --git a/vectorize_client/models/datastax_config.py b/vectorize_client/models/datastax_config.py new file mode 100644 index 0000000..dfe9dd9 --- /dev/null +++ b/vectorize_client/models/datastax_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class DATASTAXConfig(BaseModel): + """ + Configuration for DataStax Astra connector + """ # noqa: E501 + collection: Annotated[str, Field(strict=True)] = Field(description="Collection Name. 
Example: Enter collection name") + __properties: ClassVar[List[str]] = ["collection"] + + @field_validator('collection') + def collection_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^[a-zA-Z][a-zA-Z0-9_]*$", value): + raise ValueError(r"must validate the regular expression /^[a-zA-Z][a-zA-Z0-9_]*$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DATASTAXConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DATASTAXConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "collection": obj.get("collection") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/deep_research_result.py b/vectorize_client/models/deep_research_result.py similarity index 100% rename from src/python/vectorize_client/models/deep_research_result.py rename to vectorize_client/models/deep_research_result.py diff --git a/src/python/vectorize_client/models/delete_ai_platform_connector_response.py b/vectorize_client/models/delete_ai_platform_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/delete_ai_platform_connector_response.py rename to vectorize_client/models/delete_ai_platform_connector_response.py diff --git a/src/python/vectorize_client/models/delete_destination_connector_response.py b/vectorize_client/models/delete_destination_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/delete_destination_connector_response.py rename to vectorize_client/models/delete_destination_connector_response.py diff --git a/src/python/vectorize_client/models/delete_file_response.py b/vectorize_client/models/delete_file_response.py similarity index 100% rename from src/python/vectorize_client/models/delete_file_response.py rename to vectorize_client/models/delete_file_response.py diff --git a/src/python/vectorize_client/models/delete_pipeline_response.py b/vectorize_client/models/delete_pipeline_response.py similarity index 100% rename from src/python/vectorize_client/models/delete_pipeline_response.py rename to 
vectorize_client/models/delete_pipeline_response.py diff --git a/src/python/vectorize_client/models/delete_source_connector_response.py b/vectorize_client/models/delete_source_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/delete_source_connector_response.py rename to vectorize_client/models/delete_source_connector_response.py diff --git a/src/python/vectorize_client/models/destination_connector.py b/vectorize_client/models/destination_connector.py similarity index 100% rename from src/python/vectorize_client/models/destination_connector.py rename to vectorize_client/models/destination_connector.py diff --git a/vectorize_client/models/destination_connector_input.py b/vectorize_client/models/destination_connector_input.py new file mode 100644 index 0000000..45c5540 --- /dev/null +++ b/vectorize_client/models/destination_connector_input.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.destination_connector_input_config import DestinationConnectorInputConfig +from typing import Optional, Set +from typing_extensions import Self + +class DestinationConnectorInput(BaseModel): + """ + Destination connector configuration + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the destination connector") + type: StrictStr = Field(description="Type of destination connector") + config: DestinationConnectorInputConfig + __properties: ClassVar[List[str]] = ["id", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['CAPELLA', 'DATASTAX', 'ELASTIC', 'PINECONE', 'SINGLESTORE', 'MILVUS', 'POSTGRESQL', 'QDRANT', 'SUPABASE', 'WEAVIATE', 'AZUREAISEARCH', 'TURBOPUFFER']): + raise ValueError("must be one of enum values ('CAPELLA', 'DATASTAX', 'ELASTIC', 'PINECONE', 'SINGLESTORE', 'MILVUS', 'POSTGRESQL', 'QDRANT', 'SUPABASE', 'WEAVIATE', 'AZUREAISEARCH', 'TURBOPUFFER')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DestinationConnectorInput from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + 
"""Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DestinationConnectorInput from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type"), + "config": DestinationConnectorInputConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/destination_connector_input_config.py b/vectorize_client/models/destination_connector_input_config.py new file mode 100644 index 0000000..421d999 --- /dev/null +++ b/vectorize_client/models/destination_connector_input_config.py @@ -0,0 +1,277 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.azureaisearch_config import AZUREAISEARCHConfig +from vectorize_client.models.capella_config import CAPELLAConfig +from vectorize_client.models.datastax_config import DATASTAXConfig +from vectorize_client.models.elastic_config import ELASTICConfig +from vectorize_client.models.milvus_config import MILVUSConfig +from vectorize_client.models.pinecone_config import PINECONEConfig +from vectorize_client.models.postgresql_config import POSTGRESQLConfig +from vectorize_client.models.qdrant_config import QDRANTConfig +from vectorize_client.models.singlestore_config import SINGLESTOREConfig +from vectorize_client.models.supabase_config import SUPABASEConfig +from vectorize_client.models.turbopuffer_config import TURBOPUFFERConfig +from vectorize_client.models.weaviate_config import WEAVIATEConfig +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +DESTINATIONCONNECTORINPUTCONFIG_ONE_OF_SCHEMAS = ["AZUREAISEARCHConfig", "CAPELLAConfig", "DATASTAXConfig", "ELASTICConfig", "MILVUSConfig", "PINECONEConfig", "POSTGRESQLConfig", "QDRANTConfig", "SINGLESTOREConfig", "SUPABASEConfig", "TURBOPUFFERConfig", "WEAVIATEConfig"] + +class DestinationConnectorInputConfig(BaseModel): + """ + Configuration specific to the connector type + """ + # data type: CAPELLAConfig + oneof_schema_1_validator: Optional[CAPELLAConfig] = None + # data type: DATASTAXConfig + oneof_schema_2_validator: Optional[DATASTAXConfig] = None + # data type: ELASTICConfig + oneof_schema_3_validator: Optional[ELASTICConfig] = None + # data type: PINECONEConfig + oneof_schema_4_validator: Optional[PINECONEConfig] = None + # data type: SINGLESTOREConfig + oneof_schema_5_validator: 
Optional[SINGLESTOREConfig] = None + # data type: MILVUSConfig + oneof_schema_6_validator: Optional[MILVUSConfig] = None + # data type: POSTGRESQLConfig + oneof_schema_7_validator: Optional[POSTGRESQLConfig] = None + # data type: QDRANTConfig + oneof_schema_8_validator: Optional[QDRANTConfig] = None + # data type: SUPABASEConfig + oneof_schema_9_validator: Optional[SUPABASEConfig] = None + # data type: WEAVIATEConfig + oneof_schema_10_validator: Optional[WEAVIATEConfig] = None + # data type: AZUREAISEARCHConfig + oneof_schema_11_validator: Optional[AZUREAISEARCHConfig] = None + # data type: TURBOPUFFERConfig + oneof_schema_12_validator: Optional[TURBOPUFFERConfig] = None + actual_instance: Optional[Union[AZUREAISEARCHConfig, CAPELLAConfig, DATASTAXConfig, ELASTICConfig, MILVUSConfig, PINECONEConfig, POSTGRESQLConfig, QDRANTConfig, SINGLESTOREConfig, SUPABASEConfig, TURBOPUFFERConfig, WEAVIATEConfig]] = None + one_of_schemas: Set[str] = { "AZUREAISEARCHConfig", "CAPELLAConfig", "DATASTAXConfig", "ELASTICConfig", "MILVUSConfig", "PINECONEConfig", "POSTGRESQLConfig", "QDRANTConfig", "SINGLESTOREConfig", "SUPABASEConfig", "TURBOPUFFERConfig", "WEAVIATEConfig" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = DestinationConnectorInputConfig.model_construct() + error_messages = [] + match = 0 + # validate data type: CAPELLAConfig + if not isinstance(v, CAPELLAConfig): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `CAPELLAConfig`") + else: + match += 1 + # validate data type: DATASTAXConfig + if not isinstance(v, DATASTAXConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `DATASTAXConfig`") + else: + match += 1 + # validate data type: ELASTICConfig + if not isinstance(v, ELASTICConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `ELASTICConfig`") + else: + match += 1 + # validate data type: PINECONEConfig + if not isinstance(v, PINECONEConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `PINECONEConfig`") + else: + match += 1 + # validate data type: SINGLESTOREConfig + if not isinstance(v, SINGLESTOREConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `SINGLESTOREConfig`") + else: + match += 1 + # validate data type: MILVUSConfig + if not isinstance(v, MILVUSConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `MILVUSConfig`") + else: + match += 1 + # validate data type: POSTGRESQLConfig + if not isinstance(v, POSTGRESQLConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `POSTGRESQLConfig`") + else: + match += 1 + # validate data type: QDRANTConfig + if not isinstance(v, QDRANTConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `QDRANTConfig`") + else: + match += 1 + # validate data type: SUPABASEConfig + if not isinstance(v, SUPABASEConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `SUPABASEConfig`") + else: + match += 1 + # validate data type: WEAVIATEConfig + if not isinstance(v, WEAVIATEConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `WEAVIATEConfig`") + else: + match += 1 + # validate data type: AZUREAISEARCHConfig + if not isinstance(v, AZUREAISEARCHConfig): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `AZUREAISEARCHConfig`") + else: + match += 1 + # validate data type: TURBOPUFFERConfig + if not isinstance(v, TURBOPUFFERConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `TURBOPUFFERConfig`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in DestinationConnectorInputConfig with oneOf schemas: AZUREAISEARCHConfig, CAPELLAConfig, DATASTAXConfig, ELASTICConfig, MILVUSConfig, PINECONEConfig, POSTGRESQLConfig, QDRANTConfig, SINGLESTOREConfig, SUPABASEConfig, TURBOPUFFERConfig, WEAVIATEConfig. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in DestinationConnectorInputConfig with oneOf schemas: AZUREAISEARCHConfig, CAPELLAConfig, DATASTAXConfig, ELASTICConfig, MILVUSConfig, PINECONEConfig, POSTGRESQLConfig, QDRANTConfig, SINGLESTOREConfig, SUPABASEConfig, TURBOPUFFERConfig, WEAVIATEConfig. 
Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into CAPELLAConfig + try: + instance.actual_instance = CAPELLAConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DATASTAXConfig + try: + instance.actual_instance = DATASTAXConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into ELASTICConfig + try: + instance.actual_instance = ELASTICConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into PINECONEConfig + try: + instance.actual_instance = PINECONEConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into SINGLESTOREConfig + try: + instance.actual_instance = SINGLESTOREConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into MILVUSConfig + try: + instance.actual_instance = MILVUSConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into POSTGRESQLConfig + try: + instance.actual_instance = POSTGRESQLConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into QDRANTConfig + try: + instance.actual_instance = QDRANTConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize 
data into SUPABASEConfig + try: + instance.actual_instance = SUPABASEConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into WEAVIATEConfig + try: + instance.actual_instance = WEAVIATEConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into AZUREAISEARCHConfig + try: + instance.actual_instance = AZUREAISEARCHConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into TURBOPUFFERConfig + try: + instance.actual_instance = TURBOPUFFERConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into DestinationConnectorInputConfig with oneOf schemas: AZUREAISEARCHConfig, CAPELLAConfig, DATASTAXConfig, ELASTICConfig, MILVUSConfig, PINECONEConfig, POSTGRESQLConfig, QDRANTConfig, SINGLESTOREConfig, SUPABASEConfig, TURBOPUFFERConfig, WEAVIATEConfig. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into DestinationConnectorInputConfig with oneOf schemas: AZUREAISEARCHConfig, CAPELLAConfig, DATASTAXConfig, ELASTICConfig, MILVUSConfig, PINECONEConfig, POSTGRESQLConfig, QDRANTConfig, SINGLESTOREConfig, SUPABASEConfig, TURBOPUFFERConfig, WEAVIATEConfig. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], AZUREAISEARCHConfig, CAPELLAConfig, DATASTAXConfig, ELASTICConfig, MILVUSConfig, PINECONEConfig, POSTGRESQLConfig, QDRANTConfig, SINGLESTOREConfig, SUPABASEConfig, TURBOPUFFERConfig, WEAVIATEConfig]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/src/python/vectorize_client/models/destination_connector_schema.py b/vectorize_client/models/destination_connector_schema.py similarity index 100% rename from src/python/vectorize_client/models/destination_connector_schema.py rename to vectorize_client/models/destination_connector_schema.py diff --git a/src/python/vectorize_client/models/destination_connector_type.py b/vectorize_client/models/destination_connector_type.py similarity index 92% rename from src/python/vectorize_client/models/destination_connector_type.py rename to vectorize_client/models/destination_connector_type.py index ca06824..0701c9c 100644 --- a/src/python/vectorize_client/models/destination_connector_type.py +++ b/vectorize_client/models/destination_connector_type.py @@ -37,9 +37,7 @@ class DestinationConnectorType(str, Enum): SUPABASE = 'SUPABASE' WEAVIATE = 'WEAVIATE' AZUREAISEARCH = 'AZUREAISEARCH' - 
VECTORIZE = 'VECTORIZE' - CHROMA = 'CHROMA' - MONGODB = 'MONGODB' + TURBOPUFFER = 'TURBOPUFFER' @classmethod def from_json(cls, json_str: str) -> Self: diff --git a/vectorize_client/models/discord.py b/vectorize_client/models/discord.py new file mode 100644 index 0000000..03884a6 --- /dev/null +++ b/vectorize_client/models/discord.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.discord_config import DISCORDConfig +from typing import Optional, Set +from typing_extensions import Self + +class Discord(BaseModel): + """ + Discord + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"DISCORD\")") + config: DISCORDConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['DISCORD']): + raise ValueError("must be one of enum values ('DISCORD')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> 
Optional[Self]: + """Create an instance of Discord from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Discord from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": DISCORDConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/discord1.py b/vectorize_client/models/discord1.py new file mode 100644 index 0000000..97743a0 --- /dev/null +++ b/vectorize_client/models/discord1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
class Discord1(BaseModel):
    """Wrapper model carrying an optional Discord connector configuration."""  # noqa: E501

    # Nested connector configuration; serialized through its own to_dict().
    config: Optional[DISCORDConfig] = None
    __properties: ClassVar[List[str]] = ["config"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed string of the model using field aliases."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Return the JSON representation of the model using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of Discord1 from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict representation of the model using field aliases.

        Differs from a plain ``self.model_dump(by_alias=True)`` in that
        ``None``-valued fields are omitted (``exclude_none=True``) and the
        nested ``config`` model is serialized via its own ``to_dict``.
        """
        excluded: Set[str] = set()
        serialized = self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )
        # Prefer the nested model's own serialization over pydantic's default.
        if self.config:
            serialized['config'] = self.config.to_dict()
        return serialized

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of Discord1 from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        has_config = obj.get("config") is not None
        return cls.model_validate({
            "config": DISCORDConfig.from_dict(obj["config"]) if has_config else None
        })
class Discord2(BaseModel):
    """Reference to an existing Discord connector by id and type."""  # noqa: E501

    id: StrictStr = Field(description="Unique identifier for the connector")
    type: StrictStr = Field(description="Connector type (must be \"DISCORD\")")
    __properties: ClassVar[List[str]] = ["id", "type"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Validate that ``type`` is the literal 'DISCORD'."""
        if value not in ('DISCORD',):
            raise ValueError("must be one of enum values ('DISCORD')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed string of the model using field aliases."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Return the JSON representation of the model using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of Discord2 from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict representation of the model using field aliases.

        Differs from a plain ``self.model_dump(by_alias=True)`` in that
        ``None``-valued fields are omitted (``exclude_none=True``).
        """
        excluded: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of Discord2 from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        payload = {key: obj.get(key) for key in ("id", "type")}
        return cls.model_validate(payload)
Example: Enter channel ID", alias="channel-ids") + __properties: ClassVar[List[str]] = ["name", "server-id", "bot-token", "channel-ids"] + + @field_validator('bot_token') + def bot_token_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DISCORDAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
class DISCORDConfig(BaseModel):
    """Filter and ingestion settings for the Discord connector."""  # noqa: E501

    emoji: Optional[StrictStr] = Field(default=None, description="Emoji Filter. Example: Enter custom emoji filter name")
    author: Optional[StrictStr] = Field(default=None, description="Author Filter. Example: Enter author name")
    ignore_author: Optional[StrictStr] = Field(default=None, description="Ignore Author Filter. Example: Enter ignore author name", alias="ignore-author")
    limit: Optional[Union[Annotated[float, Field(strict=True, ge=1)], Annotated[int, Field(strict=True, ge=1)]]] = Field(default=10000, description="Limit. Example: Enter limit")
    thread_message_inclusion: Optional[StrictStr] = Field(default='ALL', description="Thread Message Inclusion", alias="thread-message-inclusion")
    filter_logic: Optional[StrictStr] = Field(default='AND', description="Filter Logic", alias="filter-logic")
    thread_message_mode: Optional[StrictStr] = Field(default='CONCATENATE', description="Thread Message Mode", alias="thread-message-mode")
    __properties: ClassVar[List[str]] = ["emoji", "author", "ignore-author", "limit", "thread-message-inclusion", "filter-logic", "thread-message-mode"]

    @field_validator('thread_message_inclusion')
    def thread_message_inclusion_validate_enum(cls, value):
        """Validate that the value, when given, is an allowed option."""
        if value is None:
            return value
        if value not in ('ALL', 'FILTER'):
            raise ValueError("must be one of enum values ('ALL', 'FILTER')")
        return value

    @field_validator('filter_logic')
    def filter_logic_validate_enum(cls, value):
        """Validate that the value, when given, is an allowed option."""
        if value is None:
            return value
        if value not in ('AND', 'OR'):
            raise ValueError("must be one of enum values ('AND', 'OR')")
        return value

    @field_validator('thread_message_mode')
    def thread_message_mode_validate_enum(cls, value):
        """Validate that the value, when given, is an allowed option."""
        if value is None:
            return value
        if value not in ('CONCATENATE', 'SINGLE'):
            raise ValueError("must be one of enum values ('CONCATENATE', 'SINGLE')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed string of the model using field aliases."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Return the JSON representation of the model using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of DISCORDConfig from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict representation of the model using field aliases.

        Differs from a plain ``self.model_dump(by_alias=True)`` in that
        ``None``-valued fields are omitted (``exclude_none=True``).
        """
        excluded: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of DISCORDConfig from a dict, applying defaults."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        payload = {
            "emoji": obj.get("emoji"),
            "author": obj.get("author"),
            "ignore-author": obj.get("ignore-author"),
        }
        # Fields with generator-declared defaults fall back when absent/None.
        defaults = {
            "limit": 10000,
            "thread-message-inclusion": 'ALL',
            "filter-logic": 'AND',
            "thread-message-mode": 'CONCATENATE',
        }
        for key, fallback in defaults.items():
            raw = obj.get(key)
            payload[key] = fallback if raw is None else raw
        return cls.model_validate(payload)
class Dropbox(BaseModel):
    """Named Dropbox connector definition with its configuration."""  # noqa: E501

    name: StrictStr = Field(description="Name of the connector")
    type: StrictStr = Field(description="Connector type (must be \"DROPBOX\")")
    # Nested connector configuration; serialized through its own to_dict().
    config: DROPBOXConfig
    __properties: ClassVar[List[str]] = ["name", "type", "config"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Validate that ``type`` is the literal 'DROPBOX'."""
        if value not in ('DROPBOX',):
            raise ValueError("must be one of enum values ('DROPBOX')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed string of the model using field aliases."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Return the JSON representation of the model using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of Dropbox from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict representation of the model using field aliases.

        Differs from a plain ``self.model_dump(by_alias=True)`` in that
        ``None``-valued fields are omitted (``exclude_none=True``) and the
        nested ``config`` model is serialized via its own ``to_dict``.
        """
        excluded: Set[str] = set()
        serialized = self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )
        # Prefer the nested model's own serialization over pydantic's default.
        if self.config:
            serialized['config'] = self.config.to_dict()
        return serialized

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of Dropbox from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        has_config = obj.get("config") is not None
        return cls.model_validate({
            "name": obj.get("name"),
            "type": obj.get("type"),
            "config": DROPBOXConfig.from_dict(obj["config"]) if has_config else None
        })
class Dropbox1(BaseModel):
    """Wrapper model carrying an optional Dropbox connector configuration."""  # noqa: E501

    # Nested connector configuration; serialized through its own to_dict().
    config: Optional[DROPBOXConfig] = None
    __properties: ClassVar[List[str]] = ["config"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed string of the model using field aliases."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Return the JSON representation of the model using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of Dropbox1 from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict representation of the model using field aliases.

        Differs from a plain ``self.model_dump(by_alias=True)`` in that
        ``None``-valued fields are omitted (``exclude_none=True``) and the
        nested ``config`` model is serialized via its own ``to_dict``.
        """
        excluded: Set[str] = set()
        serialized = self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )
        # Prefer the nested model's own serialization over pydantic's default.
        if self.config:
            serialized['config'] = self.config.to_dict()
        return serialized

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of Dropbox1 from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        has_config = obj.get("config") is not None
        return cls.model_validate({
            "config": DROPBOXConfig.from_dict(obj["config"]) if has_config else None
        })
class Dropbox2(BaseModel):
    """Reference to an existing Dropbox connector by id and type."""  # noqa: E501

    id: StrictStr = Field(description="Unique identifier for the connector")
    type: StrictStr = Field(description="Connector type (must be \"DROPBOX\")")
    __properties: ClassVar[List[str]] = ["id", "type"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Validate that ``type`` is the literal 'DROPBOX'."""
        if value not in ('DROPBOX',):
            raise ValueError("must be one of enum values ('DROPBOX')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed string of the model using field aliases."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Return the JSON representation of the model using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of Dropbox2 from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict representation of the model using field aliases.

        Differs from a plain ``self.model_dump(by_alias=True)`` in that
        ``None``-valued fields are omitted (``exclude_none=True``).
        """
        excluded: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of Dropbox2 from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        payload = {key: obj.get(key) for key in ("id", "type")}
        return cls.model_validate(payload)
Example: Authorize", alias="refresh-token") + __properties: ClassVar[List[str]] = ["name", "refresh-token"] + + @field_validator('refresh_token') + def refresh_token_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DROPBOXAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
class DROPBOXConfig(BaseModel):
    """Folder-scope configuration for the Dropbox (Legacy) connector."""  # noqa: E501

    path_prefix: Optional[Annotated[str, Field(strict=True)]] = Field(default=None, description="Read from these folders (optional). Example: Enter Path: /exampleFolder/subFolder", alias="path-prefix")
    __properties: ClassVar[List[str]] = ["path-prefix"]

    @field_validator('path_prefix')
    def path_prefix_validate_regular_expression(cls, value):
        """Validate that the value, when given, matches the path pattern."""
        if value is None:
            return value
        # Paths must be absolute (leading '/').
        if re.match(r"^\/.*$", value) is None:
            raise ValueError(r"must validate the regular expression /^\/.*$/")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed string of the model using field aliases."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Return the JSON representation of the model using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of DROPBOXConfig from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict representation of the model using field aliases.

        Differs from a plain ``self.model_dump(by_alias=True)`` in that
        ``None``-valued fields are omitted (``exclude_none=True``).
        """
        excluded: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of DROPBOXConfig from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "path-prefix": obj.get("path-prefix")
        })
to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DropboxOauth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DropboxOauth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": DROPBOXOAUTHAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/dropbox_oauth1.py b/vectorize_client/models/dropbox_oauth1.py new file mode 100644 index 0000000..7fa6f90 --- /dev/null +++ b/vectorize_client/models/dropbox_oauth1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 
class DropboxOauth1(BaseModel):
    """Wrapper model carrying an optional Dropbox OAuth auth configuration."""  # noqa: E501

    # Nested auth configuration; serialized through its own to_dict().
    config: Optional[DROPBOXOAUTHAuthConfig] = None
    __properties: ClassVar[List[str]] = ["config"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed string of the model using field aliases."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Return the JSON representation of the model using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of DropboxOauth1 from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict representation of the model using field aliases.

        Differs from a plain ``self.model_dump(by_alias=True)`` in that
        ``None``-valued fields are omitted (``exclude_none=True``) and the
        nested ``config`` model is serialized via its own ``to_dict``.
        """
        excluded: Set[str] = set()
        serialized = self.model_dump(
            by_alias=True,
            exclude=excluded,
            exclude_none=True,
        )
        # Prefer the nested model's own serialization over pydantic's default.
        if self.config:
            serialized['config'] = self.config.to_dict()
        return serialized

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of DropboxOauth1 from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        has_config = obj.get("config") is not None
        return cls.model_validate({
            "config": DROPBOXOAUTHAuthConfig.from_dict(obj["config"]) if has_config else None
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class DropboxOauth2(BaseModel): + """ + DropboxOauth2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"DROPBOX_OAUTH\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['DROPBOX_OAUTH']): + raise ValueError("must be one of enum values ('DROPBOX_OAUTH')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DropboxOauth2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DropboxOauth2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/dropbox_oauth_multi.py b/vectorize_client/models/dropbox_oauth_multi.py new file mode 100644 index 0000000..98b95ce --- /dev/null +++ b/vectorize_client/models/dropbox_oauth_multi.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.dropboxoauthmulti_auth_config import DROPBOXOAUTHMULTIAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class DropboxOauthMulti(BaseModel): + """ + DropboxOauthMulti + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"DROPBOX_OAUTH_MULTI\")") + config: DROPBOXOAUTHMULTIAuthConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['DROPBOX_OAUTH_MULTI']): + raise ValueError("must be one of enum values ('DROPBOX_OAUTH_MULTI')") + return value + + model_config = ConfigDict( + populate_by_name=True, 
+ validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DropboxOauthMulti from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DropboxOauthMulti from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": DROPBOXOAUTHMULTIAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/dropbox_oauth_multi1.py b/vectorize_client/models/dropbox_oauth_multi1.py new file mode 100644 index 0000000..8155107 --- /dev/null +++ b/vectorize_client/models/dropbox_oauth_multi1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + 
+""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.dropboxoauthmulti_auth_config import DROPBOXOAUTHMULTIAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class DropboxOauthMulti1(BaseModel): + """ + DropboxOauthMulti1 + """ # noqa: E501 + config: Optional[DROPBOXOAUTHMULTIAuthConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DropboxOauthMulti1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DropboxOauthMulti1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": DROPBOXOAUTHMULTIAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/dropbox_oauth_multi2.py b/vectorize_client/models/dropbox_oauth_multi2.py new file mode 100644 index 0000000..29de78d --- /dev/null +++ b/vectorize_client/models/dropbox_oauth_multi2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class DropboxOauthMulti2(BaseModel): + """ + DropboxOauthMulti2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"DROPBOX_OAUTH_MULTI\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['DROPBOX_OAUTH_MULTI']): + raise ValueError("must be one of enum values ('DROPBOX_OAUTH_MULTI')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DropboxOauthMulti2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DropboxOauthMulti2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/dropbox_oauth_multi_custom.py b/vectorize_client/models/dropbox_oauth_multi_custom.py new file mode 100644 index 0000000..27703f5 --- /dev/null +++ b/vectorize_client/models/dropbox_oauth_multi_custom.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.dropboxoauthmulticustom_auth_config import DROPBOXOAUTHMULTICUSTOMAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class DropboxOauthMultiCustom(BaseModel): + """ + DropboxOauthMultiCustom + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"DROPBOX_OAUTH_MULTI_CUSTOM\")") + config: DROPBOXOAUTHMULTICUSTOMAuthConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['DROPBOX_OAUTH_MULTI_CUSTOM']): + raise ValueError("must be one of enum values 
('DROPBOX_OAUTH_MULTI_CUSTOM')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DropboxOauthMultiCustom from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DropboxOauthMultiCustom from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": DROPBOXOAUTHMULTICUSTOMAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/dropbox_oauth_multi_custom1.py b/vectorize_client/models/dropbox_oauth_multi_custom1.py new file mode 100644 index 0000000..719175f --- /dev/null +++ b/vectorize_client/models/dropbox_oauth_multi_custom1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.dropboxoauthmulticustom_auth_config import DROPBOXOAUTHMULTICUSTOMAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class DropboxOauthMultiCustom1(BaseModel): + """ + DropboxOauthMultiCustom1 + """ # noqa: E501 + config: Optional[DROPBOXOAUTHMULTICUSTOMAuthConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DropboxOauthMultiCustom1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DropboxOauthMultiCustom1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": DROPBOXOAUTHMULTICUSTOMAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/dropbox_oauth_multi_custom2.py b/vectorize_client/models/dropbox_oauth_multi_custom2.py new file mode 100644 index 0000000..777298d --- /dev/null +++ b/vectorize_client/models/dropbox_oauth_multi_custom2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class DropboxOauthMultiCustom2(BaseModel): + """ + DropboxOauthMultiCustom2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"DROPBOX_OAUTH_MULTI_CUSTOM\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['DROPBOX_OAUTH_MULTI_CUSTOM']): + raise ValueError("must be one of enum values ('DROPBOX_OAUTH_MULTI_CUSTOM')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DropboxOauthMultiCustom2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DropboxOauthMultiCustom2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/dropboxoauth_auth_config.py b/vectorize_client/models/dropboxoauth_auth_config.py new file mode 100644 index 0000000..4fbc301 --- /dev/null +++ b/vectorize_client/models/dropboxoauth_auth_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class DROPBOXOAUTHAuthConfig(BaseModel): + """ + Authentication configuration for Dropbox OAuth + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + authorized_user: Optional[StrictStr] = Field(default=None, description="Authorized User", alias="authorized-user") + selection_details: StrictStr = Field(description="Connect Dropbox to Vectorize. 
Example: Authorize", alias="selection-details") + edited_users: Optional[StrictStr] = Field(default=None, alias="editedUsers") + reconnect_users: Optional[StrictStr] = Field(default=None, alias="reconnectUsers") + __properties: ClassVar[List[str]] = ["name", "authorized-user", "selection-details", "editedUsers", "reconnectUsers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DROPBOXOAUTHAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DROPBOXOAUTHAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "authorized-user": obj.get("authorized-user"), + "selection-details": obj.get("selection-details"), + "editedUsers": obj.get("editedUsers"), + "reconnectUsers": obj.get("reconnectUsers") + }) + return _obj + + diff --git a/vectorize_client/models/dropboxoauthmulti_auth_config.py b/vectorize_client/models/dropboxoauthmulti_auth_config.py new file mode 100644 index 0000000..e65a661 --- /dev/null +++ b/vectorize_client/models/dropboxoauthmulti_auth_config.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class DROPBOXOAUTHMULTIAuthConfig(BaseModel): + """ + Authentication configuration for Dropbox Multi-User (Vectorize) + """ # noqa: E501 + name: StrictStr = Field(description="Name. 
Example: Enter a descriptive name") + authorized_users: Optional[StrictStr] = Field(default=None, description="Authorized Users", alias="authorized-users") + edited_users: Optional[StrictStr] = Field(default=None, alias="editedUsers") + deleted_users: Optional[StrictStr] = Field(default=None, alias="deletedUsers") + __properties: ClassVar[List[str]] = ["name", "authorized-users", "editedUsers", "deletedUsers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DROPBOXOAUTHMULTIAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DROPBOXOAUTHMULTIAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "authorized-users": obj.get("authorized-users"), + "editedUsers": obj.get("editedUsers"), + "deletedUsers": obj.get("deletedUsers") + }) + return _obj + + diff --git a/vectorize_client/models/dropboxoauthmulticustom_auth_config.py b/vectorize_client/models/dropboxoauthmulticustom_auth_config.py new file mode 100644 index 0000000..b57b24a --- /dev/null +++ b/vectorize_client/models/dropboxoauthmulticustom_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class DROPBOXOAUTHMULTICUSTOMAuthConfig(BaseModel): + """ + Authentication configuration for Dropbox Multi-User (White Label) + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + app_key: StrictStr = Field(description="Dropbox App Key. Example: Enter App Key", alias="app-key") + app_secret: StrictStr = Field(description="Dropbox App Secret. 
Example: Enter App Secret", alias="app-secret") + authorized_users: Optional[StrictStr] = Field(default=None, description="Authorized Users", alias="authorized-users") + edited_users: Optional[StrictStr] = Field(default=None, alias="editedUsers") + deleted_users: Optional[StrictStr] = Field(default=None, alias="deletedUsers") + __properties: ClassVar[List[str]] = ["name", "app-key", "app-secret", "authorized-users", "editedUsers", "deletedUsers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DROPBOXOAUTHMULTICUSTOMAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DROPBOXOAUTHMULTICUSTOMAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "app-key": obj.get("app-key"), + "app-secret": obj.get("app-secret"), + "authorized-users": obj.get("authorized-users"), + "editedUsers": obj.get("editedUsers"), + "deletedUsers": obj.get("deletedUsers") + }) + return _obj + + diff --git a/vectorize_client/models/elastic.py b/vectorize_client/models/elastic.py new file mode 100644 index 0000000..26389b1 --- /dev/null +++ b/vectorize_client/models/elastic.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.elastic_config import ELASTICConfig +from typing import Optional, Set +from typing_extensions import Self + +class Elastic(BaseModel): + """ + Elastic + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"ELASTIC\")") + config: ELASTICConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['ELASTIC']): + raise ValueError("must be one of enum values ('ELASTIC')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Elastic from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Elastic from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": ELASTICConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/elastic1.py b/vectorize_client/models/elastic1.py new file mode 100644 index 0000000..7b1ba48 --- /dev/null +++ b/vectorize_client/models/elastic1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.elastic_config import ELASTICConfig +from typing import Optional, Set +from typing_extensions import Self + +class Elastic1(BaseModel): + """ + Elastic1 + """ # noqa: E501 + config: Optional[ELASTICConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Elastic1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Elastic1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": ELASTICConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/elastic2.py b/vectorize_client/models/elastic2.py new file mode 100644 index 0000000..20a7f8a --- /dev/null +++ b/vectorize_client/models/elastic2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Elastic2(BaseModel): + """ + Elastic2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"ELASTIC\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['ELASTIC']): + raise ValueError("must be one of enum values ('ELASTIC')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Elastic2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Elastic2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/elastic_auth_config.py b/vectorize_client/models/elastic_auth_config.py new file mode 100644 index 0000000..924b864 --- /dev/null +++ b/vectorize_client/models/elastic_auth_config.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class ELASTICAuthConfig(BaseModel): + """ + Authentication configuration for Elasticsearch + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Elastic integration") + host: StrictStr = Field(description="Host. Example: Enter your host") + port: StrictStr = Field(description="Port. Example: Enter your port") + api_key: Annotated[str, Field(strict=True)] = Field(description="API Key. 
Example: Enter your API key", alias="api-key") + __properties: ClassVar[List[str]] = ["name", "host", "port", "api-key"] + + @field_validator('api_key') + def api_key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ELASTICAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ELASTICAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "host": obj.get("host"), + "port": obj.get("port"), + "api-key": obj.get("api-key") + }) + return _obj + + diff --git a/vectorize_client/models/elastic_config.py b/vectorize_client/models/elastic_config.py new file mode 100644 index 0000000..4f8e34f --- /dev/null +++ b/vectorize_client/models/elastic_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class ELASTICConfig(BaseModel): + """ + Configuration for Elasticsearch connector + """ # noqa: E501 + index: Annotated[str, Field(strict=True, max_length=255)] = Field(description="Index Name. 
Example: Enter index name") + __properties: ClassVar[List[str]] = ["index"] + + @field_validator('index') + def index_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^(?!.*(--|\.\.))(?!^[\-.])(?!.*[\-.]$)[a-z0-9-.]*$", value): + raise ValueError(r"must validate the regular expression /^(?!.*(--|\.\.))(?!^[\-.])(?!.*[\-.]$)[a-z0-9-.]*$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ELASTICConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ELASTICConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "index": obj.get("index") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/extraction_chunking_strategy.py b/vectorize_client/models/extraction_chunking_strategy.py similarity index 100% rename from src/python/vectorize_client/models/extraction_chunking_strategy.py rename to vectorize_client/models/extraction_chunking_strategy.py diff --git a/src/python/vectorize_client/models/extraction_result.py b/vectorize_client/models/extraction_result.py similarity index 100% rename from src/python/vectorize_client/models/extraction_result.py rename to vectorize_client/models/extraction_result.py diff --git a/src/python/vectorize_client/models/extraction_result_response.py b/vectorize_client/models/extraction_result_response.py similarity index 100% rename from src/python/vectorize_client/models/extraction_result_response.py rename to vectorize_client/models/extraction_result_response.py diff --git a/src/python/vectorize_client/models/extraction_type.py b/vectorize_client/models/extraction_type.py similarity index 100% rename from src/python/vectorize_client/models/extraction_type.py rename to vectorize_client/models/extraction_type.py diff --git a/vectorize_client/models/file_upload.py b/vectorize_client/models/file_upload.py new file mode 100644 index 0000000..2e09bd7 --- /dev/null +++ b/vectorize_client/models/file_upload.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator 
(https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.fileupload_auth_config import FILEUPLOADAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class FileUpload(BaseModel): + """ + FileUpload + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"FILE_UPLOAD\")") + config: FILEUPLOADAuthConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['FILE_UPLOAD']): + raise ValueError("must be one of enum values ('FILE_UPLOAD')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FileUpload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FileUpload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": FILEUPLOADAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/file_upload1.py b/vectorize_client/models/file_upload1.py new file mode 100644 index 0000000..1f2c9d8 --- /dev/null +++ b/vectorize_client/models/file_upload1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.fileupload_auth_config import FILEUPLOADAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class FileUpload1(BaseModel): + """ + FileUpload1 + """ # noqa: E501 + config: Optional[FILEUPLOADAuthConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FileUpload1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FileUpload1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": FILEUPLOADAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/file_upload2.py b/vectorize_client/models/file_upload2.py new file mode 100644 index 0000000..db69ae7 --- /dev/null +++ b/vectorize_client/models/file_upload2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class FileUpload2(BaseModel): + """ + FileUpload2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"FILE_UPLOAD\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['FILE_UPLOAD']): + raise ValueError("must be one of enum values ('FILE_UPLOAD')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FileUpload2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FileUpload2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/fileupload_auth_config.py b/vectorize_client/models/fileupload_auth_config.py new file mode 100644 index 0000000..fc9a2e1 --- /dev/null +++ b/vectorize_client/models/fileupload_auth_config.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class FILEUPLOADAuthConfig(BaseModel): + """ + Authentication configuration for File Upload + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for this connector") + path_prefix: Optional[StrictStr] = Field(default=None, description="Path Prefix", alias="path-prefix") + files: Optional[List[StrictStr]] = Field(default=None, description="Choose files. Files uploaded to this connector can be used in pipelines to vectorize their contents. 
Note: files with the same name will be overwritten.") + __properties: ClassVar[List[str]] = ["name", "path-prefix", "files"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FILEUPLOADAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FILEUPLOADAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "path-prefix": obj.get("path-prefix"), + "files": obj.get("files") + }) + return _obj + + diff --git a/vectorize_client/models/firecrawl.py b/vectorize_client/models/firecrawl.py new file mode 100644 index 0000000..00f3a94 --- /dev/null +++ b/vectorize_client/models/firecrawl.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.firecrawl_config import FIRECRAWLConfig +from typing import Optional, Set +from typing_extensions import Self + +class Firecrawl(BaseModel): + """ + Firecrawl + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"FIRECRAWL\")") + config: FIRECRAWLConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['FIRECRAWL']): + raise ValueError("must be one of enum values ('FIRECRAWL')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) 
+ + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Firecrawl from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Firecrawl from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": FIRECRAWLConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/firecrawl1.py b/vectorize_client/models/firecrawl1.py new file mode 100644 index 0000000..7871d21 --- /dev/null +++ b/vectorize_client/models/firecrawl1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated 
by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.firecrawl_config import FIRECRAWLConfig +from typing import Optional, Set +from typing_extensions import Self + +class Firecrawl1(BaseModel): + """ + Firecrawl1 + """ # noqa: E501 + config: Optional[FIRECRAWLConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Firecrawl1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Firecrawl1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": FIRECRAWLConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/firecrawl2.py b/vectorize_client/models/firecrawl2.py new file mode 100644 index 0000000..d061d3c --- /dev/null +++ b/vectorize_client/models/firecrawl2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Firecrawl2(BaseModel): + """ + Firecrawl2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"FIRECRAWL\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['FIRECRAWL']): + raise ValueError("must be one of enum values ('FIRECRAWL')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Firecrawl2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Firecrawl2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/firecrawl_auth_config.py b/vectorize_client/models/firecrawl_auth_config.py new file mode 100644 index 0000000..eff4653 --- /dev/null +++ b/vectorize_client/models/firecrawl_auth_config.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class FIRECRAWLAuthConfig(BaseModel): + """ + Authentication configuration for Firecrawl + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + api_key: StrictStr = Field(description="API Key. 
Example: Enter your Firecrawl API Key", alias="api-key") + __properties: ClassVar[List[str]] = ["name", "api-key"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FIRECRAWLAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FIRECRAWLAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "api-key": obj.get("api-key") + }) + return _obj + + diff --git a/vectorize_client/models/firecrawl_config.py b/vectorize_client/models/firecrawl_config.py new file mode 100644 index 0000000..5524100 --- /dev/null +++ b/vectorize_client/models/firecrawl_config.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class FIRECRAWLConfig(BaseModel): + """ + Configuration for Firecrawl connector + """ # noqa: E501 + endpoint: StrictStr = Field(description="Endpoint. Example: Choose which api endpoint to use") + request: Dict[str, Any] = Field(description="Request Body. 
Example: JSON config for firecrawl's /crawl or /scrape endpoint.") + __properties: ClassVar[List[str]] = ["endpoint", "request"] + + @field_validator('endpoint') + def endpoint_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['Crawl', 'Scrape']): + raise ValueError("must be one of enum values ('Crawl', 'Scrape')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FIRECRAWLConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FIRECRAWLConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "endpoint": obj.get("endpoint") if obj.get("endpoint") is not None else 'Crawl', + "request": obj.get("request") + }) + return _obj + + diff --git a/vectorize_client/models/fireflies.py b/vectorize_client/models/fireflies.py new file mode 100644 index 0000000..de1f7e6 --- /dev/null +++ b/vectorize_client/models/fireflies.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.fireflies_config import FIREFLIESConfig +from typing import Optional, Set +from typing_extensions import Self + +class Fireflies(BaseModel): + """ + Fireflies + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"FIREFLIES\")") + config: FIREFLIESConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['FIREFLIES']): + raise ValueError("must be one of enum values ('FIREFLIES')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Fireflies from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Fireflies from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": FIREFLIESConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/fireflies1.py b/vectorize_client/models/fireflies1.py new file mode 100644 index 0000000..1eb16b1 --- /dev/null +++ b/vectorize_client/models/fireflies1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.fireflies_config import FIREFLIESConfig +from typing import Optional, Set +from typing_extensions import Self + +class Fireflies1(BaseModel): + """ + Fireflies1 + """ # noqa: E501 + config: Optional[FIREFLIESConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Fireflies1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Fireflies1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": FIREFLIESConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/fireflies2.py b/vectorize_client/models/fireflies2.py new file mode 100644 index 0000000..a9972f3 --- /dev/null +++ b/vectorize_client/models/fireflies2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Fireflies2(BaseModel): + """ + Fireflies2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"FIREFLIES\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['FIREFLIES']): + raise ValueError("must be one of enum values ('FIREFLIES')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Fireflies2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Fireflies2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/fireflies_auth_config.py b/vectorize_client/models/fireflies_auth_config.py new file mode 100644 index 0000000..0f58a3f --- /dev/null +++ b/vectorize_client/models/fireflies_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class FIREFLIESAuthConfig(BaseModel): + """ + Authentication configuration for Fireflies.ai + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + api_key: Annotated[str, Field(strict=True)] = Field(description="API Key. 
Example: Enter your Fireflies.ai API key", alias="api-key") + __properties: ClassVar[List[str]] = ["name", "api-key"] + + @field_validator('api_key') + def api_key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FIREFLIESAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FIREFLIESAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "api-key": obj.get("api-key") + }) + return _obj + + diff --git a/vectorize_client/models/fireflies_config.py b/vectorize_client/models/fireflies_config.py new file mode 100644 index 0000000..a05f0d0 --- /dev/null +++ b/vectorize_client/models/fireflies_config.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import date +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class FIREFLIESConfig(BaseModel): + """ + Configuration for Fireflies.ai connector + """ # noqa: E501 + start_date: date = Field(description="Start Date. Include meetings from this date forward. Example: Enter a date: Example 2023-12-31", alias="start-date") + end_date: Optional[date] = Field(default=None, description="End Date. Include meetings up to this date only. Example: Enter a date: Example 2023-12-31", alias="end-date") + title_filter_type: StrictStr = Field(alias="title-filter-type") + title_filter: Optional[StrictStr] = Field(default=None, description="Title Filter. Only include meetings with this text in the title. 
Example: Enter meeting title", alias="title-filter") + participant_filter_type: StrictStr = Field(alias="participant-filter-type") + participant_filter: Optional[StrictStr] = Field(default=None, description="Participant's Email Filter. Include meetings where these participants were invited. Example: Enter participant email", alias="participant-filter") + max_meetings: Optional[Union[StrictFloat, StrictInt]] = Field(default=-1, description="Max Meetings. Enter -1 for all available meetings, or specify a limit. Example: Enter maximum number of meetings to retrieve. (-1 for all)", alias="max-meetings") + __properties: ClassVar[List[str]] = ["start-date", "end-date", "title-filter-type", "title-filter", "participant-filter-type", "participant-filter", "max-meetings"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FIREFLIESConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FIREFLIESConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "start-date": obj.get("start-date"), + "end-date": obj.get("end-date"), + "title-filter-type": obj.get("title-filter-type") if obj.get("title-filter-type") is not None else 'AND', + "title-filter": obj.get("title-filter"), + "participant-filter-type": obj.get("participant-filter-type") if obj.get("participant-filter-type") is not None else 'AND', + "participant-filter": obj.get("participant-filter"), + "max-meetings": obj.get("max-meetings") if obj.get("max-meetings") is not None else -1 + }) + return _obj + + diff --git a/vectorize_client/models/gcs_auth_config.py b/vectorize_client/models/gcs_auth_config.py new file mode 100644 index 0000000..2482901 --- /dev/null +++ b/vectorize_client/models/gcs_auth_config.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class GCSAuthConfig(BaseModel): + """ + Authentication configuration for GCP Cloud Storage + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + service_account_json: StrictStr = Field(description="Service Account JSON. 
Example: Enter the JSON key file for the service account", alias="service-account-json") + bucket_name: StrictStr = Field(description="Bucket. Example: Enter bucket name", alias="bucket-name") + __properties: ClassVar[List[str]] = ["name", "service-account-json", "bucket-name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GCSAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GCSAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "service-account-json": obj.get("service-account-json"), + "bucket-name": obj.get("bucket-name") + }) + return _obj + + diff --git a/vectorize_client/models/gcs_config.py b/vectorize_client/models/gcs_config.py new file mode 100644 index 0000000..26871d2 --- /dev/null +++ b/vectorize_client/models/gcs_config.py @@ -0,0 +1,106 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class GCSConfig(BaseModel): + """ + Configuration for GCP Cloud Storage connector + """ # noqa: E501 + file_extensions: List[StrictStr] = Field(description="File Extensions", alias="file-extensions") + idle_time: Union[Annotated[float, Field(strict=True, ge=1)], Annotated[int, Field(strict=True, ge=1)]] = Field(description="Check for updates every (seconds)", alias="idle-time") + recursive: Optional[StrictBool] = Field(default=None, description="Recursively scan all folders in the bucket") + path_prefix: Optional[StrictStr] = Field(default=None, description="Path Prefix", alias="path-prefix") + path_metadata_regex: Optional[StrictStr] = Field(default=None, description="Path Metadata Regex", alias="path-metadata-regex") + path_regex_group_names: Optional[StrictStr] = Field(default=None, description="Path Regex Group Names. 
Example: Enter Group Name", alias="path-regex-group-names") + __properties: ClassVar[List[str]] = ["file-extensions", "idle-time", "recursive", "path-prefix", "path-metadata-regex", "path-regex-group-names"] + + @field_validator('file_extensions') + def file_extensions_validate_enum(cls, value): + """Validates the enum""" + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GCSConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GCSConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file-extensions": obj.get("file-extensions"), + "idle-time": obj.get("idle-time") if obj.get("idle-time") is not None else 5, + "recursive": obj.get("recursive"), + "path-prefix": obj.get("path-prefix"), + "path-metadata-regex": obj.get("path-metadata-regex"), + "path-regex-group-names": obj.get("path-regex-group-names") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/get_ai_platform_connectors200_response.py b/vectorize_client/models/get_ai_platform_connectors200_response.py similarity index 100% rename from src/python/vectorize_client/models/get_ai_platform_connectors200_response.py rename to vectorize_client/models/get_ai_platform_connectors200_response.py diff --git a/src/python/vectorize_client/models/get_deep_research_response.py b/vectorize_client/models/get_deep_research_response.py similarity index 100% rename from src/python/vectorize_client/models/get_deep_research_response.py rename to vectorize_client/models/get_deep_research_response.py diff --git a/src/python/vectorize_client/models/get_destination_connectors200_response.py b/vectorize_client/models/get_destination_connectors200_response.py similarity index 100% rename from src/python/vectorize_client/models/get_destination_connectors200_response.py rename to vectorize_client/models/get_destination_connectors200_response.py diff --git a/src/python/vectorize_client/models/get_pipeline_events_response.py b/vectorize_client/models/get_pipeline_events_response.py similarity index 100% rename from 
src/python/vectorize_client/models/get_pipeline_events_response.py rename to vectorize_client/models/get_pipeline_events_response.py diff --git a/src/python/vectorize_client/models/get_pipeline_metrics_response.py b/vectorize_client/models/get_pipeline_metrics_response.py similarity index 100% rename from src/python/vectorize_client/models/get_pipeline_metrics_response.py rename to vectorize_client/models/get_pipeline_metrics_response.py diff --git a/src/python/vectorize_client/models/get_pipeline_response.py b/vectorize_client/models/get_pipeline_response.py similarity index 100% rename from src/python/vectorize_client/models/get_pipeline_response.py rename to vectorize_client/models/get_pipeline_response.py diff --git a/src/python/vectorize_client/models/get_pipelines400_response.py b/vectorize_client/models/get_pipelines400_response.py similarity index 100% rename from src/python/vectorize_client/models/get_pipelines400_response.py rename to vectorize_client/models/get_pipelines400_response.py diff --git a/src/python/vectorize_client/models/get_pipelines_response.py b/vectorize_client/models/get_pipelines_response.py similarity index 100% rename from src/python/vectorize_client/models/get_pipelines_response.py rename to vectorize_client/models/get_pipelines_response.py diff --git a/src/python/vectorize_client/models/get_source_connectors200_response.py b/vectorize_client/models/get_source_connectors200_response.py similarity index 100% rename from src/python/vectorize_client/models/get_source_connectors200_response.py rename to vectorize_client/models/get_source_connectors200_response.py diff --git a/src/python/vectorize_client/models/get_upload_files_response.py b/vectorize_client/models/get_upload_files_response.py similarity index 100% rename from src/python/vectorize_client/models/get_upload_files_response.py rename to vectorize_client/models/get_upload_files_response.py diff --git a/vectorize_client/models/github.py b/vectorize_client/models/github.py new 
file mode 100644 index 0000000..7805011 --- /dev/null +++ b/vectorize_client/models/github.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.github_config import GITHUBConfig +from typing import Optional, Set +from typing_extensions import Self + +class Github(BaseModel): + """ + Github + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"GITHUB\")") + config: GITHUBConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GITHUB']): + raise ValueError("must be one of enum values ('GITHUB')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Github from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Github from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": GITHUBConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/src/python/vectorize_client/models/update_source_connector_request.py b/vectorize_client/models/github1.py similarity index 78% rename from src/python/vectorize_client/models/update_source_connector_request.py rename to vectorize_client/models/github1.py index 5b72fd8..02f6852 100644 --- a/src/python/vectorize_client/models/update_source_connector_request.py +++ b/vectorize_client/models/github1.py @@ -18,15 +18,16 @@ import json from pydantic import BaseModel, ConfigDict -from typing import Any, ClassVar, Dict, List +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.github_config import GITHUBConfig from typing import Optional, Set from typing_extensions import Self -class UpdateSourceConnectorRequest(BaseModel): +class Github1(BaseModel): """ - UpdateSourceConnectorRequest + Github1 """ # noqa: E501 - config: Dict[str, Any] + config: Optional[GITHUBConfig] = None __properties: ClassVar[List[str]] = ["config"] model_config = ConfigDict( @@ -47,7 
+48,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of UpdateSourceConnectorRequest from a JSON string""" + """Create an instance of Github1 from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -68,11 +69,14 @@ def to_dict(self) -> Dict[str, Any]: exclude=excluded_fields, exclude_none=True, ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() return _dict @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of UpdateSourceConnectorRequest from a dict""" + """Create an instance of Github1 from a dict""" if obj is None: return None @@ -80,7 +84,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return cls.model_validate(obj) _obj = cls.model_validate({ - "config": obj.get("config") + "config": GITHUBConfig.from_dict(obj["config"]) if obj.get("config") is not None else None }) return _obj diff --git a/vectorize_client/models/github2.py b/vectorize_client/models/github2.py new file mode 100644 index 0000000..0e26d48 --- /dev/null +++ b/vectorize_client/models/github2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Github2(BaseModel): + """ + Github2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"GITHUB\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GITHUB']): + raise ValueError("must be one of enum values ('GITHUB')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Github2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Github2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/github_auth_config.py b/vectorize_client/models/github_auth_config.py new file mode 100644 index 0000000..40c24e7 --- /dev/null +++ b/vectorize_client/models/github_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class GITHUBAuthConfig(BaseModel): + """ + Authentication configuration for GitHub + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + oauth_token: Annotated[str, Field(strict=True)] = Field(description="Personal Access Token. 
Example: Enter your GitHub personal access token", alias="oauth-token") + __properties: ClassVar[List[str]] = ["name", "oauth-token"] + + @field_validator('oauth_token') + def oauth_token_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GITHUBAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GITHUBAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "oauth-token": obj.get("oauth-token") + }) + return _obj + + diff --git a/vectorize_client/models/github_config.py b/vectorize_client/models/github_config.py new file mode 100644 index 0000000..d00d902 --- /dev/null +++ b/vectorize_client/models/github_config.py @@ -0,0 +1,126 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import date +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class GITHUBConfig(BaseModel): + """ + Configuration for GitHub connector + """ # noqa: E501 + repositories: Annotated[str, Field(strict=True)] = Field(description="Repositories. Example: Example: owner1/repo1") + include_pull_requests: StrictBool = Field(description="Include Pull Requests", alias="include-pull-requests") + pull_request_status: StrictStr = Field(description="Pull Request Status", alias="pull-request-status") + pull_request_labels: Optional[StrictStr] = Field(default=None, description="Pull Request Labels. Example: Optionally filter by label. E.g. 
fix", alias="pull-request-labels") + include_issues: StrictBool = Field(description="Include Issues", alias="include-issues") + issue_status: StrictStr = Field(description="Issue Status", alias="issue-status") + issue_labels: Optional[StrictStr] = Field(default=None, description="Issue Labels. Example: Optionally filter by label. E.g. bug", alias="issue-labels") + max_items: Union[StrictFloat, StrictInt] = Field(description="Max Items. Example: Enter maximum number of items to fetch", alias="max-items") + created_after: Optional[date] = Field(default=None, description="Created After. Filter for items created after this date. Example: Enter a date: Example 2012-12-31", alias="created-after") + __properties: ClassVar[List[str]] = ["repositories", "include-pull-requests", "pull-request-status", "pull-request-labels", "include-issues", "issue-status", "issue-labels", "max-items", "created-after"] + + @field_validator('repositories') + def repositories_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^[a-zA-Z0-9-]+\/[a-zA-Z0-9-]+$", value): + raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9-]+\/[a-zA-Z0-9-]+$/") + return value + + @field_validator('pull_request_status') + def pull_request_status_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['all', 'open', 'closed', 'merged']): + raise ValueError("must be one of enum values ('all', 'open', 'closed', 'merged')") + return value + + @field_validator('issue_status') + def issue_status_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['all', 'open', 'closed']): + raise ValueError("must be one of enum values ('all', 'open', 'closed')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return 
pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GITHUBConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GITHUBConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "repositories": obj.get("repositories"), + "include-pull-requests": obj.get("include-pull-requests") if obj.get("include-pull-requests") is not None else True, + "pull-request-status": obj.get("pull-request-status") if obj.get("pull-request-status") is not None else 'all', + "pull-request-labels": obj.get("pull-request-labels"), + "include-issues": obj.get("include-issues") if obj.get("include-issues") is not None else True, + "issue-status": obj.get("issue-status") if obj.get("issue-status") is not None else 'all', + "issue-labels": obj.get("issue-labels"), + "max-items": obj.get("max-items") if obj.get("max-items") is not None else 1000, + "created-after": obj.get("created-after") + }) + return _obj + + diff --git 
a/vectorize_client/models/google_cloud_storage.py b/vectorize_client/models/google_cloud_storage.py new file mode 100644 index 0000000..4086afd --- /dev/null +++ b/vectorize_client/models/google_cloud_storage.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.gcs_config import GCSConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleCloudStorage(BaseModel): + """ + GoogleCloudStorage + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"GCS\")") + config: GCSConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GCS']): + raise ValueError("must be one of enum values ('GCS')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleCloudStorage from a JSON string""" + return cls.from_dict(json.loads(json_str)) 
+ + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleCloudStorage from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": GCSConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_cloud_storage1.py b/vectorize_client/models/google_cloud_storage1.py new file mode 100644 index 0000000..a57f55e --- /dev/null +++ b/vectorize_client/models/google_cloud_storage1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.gcs_config import GCSConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleCloudStorage1(BaseModel): + """ + GoogleCloudStorage1 + """ # noqa: E501 + config: Optional[GCSConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleCloudStorage1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleCloudStorage1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": GCSConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_cloud_storage2.py b/vectorize_client/models/google_cloud_storage2.py new file mode 100644 index 0000000..6683081 --- /dev/null +++ b/vectorize_client/models/google_cloud_storage2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class GoogleCloudStorage2(BaseModel): + """ + GoogleCloudStorage2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"GCS\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GCS']): + raise ValueError("must be one of enum values ('GCS')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleCloudStorage2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleCloudStorage2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/google_drive.py b/vectorize_client/models/google_drive.py new file mode 100644 index 0000000..3299c66 --- /dev/null +++ b/vectorize_client/models/google_drive.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.googledrive_config import GOOGLEDRIVEConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDrive(BaseModel): + """ + GoogleDrive + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"GOOGLE_DRIVE\")") + config: GOOGLEDRIVEConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GOOGLE_DRIVE']): + raise ValueError("must be one of enum values ('GOOGLE_DRIVE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def 
to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDrive from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDrive from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": GOOGLEDRIVEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_drive1.py b/vectorize_client/models/google_drive1.py new file mode 100644 index 0000000..1f53c19 --- /dev/null +++ b/vectorize_client/models/google_drive1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + 
Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.googledrive_config import GOOGLEDRIVEConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDrive1(BaseModel): + """ + GoogleDrive1 + """ # noqa: E501 + config: Optional[GOOGLEDRIVEConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDrive1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDrive1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": GOOGLEDRIVEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_drive2.py b/vectorize_client/models/google_drive2.py new file mode 100644 index 0000000..466c714 --- /dev/null +++ b/vectorize_client/models/google_drive2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDrive2(BaseModel): + """ + GoogleDrive2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"GOOGLE_DRIVE\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GOOGLE_DRIVE']): + raise ValueError("must be one of enum values ('GOOGLE_DRIVE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDrive2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDrive2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/google_drive_o_auth.py b/vectorize_client/models/google_drive_o_auth.py new file mode 100644 index 0000000..66adf76 --- /dev/null +++ b/vectorize_client/models/google_drive_o_auth.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.googledriveoauth_config import GOOGLEDRIVEOAUTHConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDriveOAuth(BaseModel): + """ + GoogleDriveOAuth + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"GOOGLE_DRIVE_OAUTH\")") + config: GOOGLEDRIVEOAUTHConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GOOGLE_DRIVE_OAUTH']): + raise ValueError("must be one of enum values ('GOOGLE_DRIVE_OAUTH')") + return value + + model_config = ConfigDict( + populate_by_name=True, + 
validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDriveOAuth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDriveOAuth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": GOOGLEDRIVEOAUTHConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_drive_o_auth1.py b/vectorize_client/models/google_drive_o_auth1.py new file mode 100644 index 0000000..cd49810 --- /dev/null +++ b/vectorize_client/models/google_drive_o_auth1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + 
Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.googledriveoauth_config import GOOGLEDRIVEOAUTHConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDriveOAuth1(BaseModel): + """ + GoogleDriveOAuth1 + """ # noqa: E501 + config: Optional[GOOGLEDRIVEOAUTHConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDriveOAuth1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDriveOAuth1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": GOOGLEDRIVEOAUTHConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_drive_o_auth2.py b/vectorize_client/models/google_drive_o_auth2.py new file mode 100644 index 0000000..73a8a22 --- /dev/null +++ b/vectorize_client/models/google_drive_o_auth2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDriveOAuth2(BaseModel): + """ + GoogleDriveOAuth2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"GOOGLE_DRIVE_OAUTH\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GOOGLE_DRIVE_OAUTH']): + raise ValueError("must be one of enum values ('GOOGLE_DRIVE_OAUTH')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDriveOAuth2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDriveOAuth2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/google_drive_oauth_multi.py b/vectorize_client/models/google_drive_oauth_multi.py new file mode 100644 index 0000000..10282e2 --- /dev/null +++ b/vectorize_client/models/google_drive_oauth_multi.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.googledriveoauthmulti_config import GOOGLEDRIVEOAUTHMULTIConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDriveOauthMulti(BaseModel): + """ + GoogleDriveOauthMulti + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI\")") + config: GOOGLEDRIVEOAUTHMULTIConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GOOGLE_DRIVE_OAUTH_MULTI']): + raise ValueError("must be one of enum values ('GOOGLE_DRIVE_OAUTH_MULTI')") + return value + + 
model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMulti from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMulti from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": GOOGLEDRIVEOAUTHMULTIConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_drive_oauth_multi1.py b/vectorize_client/models/google_drive_oauth_multi1.py new file mode 100644 index 0000000..ce61d22 --- /dev/null +++ b/vectorize_client/models/google_drive_oauth_multi1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.googledriveoauthmulti_config import GOOGLEDRIVEOAUTHMULTIConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDriveOauthMulti1(BaseModel): + """ + GoogleDriveOauthMulti1 + """ # noqa: E501 + config: Optional[GOOGLEDRIVEOAUTHMULTIConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMulti1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMulti1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": GOOGLEDRIVEOAUTHMULTIConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_drive_oauth_multi2.py b/vectorize_client/models/google_drive_oauth_multi2.py new file mode 100644 index 0000000..17403e8 --- /dev/null +++ b/vectorize_client/models/google_drive_oauth_multi2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDriveOauthMulti2(BaseModel): + """ + GoogleDriveOauthMulti2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GOOGLE_DRIVE_OAUTH_MULTI']): + raise ValueError("must be one of enum values ('GOOGLE_DRIVE_OAUTH_MULTI')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMulti2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMulti2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/google_drive_oauth_multi_custom.py b/vectorize_client/models/google_drive_oauth_multi_custom.py new file mode 100644 index 0000000..59751dd --- /dev/null +++ b/vectorize_client/models/google_drive_oauth_multi_custom.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.googledriveoauthmulticustom_config import GOOGLEDRIVEOAUTHMULTICUSTOMConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDriveOauthMultiCustom(BaseModel): + """ + GoogleDriveOauthMultiCustom + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM\")") + config: GOOGLEDRIVEOAUTHMULTICUSTOMConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM']): + raise ValueError("must be one of enum values ('GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMultiCustom from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMultiCustom from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": GOOGLEDRIVEOAUTHMULTICUSTOMConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_drive_oauth_multi_custom1.py b/vectorize_client/models/google_drive_oauth_multi_custom1.py new file mode 100644 index 0000000..d15d4e1 --- /dev/null +++ b/vectorize_client/models/google_drive_oauth_multi_custom1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.googledriveoauthmulticustom_config import GOOGLEDRIVEOAUTHMULTICUSTOMConfig +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDriveOauthMultiCustom1(BaseModel): + """ + GoogleDriveOauthMultiCustom1 + """ # noqa: E501 + config: Optional[GOOGLEDRIVEOAUTHMULTICUSTOMConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMultiCustom1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMultiCustom1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": GOOGLEDRIVEOAUTHMULTICUSTOMConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/google_drive_oauth_multi_custom2.py b/vectorize_client/models/google_drive_oauth_multi_custom2.py new file mode 100644 index 0000000..2e65808 --- /dev/null +++ b/vectorize_client/models/google_drive_oauth_multi_custom2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class GoogleDriveOauthMultiCustom2(BaseModel): + """ + GoogleDriveOauthMultiCustom2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM']): + raise ValueError("must be one of enum values ('GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMultiCustom2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GoogleDriveOauthMultiCustom2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/googledrive_auth_config.py b/vectorize_client/models/googledrive_auth_config.py new file mode 100644 index 0000000..395c88a --- /dev/null +++ b/vectorize_client/models/googledrive_auth_config.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class GOOGLEDRIVEAuthConfig(BaseModel): + """ + Authentication configuration for Google Drive (Service Account) + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + service_account_json: StrictStr = Field(description="Service Account JSON. 
Example: Enter the JSON key file for the service account", alias="service-account-json") + __properties: ClassVar[List[str]] = ["name", "service-account-json"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "service-account-json": obj.get("service-account-json") + }) + return _obj + + diff --git a/vectorize_client/models/googledrive_config.py b/vectorize_client/models/googledrive_config.py new file mode 100644 index 0000000..e67f591 --- /dev/null +++ b/vectorize_client/models/googledrive_config.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class GOOGLEDRIVEConfig(BaseModel): + """ + Configuration for Google Drive (Service Account) connector + """ # noqa: E501 + file_extensions: List[StrictStr] = Field(description="File Extensions", alias="file-extensions") + root_parents: Optional[Annotated[str, Field(strict=True)]] = Field(default=None, description="Restrict ingest to these folder URLs (optional). Example: Enter Folder URLs. 
Example: https://drive.google.com/drive/folders/1234aBCd5678_eFgH9012iJKL3456opqr", alias="root-parents") + idle_time: Optional[Union[StrictFloat, StrictInt]] = Field(default=5, description="Polling Interval (seconds). Example: Enter polling interval in seconds", alias="idle-time") + __properties: ClassVar[List[str]] = ["file-extensions", "root-parents", "idle-time"] + + @field_validator('file_extensions') + def file_extensions_validate_enum(cls, value): + """Validates the enum""" + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + @field_validator('root_parents') + def root_parents_validate_regular_expression(cls, value): + """Validates the regular expression""" + if value is None: + return value + + if not re.match(r"^https:\/\/drive\.google\.com\/drive(\/u\/\d+)?\/folders\/[a-zA-Z0-9_-]+(\?.*)?$", value): + raise ValueError(r"must validate the regular expression /^https:\/\/drive\.google\.com\/drive(\/u\/\d+)?\/folders\/[a-zA-Z0-9_-]+(\?.*)?$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file-extensions": obj.get("file-extensions"), + "root-parents": obj.get("root-parents"), + "idle-time": obj.get("idle-time") if obj.get("idle-time") is not None else 5 + }) + return _obj + + diff --git a/vectorize_client/models/googledriveoauth_auth_config.py b/vectorize_client/models/googledriveoauth_auth_config.py new file mode 100644 index 0000000..3071007 --- /dev/null +++ b/vectorize_client/models/googledriveoauth_auth_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class GOOGLEDRIVEOAUTHAuthConfig(BaseModel): + """ + Authentication configuration for Google Drive OAuth + """ # noqa: E501 + name: StrictStr = Field(description="Name. 
Example: Enter a descriptive name") + authorized_user: Optional[StrictStr] = Field(default=None, description="Authorized User", alias="authorized-user") + selection_details: StrictStr = Field(description="Connect Google Drive to Vectorize. Example: Authorize", alias="selection-details") + edited_users: Optional[StrictStr] = Field(default=None, alias="editedUsers") + reconnect_users: Optional[StrictStr] = Field(default=None, alias="reconnectUsers") + __properties: ClassVar[List[str]] = ["name", "authorized-user", "selection-details", "editedUsers", "reconnectUsers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "authorized-user": obj.get("authorized-user"), + "selection-details": obj.get("selection-details"), + "editedUsers": obj.get("editedUsers"), + "reconnectUsers": obj.get("reconnectUsers") + }) + return _obj + + diff --git a/vectorize_client/models/googledriveoauth_config.py b/vectorize_client/models/googledriveoauth_config.py new file mode 100644 index 0000000..cd8d18e --- /dev/null +++ b/vectorize_client/models/googledriveoauth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class GOOGLEDRIVEOAUTHConfig(BaseModel): + """ + Configuration for Google Drive OAuth connector + """ # noqa: E501 + file_extensions: List[StrictStr] = Field(description="File Extensions", alias="file-extensions") + idle_time: Optional[Union[StrictFloat, StrictInt]] = Field(default=5, description="Polling Interval (seconds). 
Example: Enter polling interval in seconds", alias="idle-time") + __properties: ClassVar[List[str]] = ["file-extensions", "idle-time"] + + @field_validator('file_extensions') + def file_extensions_validate_enum(cls, value): + """Validates the enum""" + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file-extensions": obj.get("file-extensions"), + "idle-time": obj.get("idle-time") if obj.get("idle-time") is not None else 5 + }) + return _obj + + diff --git a/vectorize_client/models/googledriveoauthmulti_auth_config.py b/vectorize_client/models/googledriveoauthmulti_auth_config.py new file mode 100644 index 0000000..7cb05a4 --- /dev/null +++ b/vectorize_client/models/googledriveoauthmulti_auth_config.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class GOOGLEDRIVEOAUTHMULTIAuthConfig(BaseModel): + """ + Authentication configuration for Google Drive Multi-User (Vectorize) + """ # noqa: E501 + name: StrictStr = Field(description="Name. 
Example: Enter a descriptive name") + authorized_users: Optional[StrictStr] = Field(default=None, description="Authorized Users", alias="authorized-users") + edited_users: Optional[StrictStr] = Field(default=None, alias="editedUsers") + deleted_users: Optional[StrictStr] = Field(default=None, alias="deletedUsers") + __properties: ClassVar[List[str]] = ["name", "authorized-users", "editedUsers", "deletedUsers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHMULTIAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHMULTIAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "authorized-users": obj.get("authorized-users"), + "editedUsers": obj.get("editedUsers"), + "deletedUsers": obj.get("deletedUsers") + }) + return _obj + + diff --git a/vectorize_client/models/googledriveoauthmulti_config.py b/vectorize_client/models/googledriveoauthmulti_config.py new file mode 100644 index 0000000..35e93a1 --- /dev/null +++ b/vectorize_client/models/googledriveoauthmulti_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class GOOGLEDRIVEOAUTHMULTIConfig(BaseModel): + """ + Configuration for Google Drive Multi-User (Vectorize) connector + """ # noqa: E501 + file_extensions: List[StrictStr] = Field(description="File Extensions", alias="file-extensions") + idle_time: Optional[Union[StrictFloat, StrictInt]] = Field(default=5, description="Polling Interval (seconds). 
Example: Enter polling interval in seconds", alias="idle-time") + __properties: ClassVar[List[str]] = ["file-extensions", "idle-time"] + + @field_validator('file_extensions') + def file_extensions_validate_enum(cls, value): + """Validates the enum""" + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHMULTIConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHMULTIConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file-extensions": obj.get("file-extensions"), + "idle-time": obj.get("idle-time") if obj.get("idle-time") is not None else 5 + }) + return _obj + + diff --git a/vectorize_client/models/googledriveoauthmulticustom_auth_config.py b/vectorize_client/models/googledriveoauthmulticustom_auth_config.py new file mode 100644 index 0000000..1176f01 --- /dev/null +++ b/vectorize_client/models/googledriveoauthmulticustom_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig(BaseModel): + """ + Authentication configuration for Google Drive Multi-User (White Label) + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + oauth2_client_id: StrictStr = Field(description="OAuth2 Client Id. Example: Enter Client Id", alias="oauth2-client-id") + oauth2_client_secret: StrictStr = Field(description="OAuth2 Client Secret. 
Example: Enter Client Secret", alias="oauth2-client-secret") + authorized_users: Optional[StrictStr] = Field(default=None, description="Authorized Users", alias="authorized-users") + edited_users: Optional[StrictStr] = Field(default=None, alias="editedUsers") + deleted_users: Optional[StrictStr] = Field(default=None, alias="deletedUsers") + __properties: ClassVar[List[str]] = ["name", "oauth2-client-id", "oauth2-client-secret", "authorized-users", "editedUsers", "deletedUsers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHMULTICUSTOMAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "oauth2-client-id": obj.get("oauth2-client-id"), + "oauth2-client-secret": obj.get("oauth2-client-secret"), + "authorized-users": obj.get("authorized-users"), + "editedUsers": obj.get("editedUsers"), + "deletedUsers": obj.get("deletedUsers") + }) + return _obj + + diff --git a/vectorize_client/models/googledriveoauthmulticustom_config.py b/vectorize_client/models/googledriveoauthmulticustom_config.py new file mode 100644 index 0000000..bb96eca --- /dev/null +++ b/vectorize_client/models/googledriveoauthmulticustom_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class GOOGLEDRIVEOAUTHMULTICUSTOMConfig(BaseModel): + """ + Configuration for Google Drive Multi-User (White Label) connector + """ # noqa: E501 + file_extensions: List[StrictStr] = Field(description="File Extensions", alias="file-extensions") + idle_time: Optional[Union[StrictFloat, StrictInt]] = Field(default=5, description="Polling Interval (seconds). 
Example: Enter polling interval in seconds", alias="idle-time") + __properties: ClassVar[List[str]] = ["file-extensions", "idle-time"] + + @field_validator('file_extensions') + def file_extensions_validate_enum(cls, value): + """Validates the enum""" + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHMULTICUSTOMConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GOOGLEDRIVEOAUTHMULTICUSTOMConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file-extensions": obj.get("file-extensions"), + "idle-time": obj.get("idle-time") if obj.get("idle-time") is not None else 5 + }) + return _obj + + diff --git a/vectorize_client/models/intercom.py b/vectorize_client/models/intercom.py new file mode 100644 index 0000000..802bec8 --- /dev/null +++ b/vectorize_client/models/intercom.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.intercom_config import INTERCOMConfig +from typing import Optional, Set +from typing_extensions import Self + +class Intercom(BaseModel): + """ + Intercom + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"INTERCOM\")") + config: INTERCOMConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['INTERCOM']): + raise ValueError("must be one of enum values ('INTERCOM')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Intercom from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Intercom from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": INTERCOMConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/intercom1.py b/vectorize_client/models/intercom1.py new file mode 100644 index 0000000..9d43223 --- /dev/null +++ b/vectorize_client/models/intercom1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.intercom_config import INTERCOMConfig +from typing import Optional, Set +from typing_extensions import Self + +class Intercom1(BaseModel): + """ + Intercom1 + """ # noqa: E501 + config: Optional[INTERCOMConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Intercom1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Intercom1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": INTERCOMConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/intercom2.py b/vectorize_client/models/intercom2.py new file mode 100644 index 0000000..4d0f3ce --- /dev/null +++ b/vectorize_client/models/intercom2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Intercom2(BaseModel): + """ + Intercom2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"INTERCOM\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['INTERCOM']): + raise ValueError("must be one of enum values ('INTERCOM')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Intercom2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Intercom2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/intercom_auth_config.py b/vectorize_client/models/intercom_auth_config.py new file mode 100644 index 0000000..c4c2942 --- /dev/null +++ b/vectorize_client/models/intercom_auth_config.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class INTERCOMAuthConfig(BaseModel): + """ + Authentication configuration for Intercom + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + token: StrictStr = Field(description="Access Token. 
Example: Authorize Intercom Access") + __properties: ClassVar[List[str]] = ["name", "token"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of INTERCOMAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of INTERCOMAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "token": obj.get("token") + }) + return _obj + + diff --git a/vectorize_client/models/intercom_config.py b/vectorize_client/models/intercom_config.py new file mode 100644 index 0000000..67aabc6 --- /dev/null +++ b/vectorize_client/models/intercom_config.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import date +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class INTERCOMConfig(BaseModel): + """ + Configuration for Intercom connector + """ # noqa: E501 + created_at: date = Field(description="Created After. Filter for conversation created after this date. Example: Enter a date: Example 2012-12-31") + updated_at: Optional[date] = Field(default=None, description="Updated After. Filter for conversation updated after this date. 
Example: Enter a date: Example 2012-12-31") + state: Optional[List[StrictStr]] = Field(default=None, description="State") + __properties: ClassVar[List[str]] = ["created_at", "updated_at", "state"] + + @field_validator('state') + def state_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of INTERCOMConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
class Milvus(BaseModel):
    """
    A Milvus destination connector: display name, fixed type tag, and config.
    """ # noqa: E501
    name: StrictStr = Field(description="Name of the connector")
    type: StrictStr = Field(description="Connector type (must be \"MILVUS\")")
    config: MILVUSConfig
    __properties: ClassVar[List[str]] = ["name", "type", "config"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Ensure the connector type is the literal ``MILVUS``."""
        if value != 'MILVUS':
            raise ValueError("must be one of enum values ('MILVUS')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Milvus from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        dumped = self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )
        # Serialize the nested config via its own to_dict() instead of
        # pydantic's default nested dump.
        if self.config:
            dumped['config'] = self.config.to_dict()
        return dumped

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Milvus from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        raw_config = obj.get("config")
        return cls.model_validate({
            "name": obj.get("name"),
            "type": obj.get("type"),
            "config": MILVUSConfig.from_dict(obj["config"]) if raw_config is not None else None
        })
class Milvus1(BaseModel):
    """
    Wrapper holding an optional Milvus connector configuration.
    """ # noqa: E501
    config: Optional[MILVUSConfig] = None
    __properties: ClassVar[List[str]] = ["config"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Milvus1 from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        dumped = self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )
        # Serialize the nested config via its own to_dict() instead of
        # pydantic's default nested dump.
        if self.config:
            dumped['config'] = self.config.to_dict()
        return dumped

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Milvus1 from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        raw_config = obj.get("config")
        return cls.model_validate({
            "config": MILVUSConfig.from_dict(obj["config"]) if raw_config is not None else None
        })
class Milvus2(BaseModel):
    """
    Reference to an existing Milvus connector by id and fixed type tag.
    """ # noqa: E501
    id: StrictStr = Field(description="Unique identifier for the connector")
    type: StrictStr = Field(description="Connector type (must be \"MILVUS\")")
    __properties: ClassVar[List[str]] = ["id", "type"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Ensure the connector type is the literal ``MILVUS``."""
        if value != 'MILVUS':
            raise ValueError("must be one of enum values ('MILVUS')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Milvus2 from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Milvus2 from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        return cls.model_validate({key: obj.get(key) for key in ["id", "type"]})
class MILVUSAuthConfig(BaseModel):
    """
    Credentials and endpoint details used to authenticate against Milvus.
    """ # noqa: E501
    name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Milvus integration")
    url: StrictStr = Field(description="Public Endpoint. Example: Enter your public endpoint for your Milvus cluster")
    token: Optional[StrictStr] = Field(default=None, description="Token. Example: Enter your cluster token or Username/Password")
    username: Optional[StrictStr] = Field(default=None, description="Username. Example: Enter your cluster Username")
    password: Optional[StrictStr] = Field(default=None, description="Password. Example: Enter your cluster Password")
    __properties: ClassVar[List[str]] = ["name", "url", "token", "username", "password"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a MILVUSAuthConfig from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a MILVUSAuthConfig from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        return cls.model_validate({key: obj.get(key) for key in ["name", "url", "token", "username", "password"]})
class MILVUSConfig(BaseModel):
    """
    Connector configuration for a Milvus destination (target collection).
    """ # noqa: E501
    collection: Annotated[str, Field(strict=True)] = Field(description="Collection Name. Example: Enter collection name")
    __properties: ClassVar[List[str]] = ["collection"]

    @field_validator('collection')
    def collection_validate_regular_expression(cls, value):
        """Reject collection names that do not match the Milvus identifier pattern."""
        # Must start with a letter, then only letters, digits, or underscores.
        if re.match(r"^[a-zA-Z][a-zA-Z0-9_]*$", value) is None:
            raise ValueError(r"must validate the regular expression /^[a-zA-Z][a-zA-Z0-9_]*$/")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a MILVUSConfig from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a MILVUSConfig from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        return cls.model_validate({key: obj.get(key) for key in ["collection"]})
class Notion(BaseModel):
    """
    A Notion source connector: display name, fixed type tag, and config.
    """ # noqa: E501
    name: StrictStr = Field(description="Name of the connector")
    type: StrictStr = Field(description="Connector type (must be \"NOTION\")")
    config: NOTIONConfig
    __properties: ClassVar[List[str]] = ["name", "type", "config"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Ensure the connector type is the literal ``NOTION``."""
        if value != 'NOTION':
            raise ValueError("must be one of enum values ('NOTION')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Notion from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        dumped = self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )
        # Serialize the nested config via its own to_dict() instead of
        # pydantic's default nested dump.
        if self.config:
            dumped['config'] = self.config.to_dict()
        return dumped

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Notion from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        raw_config = obj.get("config")
        return cls.model_validate({
            "name": obj.get("name"),
            "type": obj.get("type"),
            "config": NOTIONConfig.from_dict(obj["config"]) if raw_config is not None else None
        })
class Notion1(BaseModel):
    """
    Wrapper holding an optional Notion connector configuration.
    """ # noqa: E501
    config: Optional[NOTIONConfig] = None
    __properties: ClassVar[List[str]] = ["config"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Notion1 from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        dumped = self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )
        # Serialize the nested config via its own to_dict() instead of
        # pydantic's default nested dump.
        if self.config:
            dumped['config'] = self.config.to_dict()
        return dumped

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Notion1 from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        raw_config = obj.get("config")
        return cls.model_validate({
            "config": NOTIONConfig.from_dict(obj["config"]) if raw_config is not None else None
        })
class Notion2(BaseModel):
    """
    Reference to an existing Notion connector by id and fixed type tag.
    """ # noqa: E501
    id: StrictStr = Field(description="Unique identifier for the connector")
    type: StrictStr = Field(description="Connector type (must be \"NOTION\")")
    __properties: ClassVar[List[str]] = ["id", "type"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Ensure the connector type is the literal ``NOTION``."""
        if value != 'NOTION':
            raise ValueError("must be one of enum values ('NOTION')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Notion2 from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Notion2 from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        return cls.model_validate({key: obj.get(key) for key in ["id", "type"]})
class NOTIONAuthConfig(BaseModel):
    """
    OAuth-style authorization details used to connect Notion to Vectorize.
    """ # noqa: E501
    name: StrictStr = Field(description="Name. Example: Enter a descriptive name")
    access_token: StrictStr = Field(description="Connect Notion to Vectorize - Note this will effect existing connections. test. Example: Authorize", alias="access-token")
    s3id: Optional[StrictStr] = None
    edited_token: Optional[StrictStr] = Field(default=None, alias="editedToken")
    __properties: ClassVar[List[str]] = ["name", "access-token", "s3id", "editedToken"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a NOTIONAuthConfig from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a NOTIONAuthConfig from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        # Keys are the wire-format aliases, not the Python attribute names.
        return cls.model_validate({key: obj.get(key) for key in ["name", "access-token", "s3id", "editedToken"]})
class NOTIONConfig(BaseModel):
    """
    Resource-selection settings for the Notion source connector.
    """ # noqa: E501
    select_resources: StrictStr = Field(description="Select Notion Resources", alias="select-resources")
    database_ids: StrictStr = Field(description="Database IDs", alias="database-ids")
    database_names: StrictStr = Field(description="Database Names", alias="database-names")
    page_ids: StrictStr = Field(description="Page IDs", alias="page-ids")
    page_names: StrictStr = Field(description="Page Names", alias="page-names")
    __properties: ClassVar[List[str]] = ["select-resources", "database-ids", "database-names", "page-ids", "page-names"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a NOTIONConfig from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a NOTIONConfig from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        # Keys are the wire-format aliases, not the Python attribute names.
        keys = ["select-resources", "database-ids", "database-names", "page-ids", "page-names"]
        return cls.model_validate({key: obj.get(key) for key in keys})
class NotionOauthMulti(BaseModel):
    """
    A multi-workspace Notion OAuth connector: name, fixed type tag, and auth config.
    """ # noqa: E501
    name: StrictStr = Field(description="Name of the connector")
    type: StrictStr = Field(description="Connector type (must be \"NOTION_OAUTH_MULTI\")")
    config: NOTIONOAUTHMULTIAuthConfig
    __properties: ClassVar[List[str]] = ["name", "type", "config"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Ensure the connector type is the literal ``NOTION_OAUTH_MULTI``."""
        if value != 'NOTION_OAUTH_MULTI':
            raise ValueError("must be one of enum values ('NOTION_OAUTH_MULTI')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )


    def to_str(self) -> str:
        """Pretty-printed representation of the model keyed by field alias."""
        dumped = self.model_dump(by_alias=True)
        return pprint.pformat(dumped)

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        payload = self.to_dict()
        return json.dumps(payload)

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a NotionOauthMulti from a JSON document."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of this model.

        Differs from plain ``self.model_dump(by_alias=True)`` in that fields
        whose value is ``None`` are omitted from the output entirely.
        """
        skip: Set[str] = set()
        dumped = self.model_dump(
            by_alias=True,
            exclude=skip,
            exclude_none=True,
        )
        # Serialize the nested config via its own to_dict() instead of
        # pydantic's default nested dump.
        if self.config:
            dumped['config'] = self.config.to_dict()
        return dumped

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a NotionOauthMulti from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Already a model (or other validatable object): validate as-is.
            return cls.model_validate(obj)
        raw_config = obj.get("config")
        return cls.model_validate({
            "name": obj.get("name"),
            "type": obj.get("type"),
            "config": NOTIONOAUTHMULTIAuthConfig.from_dict(obj["config"]) if raw_config is not None else None
        })
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotionOauthMulti from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": NOTIONOAUTHMULTIAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/notion_oauth_multi1.py b/vectorize_client/models/notion_oauth_multi1.py new file mode 100644 index 0000000..e8fa23b --- /dev/null +++ b/vectorize_client/models/notion_oauth_multi1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.notionoauthmulti_auth_config import NOTIONOAUTHMULTIAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class NotionOauthMulti1(BaseModel): + """ + NotionOauthMulti1 + """ # noqa: E501 + config: Optional[NOTIONOAUTHMULTIAuthConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NotionOauthMulti1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotionOauthMulti1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": NOTIONOAUTHMULTIAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/notion_oauth_multi2.py b/vectorize_client/models/notion_oauth_multi2.py new file mode 100644 index 0000000..3c0111d --- /dev/null +++ b/vectorize_client/models/notion_oauth_multi2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class NotionOauthMulti2(BaseModel): + """ + NotionOauthMulti2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"NOTION_OAUTH_MULTI\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['NOTION_OAUTH_MULTI']): + raise ValueError("must be one of enum values ('NOTION_OAUTH_MULTI')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NotionOauthMulti2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotionOauthMulti2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/notion_oauth_multi_custom.py b/vectorize_client/models/notion_oauth_multi_custom.py new file mode 100644 index 0000000..ed74204 --- /dev/null +++ b/vectorize_client/models/notion_oauth_multi_custom.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.notionoauthmulticustom_auth_config import NOTIONOAUTHMULTICUSTOMAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class NotionOauthMultiCustom(BaseModel): + """ + NotionOauthMultiCustom + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"NOTION_OAUTH_MULTI_CUSTOM\")") + config: NOTIONOAUTHMULTICUSTOMAuthConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['NOTION_OAUTH_MULTI_CUSTOM']): + raise ValueError("must be one of enum values ('NOTION_OAUTH_MULTI_CUSTOM')") + 
return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NotionOauthMultiCustom from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotionOauthMultiCustom from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": NOTIONOAUTHMULTICUSTOMAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/notion_oauth_multi_custom1.py b/vectorize_client/models/notion_oauth_multi_custom1.py new file mode 100644 index 0000000..1ceb8a7 --- /dev/null +++ b/vectorize_client/models/notion_oauth_multi_custom1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.notionoauthmulticustom_auth_config import NOTIONOAUTHMULTICUSTOMAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class NotionOauthMultiCustom1(BaseModel): + """ + NotionOauthMultiCustom1 + """ # noqa: E501 + config: Optional[NOTIONOAUTHMULTICUSTOMAuthConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NotionOauthMultiCustom1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotionOauthMultiCustom1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": NOTIONOAUTHMULTICUSTOMAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/notion_oauth_multi_custom2.py b/vectorize_client/models/notion_oauth_multi_custom2.py new file mode 100644 index 0000000..2ffb755 --- /dev/null +++ b/vectorize_client/models/notion_oauth_multi_custom2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class NotionOauthMultiCustom2(BaseModel): + """ + NotionOauthMultiCustom2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"NOTION_OAUTH_MULTI_CUSTOM\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['NOTION_OAUTH_MULTI_CUSTOM']): + raise ValueError("must be one of enum values ('NOTION_OAUTH_MULTI_CUSTOM')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NotionOauthMultiCustom2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotionOauthMultiCustom2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/notionoauthmulti_auth_config.py b/vectorize_client/models/notionoauthmulti_auth_config.py new file mode 100644 index 0000000..6e3b554 --- /dev/null +++ b/vectorize_client/models/notionoauthmulti_auth_config.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NOTIONOAUTHMULTIAuthConfig(BaseModel): + """ + Authentication configuration for Notion Multi-User (Vectorize) + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + authorized_users: Optional[StrictStr] = Field(default=None, description="Authorized Users. 
Users who have authorized access to their Notion content", alias="authorized-users") + edited_users: Optional[StrictStr] = Field(default=None, alias="editedUsers") + deleted_users: Optional[StrictStr] = Field(default=None, alias="deletedUsers") + __properties: ClassVar[List[str]] = ["name", "authorized-users", "editedUsers", "deletedUsers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NOTIONOAUTHMULTIAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NOTIONOAUTHMULTIAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "authorized-users": obj.get("authorized-users"), + "editedUsers": obj.get("editedUsers"), + "deletedUsers": obj.get("deletedUsers") + }) + return _obj + + diff --git a/vectorize_client/models/notionoauthmulticustom_auth_config.py b/vectorize_client/models/notionoauthmulticustom_auth_config.py new file mode 100644 index 0000000..09552ff --- /dev/null +++ b/vectorize_client/models/notionoauthmulticustom_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NOTIONOAUTHMULTICUSTOMAuthConfig(BaseModel): + """ + Authentication configuration for Notion Multi-User (White Label) + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + client_id: StrictStr = Field(description="Notion Client ID. Example: Enter Client ID", alias="client-id") + client_secret: StrictStr = Field(description="Notion Client Secret. 
Example: Enter Client Secret", alias="client-secret") + authorized_users: Optional[StrictStr] = Field(default=None, description="Authorized Users", alias="authorized-users") + edited_users: Optional[StrictStr] = Field(default=None, alias="editedUsers") + deleted_users: Optional[StrictStr] = Field(default=None, alias="deletedUsers") + __properties: ClassVar[List[str]] = ["name", "client-id", "client-secret", "authorized-users", "editedUsers", "deletedUsers"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NOTIONOAUTHMULTICUSTOMAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NOTIONOAUTHMULTICUSTOMAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "client-id": obj.get("client-id"), + "client-secret": obj.get("client-secret"), + "authorized-users": obj.get("authorized-users"), + "editedUsers": obj.get("editedUsers"), + "deletedUsers": obj.get("deletedUsers") + }) + return _obj + + diff --git a/vectorize_client/models/one_drive.py b/vectorize_client/models/one_drive.py new file mode 100644 index 0000000..d76c776 --- /dev/null +++ b/vectorize_client/models/one_drive.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.onedrive_config import ONEDRIVEConfig +from typing import Optional, Set +from typing_extensions import Self + +class OneDrive(BaseModel): + """ + OneDrive + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"ONE_DRIVE\")") + config: ONEDRIVEConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['ONE_DRIVE']): + raise ValueError("must be one of enum values ('ONE_DRIVE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OneDrive from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OneDrive from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": ONEDRIVEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/one_drive1.py b/vectorize_client/models/one_drive1.py new file mode 100644 index 0000000..ff1dcc6 --- /dev/null +++ b/vectorize_client/models/one_drive1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.onedrive_config import ONEDRIVEConfig +from typing import Optional, Set +from typing_extensions import Self + +class OneDrive1(BaseModel): + """ + OneDrive1 + """ # noqa: E501 + config: Optional[ONEDRIVEConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OneDrive1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OneDrive1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": ONEDRIVEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/one_drive2.py b/vectorize_client/models/one_drive2.py new file mode 100644 index 0000000..49c35fe --- /dev/null +++ b/vectorize_client/models/one_drive2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class OneDrive2(BaseModel): + """ + OneDrive2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"ONE_DRIVE\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['ONE_DRIVE']): + raise ValueError("must be one of enum values ('ONE_DRIVE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OneDrive2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OneDrive2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/onedrive_auth_config.py b/vectorize_client/models/onedrive_auth_config.py new file mode 100644 index 0000000..a3689fe --- /dev/null +++ b/vectorize_client/models/onedrive_auth_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class ONEDRIVEAuthConfig(BaseModel): + """ + Authentication configuration for OneDrive + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + ms_client_id: StrictStr = Field(description="Client Id. Example: Enter Client Id", alias="ms-client-id") + ms_tenant_id: StrictStr = Field(description="Tenant Id. Example: Enter Tenant Id", alias="ms-tenant-id") + ms_client_secret: StrictStr = Field(description="Client Secret. Example: Enter Client Secret", alias="ms-client-secret") + users: StrictStr = Field(description="Users. Example: Enter users emails to import files from. 
Example: developer@vectorize.io") + __properties: ClassVar[List[str]] = ["name", "ms-client-id", "ms-tenant-id", "ms-client-secret", "users"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ONEDRIVEAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ONEDRIVEAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "ms-client-id": obj.get("ms-client-id"), + "ms-tenant-id": obj.get("ms-tenant-id"), + "ms-client-secret": obj.get("ms-client-secret"), + "users": obj.get("users") + }) + return _obj + + diff --git a/vectorize_client/models/onedrive_config.py b/vectorize_client/models/onedrive_config.py new file mode 100644 index 0000000..3a04ff2 --- /dev/null +++ b/vectorize_client/models/onedrive_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ONEDRIVEConfig(BaseModel): + """ + Configuration for OneDrive connector + """ # noqa: E501 + file_extensions: List[StrictStr] = Field(description="File Extensions", alias="file-extensions") + path_prefix: Optional[StrictStr] = Field(default=None, description="Read starting from this folder (optional). 
Example: Enter Folder path: /exampleFolder/subFolder", alias="path-prefix") + __properties: ClassVar[List[str]] = ["file-extensions", "path-prefix"] + + @field_validator('file_extensions') + def file_extensions_validate_enum(cls, value): + """Validates the enum""" + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ONEDRIVEConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ONEDRIVEConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file-extensions": obj.get("file-extensions"), + "path-prefix": obj.get("path-prefix") + }) + return _obj + + diff --git a/vectorize_client/models/openai.py b/vectorize_client/models/openai.py new file mode 100644 index 0000000..2fb58ae --- /dev/null +++ b/vectorize_client/models/openai.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.openai_auth_config import OPENAIAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class Openai(BaseModel): + """ + Openai + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"OPENAI\")") + config: OPENAIAuthConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['OPENAI']): + raise ValueError("must be one of enum values ('OPENAI')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + 
"""Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Openai from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Openai from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": OPENAIAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/openai1.py b/vectorize_client/models/openai1.py new file mode 100644 index 0000000..ba6712f --- /dev/null +++ b/vectorize_client/models/openai1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator 
(https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.openai_auth_config import OPENAIAuthConfig +from typing import Optional, Set +from typing_extensions import Self + +class Openai1(BaseModel): + """ + Openai1 + """ # noqa: E501 + config: Optional[OPENAIAuthConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Openai1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Openai1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": OPENAIAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/openai2.py b/vectorize_client/models/openai2.py new file mode 100644 index 0000000..ce58850 --- /dev/null +++ b/vectorize_client/models/openai2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Openai2(BaseModel): + """ + Openai2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"OPENAI\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['OPENAI']): + raise ValueError("must be one of enum values ('OPENAI')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Openai2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Openai2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/openai_auth_config.py b/vectorize_client/models/openai_auth_config.py new file mode 100644 index 0000000..6143b68 --- /dev/null +++ b/vectorize_client/models/openai_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class OPENAIAuthConfig(BaseModel): + """ + Authentication configuration for OpenAI + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your OpenAI integration") + key: Annotated[str, Field(strict=True)] = Field(description="API Key. 
Example: Enter your OpenAI API Key") + __properties: ClassVar[List[str]] = ["name", "key"] + + @field_validator('key') + def key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OPENAIAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OPENAIAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "key": obj.get("key") + }) + return _obj + + diff --git a/vectorize_client/models/pinecone.py b/vectorize_client/models/pinecone.py new file mode 100644 index 0000000..421c307 --- /dev/null +++ b/vectorize_client/models/pinecone.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.pinecone_config import PINECONEConfig +from typing import Optional, Set +from typing_extensions import Self + +class Pinecone(BaseModel): + """ + Pinecone + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"PINECONE\")") + config: PINECONEConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['PINECONE']): + raise ValueError("must be one of enum values ('PINECONE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string 
representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Pinecone from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Pinecone from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": PINECONEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/pinecone1.py b/vectorize_client/models/pinecone1.py new file mode 100644 index 0000000..39c4e84 --- /dev/null +++ b/vectorize_client/models/pinecone1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do 
not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.pinecone_config import PINECONEConfig +from typing import Optional, Set +from typing_extensions import Self + +class Pinecone1(BaseModel): + """ + Pinecone1 + """ # noqa: E501 + config: Optional[PINECONEConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Pinecone1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Pinecone1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": PINECONEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/pinecone2.py b/vectorize_client/models/pinecone2.py new file mode 100644 index 0000000..4a1bc54 --- /dev/null +++ b/vectorize_client/models/pinecone2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Pinecone2(BaseModel): + """ + Pinecone2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"PINECONE\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['PINECONE']): + raise ValueError("must be one of enum values ('PINECONE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Pinecone2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Pinecone2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/pinecone_auth_config.py b/vectorize_client/models/pinecone_auth_config.py new file mode 100644 index 0000000..1cc8043 --- /dev/null +++ b/vectorize_client/models/pinecone_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class PINECONEAuthConfig(BaseModel): + """ + Authentication configuration for Pinecone + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Pinecone integration") + api_key: Annotated[str, Field(strict=True)] = Field(description="API Key. 
Example: Enter your API Key", alias="api-key") + __properties: ClassVar[List[str]] = ["name", "api-key"] + + @field_validator('api_key') + def api_key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PINECONEAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PINECONEAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "api-key": obj.get("api-key") + }) + return _obj + + diff --git a/vectorize_client/models/pinecone_config.py b/vectorize_client/models/pinecone_config.py new file mode 100644 index 0000000..ffbc3e0 --- /dev/null +++ b/vectorize_client/models/pinecone_config.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class PINECONEConfig(BaseModel): + """ + Configuration for Pinecone connector + """ # noqa: E501 + index: Annotated[str, Field(strict=True, max_length=45)] = Field(description="Index Name. Example: Enter index name") + namespace: Optional[Annotated[str, Field(strict=True, max_length=45)]] = Field(default=None, description="Namespace. 
Example: Enter namespace") + __properties: ClassVar[List[str]] = ["index", "namespace"] + + @field_validator('index') + def index_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^(?!.*--)(?!^-)(?!.*-$)[a-z0-9-]+$", value): + raise ValueError(r"must validate the regular expression /^(?!.*--)(?!^-)(?!.*-$)[a-z0-9-]+$/") + return value + + @field_validator('namespace') + def namespace_validate_regular_expression(cls, value): + """Validates the regular expression""" + if value is None: + return value + + if not re.match(r"^(?!.*--)(?!^-)(?!.*-$)[a-z0-9-]+$", value): + raise ValueError(r"must validate the regular expression /^(?!.*--)(?!^-)(?!.*-$)[a-z0-9-]+$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PINECONEConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PINECONEConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "index": obj.get("index"), + "namespace": obj.get("namespace") + }) + return _obj + + diff --git a/vectorize_client/models/pipeline_ai_platform_request_inner.py b/vectorize_client/models/pipeline_ai_platform_request_inner.py new file mode 100644 index 0000000..947cab1 --- /dev/null +++ b/vectorize_client/models/pipeline_ai_platform_request_inner.py @@ -0,0 +1,165 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.bedrock2 import Bedrock2 +from vectorize_client.models.openai2 import Openai2 +from vectorize_client.models.vertex2 import Vertex2 +from vectorize_client.models.voyage2 import Voyage2 +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +PIPELINEAIPLATFORMREQUESTINNER_ONE_OF_SCHEMAS = ["Bedrock2", "Openai2", "Vertex2", "Voyage2"] + +class PipelineAIPlatformRequestInner(BaseModel): + """ + PipelineAIPlatformRequestInner + """ + # data type: Bedrock2 + oneof_schema_1_validator: Optional[Bedrock2] = None + # data type: Vertex2 + oneof_schema_2_validator: Optional[Vertex2] = None + # data type: Openai2 + oneof_schema_3_validator: Optional[Openai2] = None + # data type: Voyage2 + oneof_schema_4_validator: Optional[Voyage2] = None + actual_instance: Optional[Union[Bedrock2, Openai2, Vertex2, Voyage2]] = None + one_of_schemas: Set[str] = { "Bedrock2", "Openai2", "Vertex2", "Voyage2" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = PipelineAIPlatformRequestInner.model_construct() + error_messages = [] + match = 0 + # validate data type: Bedrock2 + if not isinstance(v, Bedrock2): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `Bedrock2`") + else: + match += 1 + # validate data type: Vertex2 + if not isinstance(v, Vertex2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Vertex2`") + else: + match += 1 + # validate data type: Openai2 + if not isinstance(v, Openai2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Openai2`") + else: + match += 1 + # validate data type: Voyage2 + if not isinstance(v, Voyage2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Voyage2`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in PipelineAIPlatformRequestInner with oneOf schemas: Bedrock2, Openai2, Vertex2, Voyage2. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in PipelineAIPlatformRequestInner with oneOf schemas: Bedrock2, Openai2, Vertex2, Voyage2. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into Bedrock2 + try: + instance.actual_instance = Bedrock2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Vertex2 + try: + instance.actual_instance = Vertex2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Openai2 + try: + instance.actual_instance = Openai2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Voyage2 + try: + instance.actual_instance = 
Voyage2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into PipelineAIPlatformRequestInner with oneOf schemas: Bedrock2, Openai2, Vertex2, Voyage2. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into PipelineAIPlatformRequestInner with oneOf schemas: Bedrock2, Openai2, Vertex2, Voyage2. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Bedrock2, Openai2, Vertex2, Voyage2]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/src/python/vectorize_client/models/pipeline_configuration_schema.py b/vectorize_client/models/pipeline_configuration_schema.py similarity index 66% rename from src/python/vectorize_client/models/pipeline_configuration_schema.py rename to vectorize_client/models/pipeline_configuration_schema.py index 977c150..67b4674 100644 --- a/src/python/vectorize_client/models/pipeline_configuration_schema.py +++ b/vectorize_client/models/pipeline_configuration_schema.py @@ -20,10 +20,10 @@ from 
pydantic import BaseModel, ConfigDict, Field from typing import Any, ClassVar, Dict, List from typing_extensions import Annotated -from vectorize_client.models.ai_platform_schema import AIPlatformSchema -from vectorize_client.models.destination_connector_schema import DestinationConnectorSchema +from vectorize_client.models.pipeline_ai_platform_request_inner import PipelineAIPlatformRequestInner +from vectorize_client.models.pipeline_destination_connector_request_inner import PipelineDestinationConnectorRequestInner +from vectorize_client.models.pipeline_source_connector_request_inner import PipelineSourceConnectorRequestInner from vectorize_client.models.schedule_schema import ScheduleSchema -from vectorize_client.models.source_connector_schema import SourceConnectorSchema from typing import Optional, Set from typing_extensions import Self @@ -31,9 +31,9 @@ class PipelineConfigurationSchema(BaseModel): """ PipelineConfigurationSchema """ # noqa: E501 - source_connectors: Annotated[List[SourceConnectorSchema], Field(min_length=1)] = Field(alias="sourceConnectors") - destination_connector: DestinationConnectorSchema = Field(alias="destinationConnector") - ai_platform: AIPlatformSchema = Field(alias="aiPlatform") + source_connectors: Annotated[List[PipelineSourceConnectorRequestInner], Field(min_length=1)] = Field(alias="sourceConnectors") + destination_connector: Annotated[List[PipelineDestinationConnectorRequestInner], Field(min_length=1)] = Field(alias="destinationConnector") + ai_platform: Annotated[List[PipelineAIPlatformRequestInner], Field(min_length=1)] = Field(alias="aiPlatform") pipeline_name: Annotated[str, Field(min_length=1, strict=True)] = Field(alias="pipelineName") schedule: ScheduleSchema __properties: ClassVar[List[str]] = ["sourceConnectors", "destinationConnector", "aiPlatform", "pipelineName", "schedule"] @@ -84,12 +84,20 @@ def to_dict(self) -> Dict[str, Any]: if _item_source_connectors: _items.append(_item_source_connectors.to_dict()) 
_dict['sourceConnectors'] = _items - # override the default output from pydantic by calling `to_dict()` of destination_connector + # override the default output from pydantic by calling `to_dict()` of each item in destination_connector (list) + _items = [] if self.destination_connector: - _dict['destinationConnector'] = self.destination_connector.to_dict() - # override the default output from pydantic by calling `to_dict()` of ai_platform + for _item_destination_connector in self.destination_connector: + if _item_destination_connector: + _items.append(_item_destination_connector.to_dict()) + _dict['destinationConnector'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in ai_platform (list) + _items = [] if self.ai_platform: - _dict['aiPlatform'] = self.ai_platform.to_dict() + for _item_ai_platform in self.ai_platform: + if _item_ai_platform: + _items.append(_item_ai_platform.to_dict()) + _dict['aiPlatform'] = _items # override the default output from pydantic by calling `to_dict()` of schedule if self.schedule: _dict['schedule'] = self.schedule.to_dict() @@ -105,9 +113,9 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return cls.model_validate(obj) _obj = cls.model_validate({ - "sourceConnectors": [SourceConnectorSchema.from_dict(_item) for _item in obj["sourceConnectors"]] if obj.get("sourceConnectors") is not None else None, - "destinationConnector": DestinationConnectorSchema.from_dict(obj["destinationConnector"]) if obj.get("destinationConnector") is not None else None, - "aiPlatform": AIPlatformSchema.from_dict(obj["aiPlatform"]) if obj.get("aiPlatform") is not None else None, + "sourceConnectors": [PipelineSourceConnectorRequestInner.from_dict(_item) for _item in obj["sourceConnectors"]] if obj.get("sourceConnectors") is not None else None, + "destinationConnector": [PipelineDestinationConnectorRequestInner.from_dict(_item) for _item in obj["destinationConnector"]] if 
obj.get("destinationConnector") is not None else None, + "aiPlatform": [PipelineAIPlatformRequestInner.from_dict(_item) for _item in obj["aiPlatform"]] if obj.get("aiPlatform") is not None else None, "pipelineName": obj.get("pipelineName"), "schedule": ScheduleSchema.from_dict(obj["schedule"]) if obj.get("schedule") is not None else None }) diff --git a/vectorize_client/models/pipeline_destination_connector_request_inner.py b/vectorize_client/models/pipeline_destination_connector_request_inner.py new file mode 100644 index 0000000..92f0c1f --- /dev/null +++ b/vectorize_client/models/pipeline_destination_connector_request_inner.py @@ -0,0 +1,277 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.azureaisearch2 import Azureaisearch2 +from vectorize_client.models.capella2 import Capella2 +from vectorize_client.models.datastax2 import Datastax2 +from vectorize_client.models.elastic2 import Elastic2 +from vectorize_client.models.milvus2 import Milvus2 +from vectorize_client.models.pinecone2 import Pinecone2 +from vectorize_client.models.postgresql2 import Postgresql2 +from vectorize_client.models.qdrant2 import Qdrant2 +from vectorize_client.models.singlestore2 import Singlestore2 +from vectorize_client.models.supabase2 import Supabase2 +from vectorize_client.models.turbopuffer2 import Turbopuffer2 +from vectorize_client.models.weaviate2 import Weaviate2 +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +PIPELINEDESTINATIONCONNECTORREQUESTINNER_ONE_OF_SCHEMAS 
= ["Azureaisearch2", "Capella2", "Datastax2", "Elastic2", "Milvus2", "Pinecone2", "Postgresql2", "Qdrant2", "Singlestore2", "Supabase2", "Turbopuffer2", "Weaviate2"] + +class PipelineDestinationConnectorRequestInner(BaseModel): + """ + PipelineDestinationConnectorRequestInner + """ + # data type: Capella2 + oneof_schema_1_validator: Optional[Capella2] = None + # data type: Datastax2 + oneof_schema_2_validator: Optional[Datastax2] = None + # data type: Elastic2 + oneof_schema_3_validator: Optional[Elastic2] = None + # data type: Pinecone2 + oneof_schema_4_validator: Optional[Pinecone2] = None + # data type: Singlestore2 + oneof_schema_5_validator: Optional[Singlestore2] = None + # data type: Milvus2 + oneof_schema_6_validator: Optional[Milvus2] = None + # data type: Postgresql2 + oneof_schema_7_validator: Optional[Postgresql2] = None + # data type: Qdrant2 + oneof_schema_8_validator: Optional[Qdrant2] = None + # data type: Supabase2 + oneof_schema_9_validator: Optional[Supabase2] = None + # data type: Weaviate2 + oneof_schema_10_validator: Optional[Weaviate2] = None + # data type: Azureaisearch2 + oneof_schema_11_validator: Optional[Azureaisearch2] = None + # data type: Turbopuffer2 + oneof_schema_12_validator: Optional[Turbopuffer2] = None + actual_instance: Optional[Union[Azureaisearch2, Capella2, Datastax2, Elastic2, Milvus2, Pinecone2, Postgresql2, Qdrant2, Singlestore2, Supabase2, Turbopuffer2, Weaviate2]] = None + one_of_schemas: Set[str] = { "Azureaisearch2", "Capella2", "Datastax2", "Elastic2", "Milvus2", "Pinecone2", "Postgresql2", "Qdrant2", "Singlestore2", "Supabase2", "Turbopuffer2", "Weaviate2" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be 
used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = PipelineDestinationConnectorRequestInner.model_construct() + error_messages = [] + match = 0 + # validate data type: Capella2 + if not isinstance(v, Capella2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Capella2`") + else: + match += 1 + # validate data type: Datastax2 + if not isinstance(v, Datastax2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Datastax2`") + else: + match += 1 + # validate data type: Elastic2 + if not isinstance(v, Elastic2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Elastic2`") + else: + match += 1 + # validate data type: Pinecone2 + if not isinstance(v, Pinecone2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Pinecone2`") + else: + match += 1 + # validate data type: Singlestore2 + if not isinstance(v, Singlestore2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Singlestore2`") + else: + match += 1 + # validate data type: Milvus2 + if not isinstance(v, Milvus2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Milvus2`") + else: + match += 1 + # validate data type: Postgresql2 + if not isinstance(v, Postgresql2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Postgresql2`") + else: + match += 1 + # validate data type: Qdrant2 + if not isinstance(v, Qdrant2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Qdrant2`") + else: + match += 1 + # validate data type: Supabase2 + if not isinstance(v, Supabase2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Supabase2`") + else: + match += 1 + # validate data type: Weaviate2 + if not isinstance(v, Weaviate2): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `Weaviate2`") + else: + match += 1 + # validate data type: Azureaisearch2 + if not isinstance(v, Azureaisearch2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Azureaisearch2`") + else: + match += 1 + # validate data type: Turbopuffer2 + if not isinstance(v, Turbopuffer2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Turbopuffer2`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in PipelineDestinationConnectorRequestInner with oneOf schemas: Azureaisearch2, Capella2, Datastax2, Elastic2, Milvus2, Pinecone2, Postgresql2, Qdrant2, Singlestore2, Supabase2, Turbopuffer2, Weaviate2. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in PipelineDestinationConnectorRequestInner with oneOf schemas: Azureaisearch2, Capella2, Datastax2, Elastic2, Milvus2, Pinecone2, Postgresql2, Qdrant2, Singlestore2, Supabase2, Turbopuffer2, Weaviate2. 
Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into Capella2 + try: + instance.actual_instance = Capella2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Datastax2 + try: + instance.actual_instance = Datastax2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Elastic2 + try: + instance.actual_instance = Elastic2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Pinecone2 + try: + instance.actual_instance = Pinecone2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Singlestore2 + try: + instance.actual_instance = Singlestore2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Milvus2 + try: + instance.actual_instance = Milvus2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Postgresql2 + try: + instance.actual_instance = Postgresql2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Qdrant2 + try: + instance.actual_instance = Qdrant2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Supabase2 + try: + instance.actual_instance = 
Supabase2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Weaviate2 + try: + instance.actual_instance = Weaviate2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Azureaisearch2 + try: + instance.actual_instance = Azureaisearch2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Turbopuffer2 + try: + instance.actual_instance = Turbopuffer2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into PipelineDestinationConnectorRequestInner with oneOf schemas: Azureaisearch2, Capella2, Datastax2, Elastic2, Milvus2, Pinecone2, Postgresql2, Qdrant2, Singlestore2, Supabase2, Turbopuffer2, Weaviate2. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into PipelineDestinationConnectorRequestInner with oneOf schemas: Azureaisearch2, Capella2, Datastax2, Elastic2, Milvus2, Pinecone2, Postgresql2, Qdrant2, Singlestore2, Supabase2, Turbopuffer2, Weaviate2. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Azureaisearch2, Capella2, Datastax2, Elastic2, Milvus2, Pinecone2, Postgresql2, Qdrant2, Singlestore2, Supabase2, Turbopuffer2, Weaviate2]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/src/python/vectorize_client/models/pipeline_events.py b/vectorize_client/models/pipeline_events.py similarity index 100% rename from src/python/vectorize_client/models/pipeline_events.py rename to vectorize_client/models/pipeline_events.py diff --git a/src/python/vectorize_client/models/pipeline_list_summary.py b/vectorize_client/models/pipeline_list_summary.py similarity index 100% rename from src/python/vectorize_client/models/pipeline_list_summary.py rename to vectorize_client/models/pipeline_list_summary.py diff --git a/src/python/vectorize_client/models/pipeline_metrics.py b/vectorize_client/models/pipeline_metrics.py similarity index 100% rename from src/python/vectorize_client/models/pipeline_metrics.py rename to vectorize_client/models/pipeline_metrics.py diff --git a/vectorize_client/models/pipeline_source_connector_request_inner.py b/vectorize_client/models/pipeline_source_connector_request_inner.py new file mode 100644 index 
0000000..4b9df13 --- /dev/null +++ b/vectorize_client/models/pipeline_source_connector_request_inner.py @@ -0,0 +1,445 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.amazon_s32 import AmazonS32 +from vectorize_client.models.azure_blob_storage2 import AzureBlobStorage2 +from vectorize_client.models.confluence2 import Confluence2 +from vectorize_client.models.discord2 import Discord2 +from vectorize_client.models.dropbox2 import Dropbox2 +from vectorize_client.models.dropbox_oauth2 import DropboxOauth2 +from vectorize_client.models.dropbox_oauth_multi2 import DropboxOauthMulti2 +from vectorize_client.models.dropbox_oauth_multi_custom2 import DropboxOauthMultiCustom2 +from vectorize_client.models.file_upload2 import FileUpload2 +from vectorize_client.models.firecrawl2 import Firecrawl2 +from vectorize_client.models.fireflies2 import Fireflies2 +from vectorize_client.models.github2 import Github2 +from vectorize_client.models.google_cloud_storage2 import GoogleCloudStorage2 +from vectorize_client.models.google_drive2 import GoogleDrive2 +from vectorize_client.models.google_drive_o_auth2 import GoogleDriveOAuth2 +from vectorize_client.models.google_drive_oauth_multi2 import GoogleDriveOauthMulti2 +from vectorize_client.models.google_drive_oauth_multi_custom2 import GoogleDriveOauthMultiCustom2 +from vectorize_client.models.intercom2 import Intercom2 +from vectorize_client.models.notion2 import Notion2 +from vectorize_client.models.notion_oauth_multi2 import NotionOauthMulti2 +from vectorize_client.models.notion_oauth_multi_custom2 import 
NotionOauthMultiCustom2 +from vectorize_client.models.one_drive2 import OneDrive2 +from vectorize_client.models.sharepoint2 import Sharepoint2 +from vectorize_client.models.web_crawler2 import WebCrawler2 +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +PIPELINESOURCECONNECTORREQUESTINNER_ONE_OF_SCHEMAS = ["AmazonS32", "AzureBlobStorage2", "Confluence2", "Discord2", "Dropbox2", "DropboxOauth2", "DropboxOauthMulti2", "DropboxOauthMultiCustom2", "FileUpload2", "Firecrawl2", "Fireflies2", "Github2", "GoogleCloudStorage2", "GoogleDrive2", "GoogleDriveOAuth2", "GoogleDriveOauthMulti2", "GoogleDriveOauthMultiCustom2", "Intercom2", "Notion2", "NotionOauthMulti2", "NotionOauthMultiCustom2", "OneDrive2", "Sharepoint2", "WebCrawler2"] + +class PipelineSourceConnectorRequestInner(BaseModel): + """ + PipelineSourceConnectorRequestInner + """ + # data type: AmazonS32 + oneof_schema_1_validator: Optional[AmazonS32] = None + # data type: AzureBlobStorage2 + oneof_schema_2_validator: Optional[AzureBlobStorage2] = None + # data type: Confluence2 + oneof_schema_3_validator: Optional[Confluence2] = None + # data type: Discord2 + oneof_schema_4_validator: Optional[Discord2] = None + # data type: Dropbox2 + oneof_schema_5_validator: Optional[Dropbox2] = None + # data type: DropboxOauth2 + oneof_schema_6_validator: Optional[DropboxOauth2] = None + # data type: DropboxOauthMulti2 + oneof_schema_7_validator: Optional[DropboxOauthMulti2] = None + # data type: DropboxOauthMultiCustom2 + oneof_schema_8_validator: Optional[DropboxOauthMultiCustom2] = None + # data type: GoogleDriveOAuth2 + oneof_schema_9_validator: Optional[GoogleDriveOAuth2] = None + # data type: GoogleDrive2 + oneof_schema_10_validator: Optional[GoogleDrive2] = None + # data type: GoogleDriveOauthMulti2 + oneof_schema_11_validator: Optional[GoogleDriveOauthMulti2] = None + # data type: GoogleDriveOauthMultiCustom2 + 
oneof_schema_12_validator: Optional[GoogleDriveOauthMultiCustom2] = None + # data type: Firecrawl2 + oneof_schema_13_validator: Optional[Firecrawl2] = None + # data type: GoogleCloudStorage2 + oneof_schema_14_validator: Optional[GoogleCloudStorage2] = None + # data type: Intercom2 + oneof_schema_15_validator: Optional[Intercom2] = None + # data type: Notion2 + oneof_schema_16_validator: Optional[Notion2] = None + # data type: NotionOauthMulti2 + oneof_schema_17_validator: Optional[NotionOauthMulti2] = None + # data type: NotionOauthMultiCustom2 + oneof_schema_18_validator: Optional[NotionOauthMultiCustom2] = None + # data type: OneDrive2 + oneof_schema_19_validator: Optional[OneDrive2] = None + # data type: Sharepoint2 + oneof_schema_20_validator: Optional[Sharepoint2] = None + # data type: WebCrawler2 + oneof_schema_21_validator: Optional[WebCrawler2] = None + # data type: FileUpload2 + oneof_schema_22_validator: Optional[FileUpload2] = None + # data type: Github2 + oneof_schema_23_validator: Optional[Github2] = None + # data type: Fireflies2 + oneof_schema_24_validator: Optional[Fireflies2] = None + actual_instance: Optional[Union[AmazonS32, AzureBlobStorage2, Confluence2, Discord2, Dropbox2, DropboxOauth2, DropboxOauthMulti2, DropboxOauthMultiCustom2, FileUpload2, Firecrawl2, Fireflies2, Github2, GoogleCloudStorage2, GoogleDrive2, GoogleDriveOAuth2, GoogleDriveOauthMulti2, GoogleDriveOauthMultiCustom2, Intercom2, Notion2, NotionOauthMulti2, NotionOauthMultiCustom2, OneDrive2, Sharepoint2, WebCrawler2]] = None + one_of_schemas: Set[str] = { "AmazonS32", "AzureBlobStorage2", "Confluence2", "Discord2", "Dropbox2", "DropboxOauth2", "DropboxOauthMulti2", "DropboxOauthMultiCustom2", "FileUpload2", "Firecrawl2", "Fireflies2", "Github2", "GoogleCloudStorage2", "GoogleDrive2", "GoogleDriveOAuth2", "GoogleDriveOauthMulti2", "GoogleDriveOauthMultiCustom2", "Intercom2", "Notion2", "NotionOauthMulti2", "NotionOauthMultiCustom2", "OneDrive2", "Sharepoint2", "WebCrawler2" } + 
+ model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = PipelineSourceConnectorRequestInner.model_construct() + error_messages = [] + match = 0 + # validate data type: AmazonS32 + if not isinstance(v, AmazonS32): + error_messages.append(f"Error! Input type `{type(v)}` is not `AmazonS32`") + else: + match += 1 + # validate data type: AzureBlobStorage2 + if not isinstance(v, AzureBlobStorage2): + error_messages.append(f"Error! Input type `{type(v)}` is not `AzureBlobStorage2`") + else: + match += 1 + # validate data type: Confluence2 + if not isinstance(v, Confluence2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Confluence2`") + else: + match += 1 + # validate data type: Discord2 + if not isinstance(v, Discord2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Discord2`") + else: + match += 1 + # validate data type: Dropbox2 + if not isinstance(v, Dropbox2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Dropbox2`") + else: + match += 1 + # validate data type: DropboxOauth2 + if not isinstance(v, DropboxOauth2): + error_messages.append(f"Error! Input type `{type(v)}` is not `DropboxOauth2`") + else: + match += 1 + # validate data type: DropboxOauthMulti2 + if not isinstance(v, DropboxOauthMulti2): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `DropboxOauthMulti2`") + else: + match += 1 + # validate data type: DropboxOauthMultiCustom2 + if not isinstance(v, DropboxOauthMultiCustom2): + error_messages.append(f"Error! Input type `{type(v)}` is not `DropboxOauthMultiCustom2`") + else: + match += 1 + # validate data type: GoogleDriveOAuth2 + if not isinstance(v, GoogleDriveOAuth2): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDriveOAuth2`") + else: + match += 1 + # validate data type: GoogleDrive2 + if not isinstance(v, GoogleDrive2): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDrive2`") + else: + match += 1 + # validate data type: GoogleDriveOauthMulti2 + if not isinstance(v, GoogleDriveOauthMulti2): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDriveOauthMulti2`") + else: + match += 1 + # validate data type: GoogleDriveOauthMultiCustom2 + if not isinstance(v, GoogleDriveOauthMultiCustom2): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDriveOauthMultiCustom2`") + else: + match += 1 + # validate data type: Firecrawl2 + if not isinstance(v, Firecrawl2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Firecrawl2`") + else: + match += 1 + # validate data type: GoogleCloudStorage2 + if not isinstance(v, GoogleCloudStorage2): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleCloudStorage2`") + else: + match += 1 + # validate data type: Intercom2 + if not isinstance(v, Intercom2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Intercom2`") + else: + match += 1 + # validate data type: Notion2 + if not isinstance(v, Notion2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Notion2`") + else: + match += 1 + # validate data type: NotionOauthMulti2 + if not isinstance(v, NotionOauthMulti2): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `NotionOauthMulti2`") + else: + match += 1 + # validate data type: NotionOauthMultiCustom2 + if not isinstance(v, NotionOauthMultiCustom2): + error_messages.append(f"Error! Input type `{type(v)}` is not `NotionOauthMultiCustom2`") + else: + match += 1 + # validate data type: OneDrive2 + if not isinstance(v, OneDrive2): + error_messages.append(f"Error! Input type `{type(v)}` is not `OneDrive2`") + else: + match += 1 + # validate data type: Sharepoint2 + if not isinstance(v, Sharepoint2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Sharepoint2`") + else: + match += 1 + # validate data type: WebCrawler2 + if not isinstance(v, WebCrawler2): + error_messages.append(f"Error! Input type `{type(v)}` is not `WebCrawler2`") + else: + match += 1 + # validate data type: FileUpload2 + if not isinstance(v, FileUpload2): + error_messages.append(f"Error! Input type `{type(v)}` is not `FileUpload2`") + else: + match += 1 + # validate data type: Github2 + if not isinstance(v, Github2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Github2`") + else: + match += 1 + # validate data type: Fireflies2 + if not isinstance(v, Fireflies2): + error_messages.append(f"Error! Input type `{type(v)}` is not `Fireflies2`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in PipelineSourceConnectorRequestInner with oneOf schemas: AmazonS32, AzureBlobStorage2, Confluence2, Discord2, Dropbox2, DropboxOauth2, DropboxOauthMulti2, DropboxOauthMultiCustom2, FileUpload2, Firecrawl2, Fireflies2, Github2, GoogleCloudStorage2, GoogleDrive2, GoogleDriveOAuth2, GoogleDriveOauthMulti2, GoogleDriveOauthMultiCustom2, Intercom2, Notion2, NotionOauthMulti2, NotionOauthMultiCustom2, OneDrive2, Sharepoint2, WebCrawler2. 
Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in PipelineSourceConnectorRequestInner with oneOf schemas: AmazonS32, AzureBlobStorage2, Confluence2, Discord2, Dropbox2, DropboxOauth2, DropboxOauthMulti2, DropboxOauthMultiCustom2, FileUpload2, Firecrawl2, Fireflies2, Github2, GoogleCloudStorage2, GoogleDrive2, GoogleDriveOAuth2, GoogleDriveOauthMulti2, GoogleDriveOauthMultiCustom2, Intercom2, Notion2, NotionOauthMulti2, NotionOauthMultiCustom2, OneDrive2, Sharepoint2, WebCrawler2. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into AmazonS32 + try: + instance.actual_instance = AmazonS32.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into AzureBlobStorage2 + try: + instance.actual_instance = AzureBlobStorage2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Confluence2 + try: + instance.actual_instance = Confluence2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Discord2 + try: + instance.actual_instance = Discord2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Dropbox2 + try: + instance.actual_instance = Dropbox2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DropboxOauth2 + try: + instance.actual_instance = 
DropboxOauth2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DropboxOauthMulti2 + try: + instance.actual_instance = DropboxOauthMulti2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DropboxOauthMultiCustom2 + try: + instance.actual_instance = DropboxOauthMultiCustom2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDriveOAuth2 + try: + instance.actual_instance = GoogleDriveOAuth2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDrive2 + try: + instance.actual_instance = GoogleDrive2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDriveOauthMulti2 + try: + instance.actual_instance = GoogleDriveOauthMulti2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDriveOauthMultiCustom2 + try: + instance.actual_instance = GoogleDriveOauthMultiCustom2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Firecrawl2 + try: + instance.actual_instance = Firecrawl2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleCloudStorage2 + try: + instance.actual_instance = GoogleCloudStorage2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Intercom2 + try: + instance.actual_instance = Intercom2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + 
error_messages.append(str(e)) + # deserialize data into Notion2 + try: + instance.actual_instance = Notion2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into NotionOauthMulti2 + try: + instance.actual_instance = NotionOauthMulti2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into NotionOauthMultiCustom2 + try: + instance.actual_instance = NotionOauthMultiCustom2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into OneDrive2 + try: + instance.actual_instance = OneDrive2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Sharepoint2 + try: + instance.actual_instance = Sharepoint2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into WebCrawler2 + try: + instance.actual_instance = WebCrawler2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into FileUpload2 + try: + instance.actual_instance = FileUpload2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Github2 + try: + instance.actual_instance = Github2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Fireflies2 + try: + instance.actual_instance = Fireflies2.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into PipelineSourceConnectorRequestInner with oneOf schemas: AmazonS32, 
AzureBlobStorage2, Confluence2, Discord2, Dropbox2, DropboxOauth2, DropboxOauthMulti2, DropboxOauthMultiCustom2, FileUpload2, Firecrawl2, Fireflies2, Github2, GoogleCloudStorage2, GoogleDrive2, GoogleDriveOAuth2, GoogleDriveOauthMulti2, GoogleDriveOauthMultiCustom2, Intercom2, Notion2, NotionOauthMulti2, NotionOauthMultiCustom2, OneDrive2, Sharepoint2, WebCrawler2. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into PipelineSourceConnectorRequestInner with oneOf schemas: AmazonS32, AzureBlobStorage2, Confluence2, Discord2, Dropbox2, DropboxOauth2, DropboxOauthMulti2, DropboxOauthMultiCustom2, FileUpload2, Firecrawl2, Fireflies2, Github2, GoogleCloudStorage2, GoogleDrive2, GoogleDriveOAuth2, GoogleDriveOauthMulti2, GoogleDriveOauthMultiCustom2, Intercom2, Notion2, NotionOauthMulti2, NotionOauthMultiCustom2, OneDrive2, Sharepoint2, WebCrawler2. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], AmazonS32, AzureBlobStorage2, Confluence2, Discord2, Dropbox2, DropboxOauth2, DropboxOauthMulti2, DropboxOauthMultiCustom2, FileUpload2, Firecrawl2, Fireflies2, Github2, GoogleCloudStorage2, GoogleDrive2, GoogleDriveOAuth2, GoogleDriveOauthMulti2, GoogleDriveOauthMultiCustom2, Intercom2, Notion2, NotionOauthMulti2, NotionOauthMultiCustom2, OneDrive2, Sharepoint2, WebCrawler2]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return 
self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/src/python/vectorize_client/models/pipeline_summary.py b/vectorize_client/models/pipeline_summary.py similarity index 100% rename from src/python/vectorize_client/models/pipeline_summary.py rename to vectorize_client/models/pipeline_summary.py diff --git a/vectorize_client/models/postgresql.py b/vectorize_client/models/postgresql.py new file mode 100644 index 0000000..8b08832 --- /dev/null +++ b/vectorize_client/models/postgresql.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.postgresql_config import POSTGRESQLConfig +from typing import Optional, Set +from typing_extensions import Self + +class Postgresql(BaseModel): + """ + Postgresql + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"POSTGRESQL\")") + config: POSTGRESQLConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['POSTGRESQL']): + raise ValueError("must be one of enum values ('POSTGRESQL')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string 
class Postgresql(BaseModel):
    """
    Request body for creating a PostgreSQL connector.
    """ # noqa: E501
    name: StrictStr = Field(description="Name of the connector")
    type: StrictStr = Field(description="Connector type (must be \"POSTGRESQL\")")
    config: POSTGRESQLConfig
    __properties: ClassVar[List[str]] = ["name", "type", "config"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Reject any value other than the 'POSTGRESQL' discriminator."""
        if value not in {'POSTGRESQL'}:
            raise ValueError("must be one of enum values ('POSTGRESQL')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return the pretty-printed string form of the model (aliases applied)."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the JSON representation of the model (aliases applied)."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of Postgresql from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict form of the model using aliases.

        ``None`` values that were never set are dropped, and the nested
        ``config`` is serialized through its own ``to_dict``.
        """
        data = self.model_dump(by_alias=True, exclude=set(), exclude_none=True)
        if self.config:
            data['config'] = self.config.to_dict()
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of Postgresql from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "name": obj.get("name"),
            "type": obj.get("type"),
            "config": POSTGRESQLConfig.from_dict(obj["config"]) if obj.get("config") is not None else None,
        })


class Postgresql1(BaseModel):
    """
    Wrapper carrying an optional PostgreSQL connector configuration.
    """ # noqa: E501
    config: Optional[POSTGRESQLConfig] = None
    __properties: ClassVar[List[str]] = ["config"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return the pretty-printed string form of the model (aliases applied)."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the JSON representation of the model (aliases applied)."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of Postgresql1 from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict form of the model using aliases.

        ``None`` values that were never set are dropped, and the nested
        ``config`` is serialized through its own ``to_dict``.
        """
        data = self.model_dump(by_alias=True, exclude=set(), exclude_none=True)
        if self.config:
            data['config'] = self.config.to_dict()
        return data

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of Postgresql1 from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "config": POSTGRESQLConfig.from_dict(obj["config"]) if obj.get("config") is not None else None,
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Postgresql2(BaseModel): + """ + Postgresql2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"POSTGRESQL\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['POSTGRESQL']): + raise ValueError("must be one of enum values ('POSTGRESQL')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Postgresql2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Postgresql2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/postgresql_auth_config.py b/vectorize_client/models/postgresql_auth_config.py new file mode 100644 index 0000000..1044a43 --- /dev/null +++ b/vectorize_client/models/postgresql_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class POSTGRESQLAuthConfig(BaseModel): + """ + Authentication configuration for PostgreSQL + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your PostgreSQL integration") + host: StrictStr = Field(description="Host. Example: Enter the host of the deployment") + port: Optional[Union[StrictFloat, StrictInt]] = Field(default=5432, description="Port. Example: Enter the port of the deployment") + database: StrictStr = Field(description="Database. Example: Enter the database name") + username: StrictStr = Field(description="Username. Example: Enter the username") + password: StrictStr = Field(description="Password. 
class POSTGRESQLAuthConfig(BaseModel):
    """
    Authentication configuration for PostgreSQL
    """ # noqa: E501
    name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your PostgreSQL integration")
    host: StrictStr = Field(description="Host. Example: Enter the host of the deployment")
    # Port accepts either int or float JSON numbers; defaults to PostgreSQL's
    # standard port 5432 when omitted.
    port: Optional[Union[StrictFloat, StrictInt]] = Field(default=5432, description="Port. Example: Enter the port of the deployment")
    database: StrictStr = Field(description="Database. Example: Enter the database name")
    username: StrictStr = Field(description="Username. Example: Enter the username")
    password: StrictStr = Field(description="Password. Example: Enter the username's password")
    __properties: ClassVar[List[str]] = ["name", "host", "port", "database", "username", "password"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return the pretty-printed string form of the model (aliases applied)."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the JSON representation of the model (aliases applied)."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of POSTGRESQLAuthConfig from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict form of the model using aliases; never-set ``None``
        fields are dropped."""
        return self.model_dump(by_alias=True, exclude=set(), exclude_none=True)

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of POSTGRESQLAuthConfig from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        port = obj.get("port")
        return cls.model_validate({
            "name": obj.get("name"),
            "host": obj.get("host"),
            # re-apply the schema default when the key is absent or null
            "port": port if port is not None else 5432,
            "database": obj.get("database"),
            "username": obj.get("username"),
            "password": obj.get("password"),
        })
class POSTGRESQLConfig(BaseModel):
    """
    Configuration for PostgreSQL connector
    """ # noqa: E501
    # NOTE(review): the description literal below was corrupted in the source
    # this block was recovered from; the text was reconstructed from the field
    # validator's character rules -- confirm against the OpenAPI spec.
    table: Annotated[str, Field(strict=True, max_length=45)] = Field(description="Table Name. Example: Enter the table name using lowercase letters, numbers, _ or .")
    __properties: ClassVar[List[str]] = ["table"]

    @field_validator('table')
    def table_validate_regular_expression(cls, value):
        """Reject table names that are SQL keywords, contain '--' or '-', or
        do not start with a lowercase letter followed by [a-z0-9._]."""
        if not re.match(r"^(?!\b(add|alter|all|and|any|as|asc|avg|between|case|check|column|commit|constraint|create|cross|database|default|delete|desc|distinct|drop|else|exists|false|from|full|group|having|in|index|inner|insert|is|join|key|left|like|limit|max|min|not|null|on|or|order|outer|primary|right|rollback|select|set|sum|table|true|union|unique|update|values|view|where)\b$)(?!.*--)(?!.*[-])[a-z][a-z0-9._]{0,44}$", value):
            raise ValueError(r"must validate the regular expression /^(?!\b(add|alter|all|and|any|as|asc|avg|between|case|check|column|commit|constraint|create|cross|database|default|delete|desc|distinct|drop|else|exists|false|from|full|group|having|in|index|inner|insert|is|join|key|left|like|limit|max|min|not|null|on|or|order|outer|primary|right|rollback|select|set|sum|table|true|union|unique|update|values|view|where)\b$)(?!.*--)(?!.*[-])[a-z][a-z0-9._]{0,44}$/")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return the pretty-printed string form of the model (aliases applied)."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the JSON representation of the model (aliases applied)."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of POSTGRESQLConfig from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict form of the model using aliases; never-set ``None``
        fields are dropped."""
        return self.model_dump(by_alias=True, exclude=set(), exclude_none=True)

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of POSTGRESQLConfig from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "table": obj.get("table"),
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.qdrant_config import QDRANTConfig +from typing import Optional, Set +from typing_extensions import Self + +class Qdrant(BaseModel): + """ + Qdrant + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"QDRANT\")") + config: QDRANTConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['QDRANT']): + raise ValueError("must be one of enum values ('QDRANT')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Qdrant from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Qdrant from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": QDRANTConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/qdrant1.py b/vectorize_client/models/qdrant1.py new file mode 100644 index 0000000..5f286be --- /dev/null +++ b/vectorize_client/models/qdrant1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.qdrant_config import QDRANTConfig +from typing import Optional, Set +from typing_extensions import Self + +class Qdrant1(BaseModel): + """ + Qdrant1 + """ # noqa: E501 + config: Optional[QDRANTConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Qdrant1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Qdrant1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": QDRANTConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/qdrant2.py b/vectorize_client/models/qdrant2.py new file mode 100644 index 0000000..eebf0ab --- /dev/null +++ b/vectorize_client/models/qdrant2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Qdrant2(BaseModel): + """ + Qdrant2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"QDRANT\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['QDRANT']): + raise ValueError("must be one of enum values ('QDRANT')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Qdrant2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Qdrant2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/qdrant_auth_config.py b/vectorize_client/models/qdrant_auth_config.py new file mode 100644 index 0000000..70c2bf6 --- /dev/null +++ b/vectorize_client/models/qdrant_auth_config.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class QDRANTAuthConfig(BaseModel): + """ + Authentication configuration for Qdrant + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Qdrant integration") + host: StrictStr = Field(description="Host. Example: Enter your host") + api_key: Annotated[str, Field(strict=True)] = Field(description="API Key. 
class QDRANTAuthConfig(BaseModel):
    """
    Authentication configuration for Qdrant
    """ # noqa: E501
    name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Qdrant integration")
    host: StrictStr = Field(description="Host. Example: Enter your host")
    # Serialized under the hyphenated wire name "api-key"; the Python
    # attribute stays snake_case.
    api_key: Annotated[str, Field(strict=True)] = Field(description="API Key. Example: Enter your API key", alias="api-key")
    __properties: ClassVar[List[str]] = ["name", "host", "api-key"]

    @field_validator('api_key')
    def api_key_validate_regular_expression(cls, value):
        """Reject keys with leading/trailing whitespace (single non-space
        character is allowed)."""
        if not re.match(r"^\S.*\S$|^\S$", value):
            raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return the pretty-printed string form of the model (aliases applied)."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the JSON representation of the model (aliases applied)."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of QDRANTAuthConfig from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict form of the model using aliases; never-set ``None``
        fields are dropped."""
        return self.model_dump(by_alias=True, exclude=set(), exclude_none=True)

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of QDRANTAuthConfig from a dict."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({
            "name": obj.get("name"),
            "host": obj.get("host"),
            "api-key": obj.get("api-key"),
        })
Example: Enter collection name") + __properties: ClassVar[List[str]] = ["collection"] + + @field_validator('collection') + def collection_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^[a-zA-Z0-9_-]*$", value): + raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9_-]*$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of QDRANTConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of QDRANTConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "collection": obj.get("collection") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/remove_user_from_source_connector_request.py b/vectorize_client/models/remove_user_from_source_connector_request.py similarity index 100% rename from src/python/vectorize_client/models/remove_user_from_source_connector_request.py rename to vectorize_client/models/remove_user_from_source_connector_request.py diff --git a/src/python/vectorize_client/models/remove_user_from_source_connector_response.py b/vectorize_client/models/remove_user_from_source_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/remove_user_from_source_connector_response.py rename to vectorize_client/models/remove_user_from_source_connector_response.py diff --git a/src/python/vectorize_client/models/retrieve_context.py b/vectorize_client/models/retrieve_context.py similarity index 100% rename from src/python/vectorize_client/models/retrieve_context.py rename to vectorize_client/models/retrieve_context.py diff --git a/src/python/vectorize_client/models/retrieve_context_message.py b/vectorize_client/models/retrieve_context_message.py similarity index 100% rename from src/python/vectorize_client/models/retrieve_context_message.py rename to vectorize_client/models/retrieve_context_message.py diff --git a/src/python/vectorize_client/models/retrieve_documents_request.py b/vectorize_client/models/retrieve_documents_request.py similarity index 85% rename from src/python/vectorize_client/models/retrieve_documents_request.py rename 
to vectorize_client/models/retrieve_documents_request.py index 1aa7981..87b5fb7 100644 --- a/src/python/vectorize_client/models/retrieve_documents_request.py +++ b/vectorize_client/models/retrieve_documents_request.py @@ -20,6 +20,7 @@ from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr from typing import Any, ClassVar, Dict, List, Optional, Union from typing_extensions import Annotated +from vectorize_client.models.advanced_query import AdvancedQuery from vectorize_client.models.retrieve_context import RetrieveContext from typing import Optional, Set from typing_extensions import Self @@ -33,7 +34,8 @@ class RetrieveDocumentsRequest(BaseModel): rerank: Optional[StrictBool] = True metadata_filters: Optional[List[Dict[str, Any]]] = Field(default=None, alias="metadata-filters") context: Optional[RetrieveContext] = None - __properties: ClassVar[List[str]] = ["question", "numResults", "rerank", "metadata-filters", "context"] + advanced_query: Optional[AdvancedQuery] = Field(default=None, alias="advanced-query") + __properties: ClassVar[List[str]] = ["question", "numResults", "rerank", "metadata-filters", "context", "advanced-query"] model_config = ConfigDict( populate_by_name=True, @@ -77,6 +79,9 @@ def to_dict(self) -> Dict[str, Any]: # override the default output from pydantic by calling `to_dict()` of context if self.context: _dict['context'] = self.context.to_dict() + # override the default output from pydantic by calling `to_dict()` of advanced_query + if self.advanced_query: + _dict['advanced-query'] = self.advanced_query.to_dict() return _dict @classmethod @@ -93,7 +98,8 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: "numResults": obj.get("numResults"), "rerank": obj.get("rerank") if obj.get("rerank") is not None else True, "metadata-filters": obj.get("metadata-filters"), - "context": RetrieveContext.from_dict(obj["context"]) if obj.get("context") is not None else None + "context": 
RetrieveContext.from_dict(obj["context"]) if obj.get("context") is not None else None, + "advanced-query": AdvancedQuery.from_dict(obj["advanced-query"]) if obj.get("advanced-query") is not None else None }) return _obj diff --git a/src/python/vectorize_client/models/retrieve_documents_response.py b/vectorize_client/models/retrieve_documents_response.py similarity index 100% rename from src/python/vectorize_client/models/retrieve_documents_response.py rename to vectorize_client/models/retrieve_documents_response.py diff --git a/src/python/vectorize_client/models/schedule_schema.py b/vectorize_client/models/schedule_schema.py similarity index 100% rename from src/python/vectorize_client/models/schedule_schema.py rename to vectorize_client/models/schedule_schema.py diff --git a/src/python/vectorize_client/models/schedule_schema_type.py b/vectorize_client/models/schedule_schema_type.py similarity index 100% rename from src/python/vectorize_client/models/schedule_schema_type.py rename to vectorize_client/models/schedule_schema_type.py diff --git a/vectorize_client/models/sharepoint.py b/vectorize_client/models/sharepoint.py new file mode 100644 index 0000000..3f8a869 --- /dev/null +++ b/vectorize_client/models/sharepoint.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.sharepoint_config import SHAREPOINTConfig +from typing import Optional, Set +from typing_extensions import Self + +class Sharepoint(BaseModel): + """ + Sharepoint + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"SHAREPOINT\")") + config: SHAREPOINTConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['SHAREPOINT']): + raise ValueError("must be one of enum values ('SHAREPOINT')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Sharepoint from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Sharepoint from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": SHAREPOINTConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/sharepoint1.py b/vectorize_client/models/sharepoint1.py new file mode 100644 index 0000000..03bcdfe --- /dev/null +++ b/vectorize_client/models/sharepoint1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.sharepoint_config import SHAREPOINTConfig +from typing import Optional, Set +from typing_extensions import Self + +class Sharepoint1(BaseModel): + """ + Sharepoint1 + """ # noqa: E501 + config: Optional[SHAREPOINTConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Sharepoint1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Sharepoint1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": SHAREPOINTConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/sharepoint2.py b/vectorize_client/models/sharepoint2.py new file mode 100644 index 0000000..5baf96b --- /dev/null +++ b/vectorize_client/models/sharepoint2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Sharepoint2(BaseModel): + """ + Sharepoint2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"SHAREPOINT\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['SHAREPOINT']): + raise ValueError("must be one of enum values ('SHAREPOINT')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Sharepoint2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Sharepoint2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/sharepoint_auth_config.py b/vectorize_client/models/sharepoint_auth_config.py new file mode 100644 index 0000000..7d5c63e --- /dev/null +++ b/vectorize_client/models/sharepoint_auth_config.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class SHAREPOINTAuthConfig(BaseModel): + """ + Authentication configuration for SharePoint + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + ms_client_id: StrictStr = Field(description="Client Id. Example: Enter Client Id", alias="ms-client-id") + ms_tenant_id: StrictStr = Field(description="Tenant Id. Example: Enter Tenant Id", alias="ms-tenant-id") + ms_client_secret: StrictStr = Field(description="Client Secret. 
Example: Enter Client Secret", alias="ms-client-secret") + __properties: ClassVar[List[str]] = ["name", "ms-client-id", "ms-tenant-id", "ms-client-secret"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SHAREPOINTAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SHAREPOINTAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "ms-client-id": obj.get("ms-client-id"), + "ms-tenant-id": obj.get("ms-tenant-id"), + "ms-client-secret": obj.get("ms-client-secret") + }) + return _obj + + diff --git a/vectorize_client/models/sharepoint_config.py b/vectorize_client/models/sharepoint_config.py new file mode 100644 index 0000000..67c0eba --- /dev/null +++ b/vectorize_client/models/sharepoint_config.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class SHAREPOINTConfig(BaseModel): + """ + Configuration for SharePoint connector + """ # noqa: E501 + file_extensions: List[StrictStr] = Field(description="File Extensions", alias="file-extensions") + sites: Optional[Annotated[str, Field(strict=True)]] = Field(default=None, description="Site Name(s). Example: Filter by site name. 
All sites if empty.") + __properties: ClassVar[List[str]] = ["file-extensions", "sites"] + + @field_validator('file_extensions') + def file_extensions_validate_enum(cls, value): + """Validates the enum""" + for i in value: + if i not in set([]): + raise ValueError("each list item must be one of ()") + return value + + @field_validator('sites') + def sites_validate_regular_expression(cls, value): + """Validates the regular expression""" + if value is None: + return value + + if not re.match(r"^(?!.*(https?:\/\/|www\.))[\w\s\-.]+$", value): + raise ValueError(r"must validate the regular expression /^(?!.*(https?:\/\/|www\.))[\w\s\-.]+$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SHAREPOINTConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SHAREPOINTConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file-extensions": obj.get("file-extensions"), + "sites": obj.get("sites") + }) + return _obj + + diff --git a/vectorize_client/models/singlestore.py b/vectorize_client/models/singlestore.py new file mode 100644 index 0000000..5bb28bf --- /dev/null +++ b/vectorize_client/models/singlestore.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.singlestore_config import SINGLESTOREConfig +from typing import Optional, Set +from typing_extensions import Self + +class Singlestore(BaseModel): + """ + Singlestore + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"SINGLESTORE\")") + config: SINGLESTOREConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['SINGLESTORE']): + raise ValueError("must be one of enum values ('SINGLESTORE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) 
+ + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Singlestore from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Singlestore from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": SINGLESTOREConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/singlestore1.py b/vectorize_client/models/singlestore1.py new file mode 100644 index 0000000..6c367db --- /dev/null +++ b/vectorize_client/models/singlestore1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 
+ Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.singlestore_config import SINGLESTOREConfig +from typing import Optional, Set +from typing_extensions import Self + +class Singlestore1(BaseModel): + """ + Singlestore1 + """ # noqa: E501 + config: Optional[SINGLESTOREConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Singlestore1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Singlestore1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": SINGLESTOREConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/singlestore2.py b/vectorize_client/models/singlestore2.py new file mode 100644 index 0000000..5cb20ae --- /dev/null +++ b/vectorize_client/models/singlestore2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Singlestore2(BaseModel): + """ + Singlestore2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"SINGLESTORE\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['SINGLESTORE']): + raise ValueError("must be one of enum values ('SINGLESTORE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Singlestore2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Singlestore2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/singlestore_auth_config.py b/vectorize_client/models/singlestore_auth_config.py new file mode 100644 index 0000000..00218aa --- /dev/null +++ b/vectorize_client/models/singlestore_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Union +from typing import Optional, Set +from typing_extensions import Self + +class SINGLESTOREAuthConfig(BaseModel): + """ + Authentication configuration for SingleStore + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your SingleStore integration") + host: StrictStr = Field(description="Host. Example: Enter the host of the deployment") + port: Union[StrictFloat, StrictInt] = Field(description="Port. Example: Enter the port of the deployment") + database: StrictStr = Field(description="Database. Example: Enter the database name") + username: StrictStr = Field(description="Username. Example: Enter the username") + password: StrictStr = Field(description="Password. 
Example: Enter the username's password") + __properties: ClassVar[List[str]] = ["name", "host", "port", "database", "username", "password"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SINGLESTOREAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SINGLESTOREAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "host": obj.get("host"), + "port": obj.get("port"), + "database": obj.get("database"), + "username": obj.get("username"), + "password": obj.get("password") + }) + return _obj + + diff --git a/vectorize_client/models/singlestore_config.py b/vectorize_client/models/singlestore_config.py new file mode 100644 index 0000000..87b5643 --- /dev/null +++ b/vectorize_client/models/singlestore_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class SINGLESTOREConfig(BaseModel): + """ + Configuration for SingleStore connector + """ # noqa: E501 + table: Annotated[str, Field(strict=True, max_length=45)] = Field(description="Table Name. 
Example: Enter table name") + __properties: ClassVar[List[str]] = ["table"] + + @field_validator('table') + def table_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^(?!\b(add|alter|all|and|any|as|asc|avg|between|case|check|column|commit|constraint|create|cross|database|default|delete|desc|distinct|drop|else|exists|false|from|full|group|having|in|index|inner|insert|is|join|key|left|like|limit|max|min|not|null|on|or|order|outer|primary|right|rollback|select|set|sum|table|true|union|unique|update|values|view|where)\b$)(?!.*--)(?!.*[-])[a-z][a-z0-9_]{0,44}$", value): + raise ValueError(r"must validate the regular expression /^(?!\b(add|alter|all|and|any|as|asc|avg|between|case|check|column|commit|constraint|create|cross|database|default|delete|desc|distinct|drop|else|exists|false|from|full|group|having|in|index|inner|insert|is|join|key|left|like|limit|max|min|not|null|on|or|order|outer|primary|right|rollback|select|set|sum|table|true|union|unique|update|values|view|where)\b$)(?!.*--)(?!.*[-])[a-z][a-z0-9_]{0,44}$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SINGLESTOREConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SINGLESTOREConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "table": obj.get("table") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/source_connector.py b/vectorize_client/models/source_connector.py similarity index 100% rename from src/python/vectorize_client/models/source_connector.py rename to vectorize_client/models/source_connector.py diff --git a/vectorize_client/models/source_connector_input.py b/vectorize_client/models/source_connector_input.py new file mode 100644 index 0000000..769a3cd --- /dev/null +++ b/vectorize_client/models/source_connector_input.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.source_connector_input_config import SourceConnectorInputConfig +from typing import Optional, Set +from typing_extensions import Self + +class SourceConnectorInput(BaseModel): + """ + Source connector configuration + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the source connector") + type: StrictStr = Field(description="Type of source connector") + config: SourceConnectorInputConfig + __properties: ClassVar[List[str]] = ["id", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['AWS_S3', 'AZURE_BLOB', 'CONFLUENCE', 'DISCORD', 'DROPBOX', 'DROPBOX_OAUTH', 'DROPBOX_OAUTH_MULTI', 'DROPBOX_OAUTH_MULTI_CUSTOM', 'GOOGLE_DRIVE_OAUTH', 'GOOGLE_DRIVE', 'GOOGLE_DRIVE_OAUTH_MULTI', 'GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM', 'FIRECRAWL', 'GCS', 'INTERCOM', 'NOTION', 'NOTION_OAUTH_MULTI', 'NOTION_OAUTH_MULTI_CUSTOM', 'ONE_DRIVE', 'SHAREPOINT', 'WEB_CRAWLER', 'FILE_UPLOAD', 'GITHUB', 'FIREFLIES']): + raise ValueError("must be one of enum values ('AWS_S3', 'AZURE_BLOB', 'CONFLUENCE', 'DISCORD', 'DROPBOX', 'DROPBOX_OAUTH', 'DROPBOX_OAUTH_MULTI', 'DROPBOX_OAUTH_MULTI_CUSTOM', 'GOOGLE_DRIVE_OAUTH', 'GOOGLE_DRIVE', 'GOOGLE_DRIVE_OAUTH_MULTI', 'GOOGLE_DRIVE_OAUTH_MULTI_CUSTOM', 'FIRECRAWL', 'GCS', 'INTERCOM', 'NOTION', 'NOTION_OAUTH_MULTI', 'NOTION_OAUTH_MULTI_CUSTOM', 'ONE_DRIVE', 'SHAREPOINT', 'WEB_CRAWLER', 'FILE_UPLOAD', 'GITHUB', 'FIREFLIES')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return 
pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SourceConnectorInput from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SourceConnectorInput from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type"), + "config": SourceConnectorInputConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/source_connector_input_config.py b/vectorize_client/models/source_connector_input_config.py new file mode 100644 index 0000000..5da8660 --- /dev/null +++ b/vectorize_client/models/source_connector_input_config.py @@ -0,0 +1,361 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator 
(https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.awss3_config import AWSS3Config +from vectorize_client.models.azureblob_config import AZUREBLOBConfig +from vectorize_client.models.confluence_config import CONFLUENCEConfig +from vectorize_client.models.discord_config import DISCORDConfig +from vectorize_client.models.dropbox_config import DROPBOXConfig +from vectorize_client.models.firecrawl_config import FIRECRAWLConfig +from vectorize_client.models.fireflies_config import FIREFLIESConfig +from vectorize_client.models.gcs_config import GCSConfig +from vectorize_client.models.github_config import GITHUBConfig +from vectorize_client.models.googledrive_config import GOOGLEDRIVEConfig +from vectorize_client.models.googledriveoauth_config import GOOGLEDRIVEOAUTHConfig +from vectorize_client.models.googledriveoauthmulti_config import GOOGLEDRIVEOAUTHMULTIConfig +from vectorize_client.models.googledriveoauthmulticustom_config import GOOGLEDRIVEOAUTHMULTICUSTOMConfig +from vectorize_client.models.intercom_config import INTERCOMConfig +from vectorize_client.models.notion_config import NOTIONConfig +from vectorize_client.models.onedrive_config import ONEDRIVEConfig +from vectorize_client.models.sharepoint_config import SHAREPOINTConfig +from vectorize_client.models.webcrawler_config import WEBCRAWLERConfig +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +SOURCECONNECTORINPUTCONFIG_ONE_OF_SCHEMAS = ["AWSS3Config", "AZUREBLOBConfig", "CONFLUENCEConfig", "DISCORDConfig", "DROPBOXConfig", "FIRECRAWLConfig", "FIREFLIESConfig", "GCSConfig", "GITHUBConfig", "GOOGLEDRIVEConfig", "GOOGLEDRIVEOAUTHConfig", 
"GOOGLEDRIVEOAUTHMULTICUSTOMConfig", "GOOGLEDRIVEOAUTHMULTIConfig", "INTERCOMConfig", "NOTIONConfig", "ONEDRIVEConfig", "SHAREPOINTConfig", "WEBCRAWLERConfig"] + +class SourceConnectorInputConfig(BaseModel): + """ + Configuration specific to the connector type + """ + # data type: AWSS3Config + oneof_schema_1_validator: Optional[AWSS3Config] = None + # data type: AZUREBLOBConfig + oneof_schema_2_validator: Optional[AZUREBLOBConfig] = None + # data type: CONFLUENCEConfig + oneof_schema_3_validator: Optional[CONFLUENCEConfig] = None + # data type: DISCORDConfig + oneof_schema_4_validator: Optional[DISCORDConfig] = None + # data type: DROPBOXConfig + oneof_schema_5_validator: Optional[DROPBOXConfig] = None + # data type: GOOGLEDRIVEOAUTHConfig + oneof_schema_6_validator: Optional[GOOGLEDRIVEOAUTHConfig] = None + # data type: GOOGLEDRIVEConfig + oneof_schema_7_validator: Optional[GOOGLEDRIVEConfig] = None + # data type: GOOGLEDRIVEOAUTHMULTIConfig + oneof_schema_8_validator: Optional[GOOGLEDRIVEOAUTHMULTIConfig] = None + # data type: GOOGLEDRIVEOAUTHMULTICUSTOMConfig + oneof_schema_9_validator: Optional[GOOGLEDRIVEOAUTHMULTICUSTOMConfig] = None + # data type: FIRECRAWLConfig + oneof_schema_10_validator: Optional[FIRECRAWLConfig] = None + # data type: GCSConfig + oneof_schema_11_validator: Optional[GCSConfig] = None + # data type: INTERCOMConfig + oneof_schema_12_validator: Optional[INTERCOMConfig] = None + # data type: NOTIONConfig + oneof_schema_13_validator: Optional[NOTIONConfig] = None + # data type: ONEDRIVEConfig + oneof_schema_14_validator: Optional[ONEDRIVEConfig] = None + # data type: SHAREPOINTConfig + oneof_schema_15_validator: Optional[SHAREPOINTConfig] = None + # data type: WEBCRAWLERConfig + oneof_schema_16_validator: Optional[WEBCRAWLERConfig] = None + # data type: GITHUBConfig + oneof_schema_17_validator: Optional[GITHUBConfig] = None + # data type: FIREFLIESConfig + oneof_schema_18_validator: Optional[FIREFLIESConfig] = None + actual_instance: 
Optional[Union[AWSS3Config, AZUREBLOBConfig, CONFLUENCEConfig, DISCORDConfig, DROPBOXConfig, FIRECRAWLConfig, FIREFLIESConfig, GCSConfig, GITHUBConfig, GOOGLEDRIVEConfig, GOOGLEDRIVEOAUTHConfig, GOOGLEDRIVEOAUTHMULTICUSTOMConfig, GOOGLEDRIVEOAUTHMULTIConfig, INTERCOMConfig, NOTIONConfig, ONEDRIVEConfig, SHAREPOINTConfig, WEBCRAWLERConfig]] = None + one_of_schemas: Set[str] = { "AWSS3Config", "AZUREBLOBConfig", "CONFLUENCEConfig", "DISCORDConfig", "DROPBOXConfig", "FIRECRAWLConfig", "FIREFLIESConfig", "GCSConfig", "GITHUBConfig", "GOOGLEDRIVEConfig", "GOOGLEDRIVEOAUTHConfig", "GOOGLEDRIVEOAUTHMULTICUSTOMConfig", "GOOGLEDRIVEOAUTHMULTIConfig", "INTERCOMConfig", "NOTIONConfig", "ONEDRIVEConfig", "SHAREPOINTConfig", "WEBCRAWLERConfig" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = SourceConnectorInputConfig.model_construct() + error_messages = [] + match = 0 + # validate data type: AWSS3Config + if not isinstance(v, AWSS3Config): + error_messages.append(f"Error! Input type `{type(v)}` is not `AWSS3Config`") + else: + match += 1 + # validate data type: AZUREBLOBConfig + if not isinstance(v, AZUREBLOBConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `AZUREBLOBConfig`") + else: + match += 1 + # validate data type: CONFLUENCEConfig + if not isinstance(v, CONFLUENCEConfig): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `CONFLUENCEConfig`") + else: + match += 1 + # validate data type: DISCORDConfig + if not isinstance(v, DISCORDConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `DISCORDConfig`") + else: + match += 1 + # validate data type: DROPBOXConfig + if not isinstance(v, DROPBOXConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `DROPBOXConfig`") + else: + match += 1 + # validate data type: GOOGLEDRIVEOAUTHConfig + if not isinstance(v, GOOGLEDRIVEOAUTHConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `GOOGLEDRIVEOAUTHConfig`") + else: + match += 1 + # validate data type: GOOGLEDRIVEConfig + if not isinstance(v, GOOGLEDRIVEConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `GOOGLEDRIVEConfig`") + else: + match += 1 + # validate data type: GOOGLEDRIVEOAUTHMULTIConfig + if not isinstance(v, GOOGLEDRIVEOAUTHMULTIConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `GOOGLEDRIVEOAUTHMULTIConfig`") + else: + match += 1 + # validate data type: GOOGLEDRIVEOAUTHMULTICUSTOMConfig + if not isinstance(v, GOOGLEDRIVEOAUTHMULTICUSTOMConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `GOOGLEDRIVEOAUTHMULTICUSTOMConfig`") + else: + match += 1 + # validate data type: FIRECRAWLConfig + if not isinstance(v, FIRECRAWLConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `FIRECRAWLConfig`") + else: + match += 1 + # validate data type: GCSConfig + if not isinstance(v, GCSConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `GCSConfig`") + else: + match += 1 + # validate data type: INTERCOMConfig + if not isinstance(v, INTERCOMConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `INTERCOMConfig`") + else: + match += 1 + # validate data type: NOTIONConfig + if not isinstance(v, NOTIONConfig): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `NOTIONConfig`") + else: + match += 1 + # validate data type: ONEDRIVEConfig + if not isinstance(v, ONEDRIVEConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `ONEDRIVEConfig`") + else: + match += 1 + # validate data type: SHAREPOINTConfig + if not isinstance(v, SHAREPOINTConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `SHAREPOINTConfig`") + else: + match += 1 + # validate data type: WEBCRAWLERConfig + if not isinstance(v, WEBCRAWLERConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `WEBCRAWLERConfig`") + else: + match += 1 + # validate data type: GITHUBConfig + if not isinstance(v, GITHUBConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `GITHUBConfig`") + else: + match += 1 + # validate data type: FIREFLIESConfig + if not isinstance(v, FIREFLIESConfig): + error_messages.append(f"Error! Input type `{type(v)}` is not `FIREFLIESConfig`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in SourceConnectorInputConfig with oneOf schemas: AWSS3Config, AZUREBLOBConfig, CONFLUENCEConfig, DISCORDConfig, DROPBOXConfig, FIRECRAWLConfig, FIREFLIESConfig, GCSConfig, GITHUBConfig, GOOGLEDRIVEConfig, GOOGLEDRIVEOAUTHConfig, GOOGLEDRIVEOAUTHMULTICUSTOMConfig, GOOGLEDRIVEOAUTHMULTIConfig, INTERCOMConfig, NOTIONConfig, ONEDRIVEConfig, SHAREPOINTConfig, WEBCRAWLERConfig. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in SourceConnectorInputConfig with oneOf schemas: AWSS3Config, AZUREBLOBConfig, CONFLUENCEConfig, DISCORDConfig, DROPBOXConfig, FIRECRAWLConfig, FIREFLIESConfig, GCSConfig, GITHUBConfig, GOOGLEDRIVEConfig, GOOGLEDRIVEOAUTHConfig, GOOGLEDRIVEOAUTHMULTICUSTOMConfig, GOOGLEDRIVEOAUTHMULTIConfig, INTERCOMConfig, NOTIONConfig, ONEDRIVEConfig, SHAREPOINTConfig, WEBCRAWLERConfig. 
Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into AWSS3Config + try: + instance.actual_instance = AWSS3Config.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into AZUREBLOBConfig + try: + instance.actual_instance = AZUREBLOBConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into CONFLUENCEConfig + try: + instance.actual_instance = CONFLUENCEConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DISCORDConfig + try: + instance.actual_instance = DISCORDConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DROPBOXConfig + try: + instance.actual_instance = DROPBOXConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GOOGLEDRIVEOAUTHConfig + try: + instance.actual_instance = GOOGLEDRIVEOAUTHConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GOOGLEDRIVEConfig + try: + instance.actual_instance = GOOGLEDRIVEConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GOOGLEDRIVEOAUTHMULTIConfig + try: + instance.actual_instance = GOOGLEDRIVEOAUTHMULTIConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + 
error_messages.append(str(e)) + # deserialize data into GOOGLEDRIVEOAUTHMULTICUSTOMConfig + try: + instance.actual_instance = GOOGLEDRIVEOAUTHMULTICUSTOMConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into FIRECRAWLConfig + try: + instance.actual_instance = FIRECRAWLConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GCSConfig + try: + instance.actual_instance = GCSConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into INTERCOMConfig + try: + instance.actual_instance = INTERCOMConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into NOTIONConfig + try: + instance.actual_instance = NOTIONConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into ONEDRIVEConfig + try: + instance.actual_instance = ONEDRIVEConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into SHAREPOINTConfig + try: + instance.actual_instance = SHAREPOINTConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into WEBCRAWLERConfig + try: + instance.actual_instance = WEBCRAWLERConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GITHUBConfig + try: + instance.actual_instance = GITHUBConfig.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into FIREFLIESConfig + try: + instance.actual_instance = FIREFLIESConfig.from_json(json_str) + match 
+= 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into SourceConnectorInputConfig with oneOf schemas: AWSS3Config, AZUREBLOBConfig, CONFLUENCEConfig, DISCORDConfig, DROPBOXConfig, FIRECRAWLConfig, FIREFLIESConfig, GCSConfig, GITHUBConfig, GOOGLEDRIVEConfig, GOOGLEDRIVEOAUTHConfig, GOOGLEDRIVEOAUTHMULTICUSTOMConfig, GOOGLEDRIVEOAUTHMULTIConfig, INTERCOMConfig, NOTIONConfig, ONEDRIVEConfig, SHAREPOINTConfig, WEBCRAWLERConfig. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into SourceConnectorInputConfig with oneOf schemas: AWSS3Config, AZUREBLOBConfig, CONFLUENCEConfig, DISCORDConfig, DROPBOXConfig, FIRECRAWLConfig, FIREFLIESConfig, GCSConfig, GITHUBConfig, GOOGLEDRIVEConfig, GOOGLEDRIVEOAUTHConfig, GOOGLEDRIVEOAUTHMULTICUSTOMConfig, GOOGLEDRIVEOAUTHMULTIConfig, INTERCOMConfig, NOTIONConfig, ONEDRIVEConfig, SHAREPOINTConfig, WEBCRAWLERConfig. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], AWSS3Config, AZUREBLOBConfig, CONFLUENCEConfig, DISCORDConfig, DROPBOXConfig, FIRECRAWLConfig, FIREFLIESConfig, GCSConfig, GITHUBConfig, GOOGLEDRIVEConfig, GOOGLEDRIVEOAUTHConfig, GOOGLEDRIVEOAUTHMULTICUSTOMConfig, GOOGLEDRIVEOAUTHMULTIConfig, INTERCOMConfig, NOTIONConfig, ONEDRIVEConfig, SHAREPOINTConfig, WEBCRAWLERConfig]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/src/python/vectorize_client/models/source_connector_schema.py b/vectorize_client/models/source_connector_schema.py similarity index 96% rename from src/python/vectorize_client/models/source_connector_schema.py rename to vectorize_client/models/source_connector_schema.py index 7b4b594..06679d3 100644 --- a/src/python/vectorize_client/models/source_connector_schema.py +++ b/vectorize_client/models/source_connector_schema.py @@ -18,7 +18,7 @@ import json from pydantic import BaseModel, ConfigDict, StrictStr -from typing import Any, ClassVar, Dict, List, Optional +from typing import Any, ClassVar, Dict, List from vectorize_client.models.source_connector_type import SourceConnectorType from typing import Optional, Set from typing_extensions import Self @@ -29,7 
+29,7 @@ class SourceConnectorSchema(BaseModel): """ # noqa: E501 id: StrictStr type: SourceConnectorType - config: Optional[Dict[str, Any]] = None + config: Dict[str, Any] __properties: ClassVar[List[str]] = ["id", "type", "config"] model_config = ConfigDict( diff --git a/src/python/vectorize_client/models/source_connector_type.py b/vectorize_client/models/source_connector_type.py similarity index 78% rename from src/python/vectorize_client/models/source_connector_type.py rename to vectorize_client/models/source_connector_type.py index 776dfaf..46ba5d1 100644 --- a/src/python/vectorize_client/models/source_connector_type.py +++ b/vectorize_client/models/source_connector_type.py @@ -31,6 +31,9 @@ class SourceConnectorType(str, Enum): CONFLUENCE = 'CONFLUENCE' DISCORD = 'DISCORD' DROPBOX = 'DROPBOX' + DROPBOX_OAUTH = 'DROPBOX_OAUTH' + DROPBOX_OAUTH_MULTI = 'DROPBOX_OAUTH_MULTI' + DROPBOX_OAUTH_MULTI_CUSTOM = 'DROPBOX_OAUTH_MULTI_CUSTOM' GOOGLE_DRIVE_OAUTH = 'GOOGLE_DRIVE_OAUTH' GOOGLE_DRIVE = 'GOOGLE_DRIVE' GOOGLE_DRIVE_OAUTH_MULTI = 'GOOGLE_DRIVE_OAUTH_MULTI' @@ -38,12 +41,15 @@ class SourceConnectorType(str, Enum): FIRECRAWL = 'FIRECRAWL' GCS = 'GCS' INTERCOM = 'INTERCOM' + NOTION = 'NOTION' + NOTION_OAUTH_MULTI = 'NOTION_OAUTH_MULTI' + NOTION_OAUTH_MULTI_CUSTOM = 'NOTION_OAUTH_MULTI_CUSTOM' ONE_DRIVE = 'ONE_DRIVE' SHAREPOINT = 'SHAREPOINT' WEB_CRAWLER = 'WEB_CRAWLER' FILE_UPLOAD = 'FILE_UPLOAD' - SALESFORCE = 'SALESFORCE' - ZENDESK = 'ZENDESK' + GITHUB = 'GITHUB' + FIREFLIES = 'FIREFLIES' @classmethod def from_json(cls, json_str: str) -> Self: diff --git a/src/python/vectorize_client/models/start_deep_research_request.py b/vectorize_client/models/start_deep_research_request.py similarity index 100% rename from src/python/vectorize_client/models/start_deep_research_request.py rename to vectorize_client/models/start_deep_research_request.py diff --git a/src/python/vectorize_client/models/start_deep_research_response.py 
b/vectorize_client/models/start_deep_research_response.py similarity index 100% rename from src/python/vectorize_client/models/start_deep_research_response.py rename to vectorize_client/models/start_deep_research_response.py diff --git a/src/python/vectorize_client/models/start_extraction_request.py b/vectorize_client/models/start_extraction_request.py similarity index 100% rename from src/python/vectorize_client/models/start_extraction_request.py rename to vectorize_client/models/start_extraction_request.py diff --git a/src/python/vectorize_client/models/start_extraction_response.py b/vectorize_client/models/start_extraction_response.py similarity index 100% rename from src/python/vectorize_client/models/start_extraction_response.py rename to vectorize_client/models/start_extraction_response.py diff --git a/src/python/vectorize_client/models/start_file_upload_request.py b/vectorize_client/models/start_file_upload_request.py similarity index 100% rename from src/python/vectorize_client/models/start_file_upload_request.py rename to vectorize_client/models/start_file_upload_request.py diff --git a/src/python/vectorize_client/models/start_file_upload_response.py b/vectorize_client/models/start_file_upload_response.py similarity index 100% rename from src/python/vectorize_client/models/start_file_upload_response.py rename to vectorize_client/models/start_file_upload_response.py diff --git a/src/python/vectorize_client/models/start_file_upload_to_connector_request.py b/vectorize_client/models/start_file_upload_to_connector_request.py similarity index 100% rename from src/python/vectorize_client/models/start_file_upload_to_connector_request.py rename to vectorize_client/models/start_file_upload_to_connector_request.py diff --git a/src/python/vectorize_client/models/start_file_upload_to_connector_response.py b/vectorize_client/models/start_file_upload_to_connector_response.py similarity index 100% rename from 
src/python/vectorize_client/models/start_file_upload_to_connector_response.py rename to vectorize_client/models/start_file_upload_to_connector_response.py diff --git a/src/python/vectorize_client/models/start_pipeline_response.py b/vectorize_client/models/start_pipeline_response.py similarity index 100% rename from src/python/vectorize_client/models/start_pipeline_response.py rename to vectorize_client/models/start_pipeline_response.py diff --git a/src/python/vectorize_client/models/stop_pipeline_response.py b/vectorize_client/models/stop_pipeline_response.py similarity index 100% rename from src/python/vectorize_client/models/stop_pipeline_response.py rename to vectorize_client/models/stop_pipeline_response.py diff --git a/vectorize_client/models/supabase.py b/vectorize_client/models/supabase.py new file mode 100644 index 0000000..37e2b20 --- /dev/null +++ b/vectorize_client/models/supabase.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.supabase_config import SUPABASEConfig +from typing import Optional, Set +from typing_extensions import Self + +class Supabase(BaseModel): + """ + Supabase + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"SUPABASE\")") + config: SUPABASEConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['SUPABASE']): + raise ValueError("must be one of enum values ('SUPABASE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Supabase from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Supabase from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": SUPABASEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/supabase1.py b/vectorize_client/models/supabase1.py new file mode 100644 index 0000000..ef64515 --- /dev/null +++ b/vectorize_client/models/supabase1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.supabase_config import SUPABASEConfig +from typing import Optional, Set +from typing_extensions import Self + +class Supabase1(BaseModel): + """ + Supabase1 + """ # noqa: E501 + config: Optional[SUPABASEConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Supabase1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Supabase1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": SUPABASEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/supabase2.py b/vectorize_client/models/supabase2.py new file mode 100644 index 0000000..c82cccd --- /dev/null +++ b/vectorize_client/models/supabase2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Supabase2(BaseModel): + """ + Supabase2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"SUPABASE\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['SUPABASE']): + raise ValueError("must be one of enum values ('SUPABASE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Supabase2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Supabase2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/supabase_auth_config.py b/vectorize_client/models/supabase_auth_config.py new file mode 100644 index 0000000..f5319a2 --- /dev/null +++ b/vectorize_client/models/supabase_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class SUPABASEAuthConfig(BaseModel): + """ + Authentication configuration for Supabase + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Supabase integration") + host: StrictStr = Field(description="Host. Example: Enter the host of the deployment") + port: Optional[Union[StrictFloat, StrictInt]] = Field(default=5432, description="Port. Example: Enter the port of the deployment") + database: StrictStr = Field(description="Database. Example: Enter the database name") + username: StrictStr = Field(description="Username. Example: Enter the username") + password: StrictStr = Field(description="Password. 
Example: Enter the username's password") + __properties: ClassVar[List[str]] = ["name", "host", "port", "database", "username", "password"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SUPABASEAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SUPABASEAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "host": obj.get("host") if obj.get("host") is not None else 'aws-0-us-east-1.pooler.supabase.com', + "port": obj.get("port") if obj.get("port") is not None else 5432, + "database": obj.get("database"), + "username": obj.get("username"), + "password": obj.get("password") + }) + return _obj + + diff --git a/vectorize_client/models/supabase_config.py b/vectorize_client/models/supabase_config.py new file mode 100644 index 0000000..5eee33a --- /dev/null +++ b/vectorize_client/models/supabase_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class SUPABASEConfig(BaseModel): + """ + Configuration for Supabase connector + """ # noqa: E501 + table: Annotated[str, Field(strict=True, max_length=45)] = Field(description="Table Name. Example: Enter
or .
") + __properties: ClassVar[List[str]] = ["table"] + + @field_validator('table') + def table_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^(?!\b(add|alter|all|and|any|as|asc|avg|between|case|check|column|commit|constraint|create|cross|database|default|delete|desc|distinct|drop|else|exists|false|from|full|group|having|in|index|inner|insert|is|join|key|left|like|limit|max|min|not|null|on|or|order|outer|primary|right|rollback|select|set|sum|table|true|union|unique|update|values|view|where)\b$)(?!.*--)(?!.*[-])[a-z][a-z0-9._]{0,44}$", value): + raise ValueError(r"must validate the regular expression /^(?!\b(add|alter|all|and|any|as|asc|avg|between|case|check|column|commit|constraint|create|cross|database|default|delete|desc|distinct|drop|else|exists|false|from|full|group|having|in|index|inner|insert|is|join|key|left|like|limit|max|min|not|null|on|or|order|outer|primary|right|rollback|select|set|sum|table|true|union|unique|update|values|view|where)\b$)(?!.*--)(?!.*[-])[a-z][a-z0-9._]{0,44}$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SUPABASEConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SUPABASEConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "table": obj.get("table") + }) + return _obj + + diff --git a/vectorize_client/models/turbopuffer.py b/vectorize_client/models/turbopuffer.py new file mode 100644 index 0000000..3720c00 --- /dev/null +++ b/vectorize_client/models/turbopuffer.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.turbopuffer_config import TURBOPUFFERConfig +from typing import Optional, Set +from typing_extensions import Self + +class Turbopuffer(BaseModel): + """ + Turbopuffer + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"TURBOPUFFER\")") + config: TURBOPUFFERConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['TURBOPUFFER']): + raise ValueError("must be one of enum values ('TURBOPUFFER')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Turbopuffer from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Turbopuffer from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": TURBOPUFFERConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/turbopuffer1.py b/vectorize_client/models/turbopuffer1.py new file mode 100644 index 0000000..0a4f2c2 --- /dev/null +++ b/vectorize_client/models/turbopuffer1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.turbopuffer_config import TURBOPUFFERConfig +from typing import Optional, Set +from typing_extensions import Self + +class Turbopuffer1(BaseModel): + """ + Turbopuffer1 + """ # noqa: E501 + config: Optional[TURBOPUFFERConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Turbopuffer1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Turbopuffer1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": TURBOPUFFERConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/turbopuffer2.py b/vectorize_client/models/turbopuffer2.py new file mode 100644 index 0000000..83d2253 --- /dev/null +++ b/vectorize_client/models/turbopuffer2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Turbopuffer2(BaseModel): + """ + Turbopuffer2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"TURBOPUFFER\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['TURBOPUFFER']): + raise ValueError("must be one of enum values ('TURBOPUFFER')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Turbopuffer2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Turbopuffer2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/turbopuffer_auth_config.py b/vectorize_client/models/turbopuffer_auth_config.py new file mode 100644 index 0000000..5353247 --- /dev/null +++ b/vectorize_client/models/turbopuffer_auth_config.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class TURBOPUFFERAuthConfig(BaseModel): + """ + Authentication configuration for Turbopuffer + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Turbopuffer integration") + api_key: Annotated[str, Field(strict=True)] = Field(description="API Key. 
Example: Enter your API key", alias="api-key") + __properties: ClassVar[List[str]] = ["name", "api-key"] + + @field_validator('api_key') + def api_key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of TURBOPUFFERAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of TURBOPUFFERAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "api-key": obj.get("api-key") + }) + return _obj + + diff --git a/src/python/vectorize_client/models/update_destination_connector_request.py b/vectorize_client/models/turbopuffer_config.py similarity index 81% rename from src/python/vectorize_client/models/update_destination_connector_request.py rename to vectorize_client/models/turbopuffer_config.py index 6c366a1..63fc518 100644 --- a/src/python/vectorize_client/models/update_destination_connector_request.py +++ b/vectorize_client/models/turbopuffer_config.py @@ -17,17 +17,17 @@ import re # noqa: F401 import json -from pydantic import BaseModel, ConfigDict +from pydantic import BaseModel, ConfigDict, Field, StrictStr from typing import Any, ClassVar, Dict, List from typing import Optional, Set from typing_extensions import Self -class UpdateDestinationConnectorRequest(BaseModel): +class TURBOPUFFERConfig(BaseModel): """ - UpdateDestinationConnectorRequest + Configuration for Turbopuffer connector """ # noqa: E501 - config: Dict[str, Any] - __properties: ClassVar[List[str]] = ["config"] + namespace: StrictStr = Field(description="Namespace. 
Example: Enter namespace name") + __properties: ClassVar[List[str]] = ["namespace"] model_config = ConfigDict( populate_by_name=True, @@ -47,7 +47,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str: str) -> Optional[Self]: - """Create an instance of UpdateDestinationConnectorRequest from a JSON string""" + """Create an instance of TURBOPUFFERConfig from a JSON string""" return cls.from_dict(json.loads(json_str)) def to_dict(self) -> Dict[str, Any]: @@ -72,7 +72,7 @@ def to_dict(self) -> Dict[str, Any]: @classmethod def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: - """Create an instance of UpdateDestinationConnectorRequest from a dict""" + """Create an instance of TURBOPUFFERConfig from a dict""" if obj is None: return None @@ -80,7 +80,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: return cls.model_validate(obj) _obj = cls.model_validate({ - "config": obj.get("config") + "namespace": obj.get("namespace") }) return _obj diff --git a/src/python/vectorize_client/models/update_ai_platform_connector_request.py b/vectorize_client/models/update_ai_platform_connector_request.py similarity index 100% rename from src/python/vectorize_client/models/update_ai_platform_connector_request.py rename to vectorize_client/models/update_ai_platform_connector_request.py diff --git a/src/python/vectorize_client/models/update_ai_platform_connector_response.py b/vectorize_client/models/update_ai_platform_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/update_ai_platform_connector_response.py rename to vectorize_client/models/update_ai_platform_connector_response.py diff --git a/vectorize_client/models/update_aiplatform_connector_request0.py b/vectorize_client/models/update_aiplatform_connector_request0.py new file mode 100644 index 0000000..8b2a930 --- /dev/null +++ b/vectorize_client/models/update_aiplatform_connector_request0.py @@ -0,0 +1,165 @@ +# coding: utf-8 + +""" + 
Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.bedrock1 import Bedrock1 +from vectorize_client.models.openai1 import Openai1 +from vectorize_client.models.vertex1 import Vertex1 +from vectorize_client.models.voyage1 import Voyage1 +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +UPDATEAIPLATFORMCONNECTORREQUEST_ONE_OF_SCHEMAS = ["Bedrock1", "Openai1", "Vertex1", "Voyage1"] + +class UpdateAiplatformConnectorRequest(BaseModel): + """ + UpdateAiplatformConnectorRequest + """ + # data type: Bedrock1 + oneof_schema_1_validator: Optional[Bedrock1] = None + # data type: Vertex1 + oneof_schema_2_validator: Optional[Vertex1] = None + # data type: Openai1 + oneof_schema_3_validator: Optional[Openai1] = None + # data type: Voyage1 + oneof_schema_4_validator: Optional[Voyage1] = None + actual_instance: Optional[Union[Bedrock1, Openai1, Vertex1, Voyage1]] = None + one_of_schemas: Set[str] = { "Bedrock1", "Openai1", "Vertex1", "Voyage1" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = 
UpdateAiplatformConnectorRequest.model_construct() + error_messages = [] + match = 0 + # validate data type: Bedrock1 + if not isinstance(v, Bedrock1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Bedrock1`") + else: + match += 1 + # validate data type: Vertex1 + if not isinstance(v, Vertex1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Vertex1`") + else: + match += 1 + # validate data type: Openai1 + if not isinstance(v, Openai1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Openai1`") + else: + match += 1 + # validate data type: Voyage1 + if not isinstance(v, Voyage1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Voyage1`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in UpdateAiplatformConnectorRequest with oneOf schemas: Bedrock1, Openai1, Vertex1, Voyage1. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in UpdateAiplatformConnectorRequest with oneOf schemas: Bedrock1, Openai1, Vertex1, Voyage1. 
Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into Bedrock1 + try: + instance.actual_instance = Bedrock1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Vertex1 + try: + instance.actual_instance = Vertex1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Openai1 + try: + instance.actual_instance = Openai1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Voyage1 + try: + instance.actual_instance = Voyage1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into UpdateAiplatformConnectorRequest with oneOf schemas: Bedrock1, Openai1, Vertex1, Voyage1. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into UpdateAiplatformConnectorRequest with oneOf schemas: Bedrock1, Openai1, Vertex1, Voyage1. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Bedrock1, Openai1, Vertex1, Voyage1]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/vectorize_client/models/update_destination_connector_request.py b/vectorize_client/models/update_destination_connector_request.py new file mode 100644 index 0000000..e2fe959 --- /dev/null +++ b/vectorize_client/models/update_destination_connector_request.py @@ -0,0 +1,277 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.azureaisearch1 import Azureaisearch1 +from vectorize_client.models.capella1 import Capella1 +from vectorize_client.models.datastax1 import Datastax1 +from vectorize_client.models.elastic1 import Elastic1 +from vectorize_client.models.milvus1 import Milvus1 +from vectorize_client.models.pinecone1 import Pinecone1 +from vectorize_client.models.postgresql1 import Postgresql1 +from vectorize_client.models.qdrant1 import Qdrant1 +from vectorize_client.models.singlestore1 import Singlestore1 +from vectorize_client.models.supabase1 import Supabase1 +from vectorize_client.models.turbopuffer1 import Turbopuffer1 +from vectorize_client.models.weaviate1 import Weaviate1 +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +UPDATEDESTINATIONCONNECTORREQUEST_ONE_OF_SCHEMAS = ["Azureaisearch1", "Capella1", "Datastax1", "Elastic1", "Milvus1", "Pinecone1", "Postgresql1", "Qdrant1", "Singlestore1", "Supabase1", "Turbopuffer1", "Weaviate1"] + +class UpdateDestinationConnectorRequest(BaseModel): + """ + UpdateDestinationConnectorRequest + """ + # data type: Capella1 + oneof_schema_1_validator: Optional[Capella1] = None + # data type: Datastax1 + oneof_schema_2_validator: Optional[Datastax1] = None + # data type: Elastic1 + oneof_schema_3_validator: Optional[Elastic1] = None + # data type: Pinecone1 + oneof_schema_4_validator: Optional[Pinecone1] = None + # data type: Singlestore1 + oneof_schema_5_validator: Optional[Singlestore1] = None + # data type: Milvus1 + oneof_schema_6_validator: Optional[Milvus1] = None + # data type: Postgresql1 + oneof_schema_7_validator: Optional[Postgresql1] = None + # data type: Qdrant1 + oneof_schema_8_validator: 
Optional[Qdrant1] = None + # data type: Supabase1 + oneof_schema_9_validator: Optional[Supabase1] = None + # data type: Weaviate1 + oneof_schema_10_validator: Optional[Weaviate1] = None + # data type: Azureaisearch1 + oneof_schema_11_validator: Optional[Azureaisearch1] = None + # data type: Turbopuffer1 + oneof_schema_12_validator: Optional[Turbopuffer1] = None + actual_instance: Optional[Union[Azureaisearch1, Capella1, Datastax1, Elastic1, Milvus1, Pinecone1, Postgresql1, Qdrant1, Singlestore1, Supabase1, Turbopuffer1, Weaviate1]] = None + one_of_schemas: Set[str] = { "Azureaisearch1", "Capella1", "Datastax1", "Elastic1", "Milvus1", "Pinecone1", "Postgresql1", "Qdrant1", "Singlestore1", "Supabase1", "Turbopuffer1", "Weaviate1" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = UpdateDestinationConnectorRequest.model_construct() + error_messages = [] + match = 0 + # validate data type: Capella1 + if not isinstance(v, Capella1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Capella1`") + else: + match += 1 + # validate data type: Datastax1 + if not isinstance(v, Datastax1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Datastax1`") + else: + match += 1 + # validate data type: Elastic1 + if not isinstance(v, Elastic1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Elastic1`") + else: + match += 1 + # validate data type: Pinecone1 + if not isinstance(v, Pinecone1): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `Pinecone1`") + else: + match += 1 + # validate data type: Singlestore1 + if not isinstance(v, Singlestore1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Singlestore1`") + else: + match += 1 + # validate data type: Milvus1 + if not isinstance(v, Milvus1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Milvus1`") + else: + match += 1 + # validate data type: Postgresql1 + if not isinstance(v, Postgresql1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Postgresql1`") + else: + match += 1 + # validate data type: Qdrant1 + if not isinstance(v, Qdrant1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Qdrant1`") + else: + match += 1 + # validate data type: Supabase1 + if not isinstance(v, Supabase1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Supabase1`") + else: + match += 1 + # validate data type: Weaviate1 + if not isinstance(v, Weaviate1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Weaviate1`") + else: + match += 1 + # validate data type: Azureaisearch1 + if not isinstance(v, Azureaisearch1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Azureaisearch1`") + else: + match += 1 + # validate data type: Turbopuffer1 + if not isinstance(v, Turbopuffer1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Turbopuffer1`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in UpdateDestinationConnectorRequest with oneOf schemas: Azureaisearch1, Capella1, Datastax1, Elastic1, Milvus1, Pinecone1, Postgresql1, Qdrant1, Singlestore1, Supabase1, Turbopuffer1, Weaviate1. 
Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in UpdateDestinationConnectorRequest with oneOf schemas: Azureaisearch1, Capella1, Datastax1, Elastic1, Milvus1, Pinecone1, Postgresql1, Qdrant1, Singlestore1, Supabase1, Turbopuffer1, Weaviate1. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into Capella1 + try: + instance.actual_instance = Capella1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Datastax1 + try: + instance.actual_instance = Datastax1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Elastic1 + try: + instance.actual_instance = Elastic1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Pinecone1 + try: + instance.actual_instance = Pinecone1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Singlestore1 + try: + instance.actual_instance = Singlestore1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Milvus1 + try: + instance.actual_instance = Milvus1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Postgresql1 + try: + instance.actual_instance = Postgresql1.from_json(json_str) + match += 1 + except (ValidationError, 
ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Qdrant1 + try: + instance.actual_instance = Qdrant1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Supabase1 + try: + instance.actual_instance = Supabase1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Weaviate1 + try: + instance.actual_instance = Weaviate1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Azureaisearch1 + try: + instance.actual_instance = Azureaisearch1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Turbopuffer1 + try: + instance.actual_instance = Turbopuffer1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into UpdateDestinationConnectorRequest with oneOf schemas: Azureaisearch1, Capella1, Datastax1, Elastic1, Milvus1, Pinecone1, Postgresql1, Qdrant1, Singlestore1, Supabase1, Turbopuffer1, Weaviate1. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into UpdateDestinationConnectorRequest with oneOf schemas: Azureaisearch1, Capella1, Datastax1, Elastic1, Milvus1, Pinecone1, Postgresql1, Qdrant1, Singlestore1, Supabase1, Turbopuffer1, Weaviate1. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Azureaisearch1, Capella1, Datastax1, Elastic1, Milvus1, Pinecone1, Postgresql1, Qdrant1, Singlestore1, Supabase1, Turbopuffer1, Weaviate1]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/src/python/vectorize_client/models/update_destination_connector_response.py b/vectorize_client/models/update_destination_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/update_destination_connector_response.py rename to vectorize_client/models/update_destination_connector_response.py diff --git a/vectorize_client/models/update_source_connector_request.py b/vectorize_client/models/update_source_connector_request.py new file mode 100644 index 0000000..ae6248f --- /dev/null +++ b/vectorize_client/models/update_source_connector_request.py @@ -0,0 +1,445 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from vectorize_client.models.amazon_s31 import AmazonS31 +from vectorize_client.models.azure_blob_storage1 import AzureBlobStorage1 +from vectorize_client.models.confluence1 import Confluence1 +from vectorize_client.models.discord1 import Discord1 +from vectorize_client.models.dropbox1 import Dropbox1 +from vectorize_client.models.dropbox_oauth1 import DropboxOauth1 +from vectorize_client.models.dropbox_oauth_multi1 import DropboxOauthMulti1 +from vectorize_client.models.dropbox_oauth_multi_custom1 import DropboxOauthMultiCustom1 +from vectorize_client.models.file_upload1 import FileUpload1 +from vectorize_client.models.firecrawl1 import Firecrawl1 +from vectorize_client.models.fireflies1 import Fireflies1 +from vectorize_client.models.github1 import Github1 +from vectorize_client.models.google_cloud_storage1 import GoogleCloudStorage1 +from vectorize_client.models.google_drive1 import GoogleDrive1 +from vectorize_client.models.google_drive_o_auth1 import GoogleDriveOAuth1 +from vectorize_client.models.google_drive_oauth_multi1 import GoogleDriveOauthMulti1 +from vectorize_client.models.google_drive_oauth_multi_custom1 import GoogleDriveOauthMultiCustom1 +from vectorize_client.models.intercom1 import Intercom1 +from vectorize_client.models.notion1 import Notion1 +from vectorize_client.models.notion_oauth_multi1 import NotionOauthMulti1 +from vectorize_client.models.notion_oauth_multi_custom1 import NotionOauthMultiCustom1 +from vectorize_client.models.one_drive1 import OneDrive1 +from vectorize_client.models.sharepoint1 import Sharepoint1 +from vectorize_client.models.web_crawler1 import WebCrawler1 +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + 
+UPDATESOURCECONNECTORREQUEST_ONE_OF_SCHEMAS = ["AmazonS31", "AzureBlobStorage1", "Confluence1", "Discord1", "Dropbox1", "DropboxOauth1", "DropboxOauthMulti1", "DropboxOauthMultiCustom1", "FileUpload1", "Firecrawl1", "Fireflies1", "Github1", "GoogleCloudStorage1", "GoogleDrive1", "GoogleDriveOAuth1", "GoogleDriveOauthMulti1", "GoogleDriveOauthMultiCustom1", "Intercom1", "Notion1", "NotionOauthMulti1", "NotionOauthMultiCustom1", "OneDrive1", "Sharepoint1", "WebCrawler1"] + +class UpdateSourceConnectorRequest(BaseModel): + """ + UpdateSourceConnectorRequest + """ + # data type: AmazonS31 + oneof_schema_1_validator: Optional[AmazonS31] = None + # data type: AzureBlobStorage1 + oneof_schema_2_validator: Optional[AzureBlobStorage1] = None + # data type: Confluence1 + oneof_schema_3_validator: Optional[Confluence1] = None + # data type: Discord1 + oneof_schema_4_validator: Optional[Discord1] = None + # data type: Dropbox1 + oneof_schema_5_validator: Optional[Dropbox1] = None + # data type: DropboxOauth1 + oneof_schema_6_validator: Optional[DropboxOauth1] = None + # data type: DropboxOauthMulti1 + oneof_schema_7_validator: Optional[DropboxOauthMulti1] = None + # data type: DropboxOauthMultiCustom1 + oneof_schema_8_validator: Optional[DropboxOauthMultiCustom1] = None + # data type: GoogleDriveOAuth1 + oneof_schema_9_validator: Optional[GoogleDriveOAuth1] = None + # data type: GoogleDrive1 + oneof_schema_10_validator: Optional[GoogleDrive1] = None + # data type: GoogleDriveOauthMulti1 + oneof_schema_11_validator: Optional[GoogleDriveOauthMulti1] = None + # data type: GoogleDriveOauthMultiCustom1 + oneof_schema_12_validator: Optional[GoogleDriveOauthMultiCustom1] = None + # data type: Firecrawl1 + oneof_schema_13_validator: Optional[Firecrawl1] = None + # data type: GoogleCloudStorage1 + oneof_schema_14_validator: Optional[GoogleCloudStorage1] = None + # data type: Intercom1 + oneof_schema_15_validator: Optional[Intercom1] = None + # data type: Notion1 + 
oneof_schema_16_validator: Optional[Notion1] = None + # data type: NotionOauthMulti1 + oneof_schema_17_validator: Optional[NotionOauthMulti1] = None + # data type: NotionOauthMultiCustom1 + oneof_schema_18_validator: Optional[NotionOauthMultiCustom1] = None + # data type: OneDrive1 + oneof_schema_19_validator: Optional[OneDrive1] = None + # data type: Sharepoint1 + oneof_schema_20_validator: Optional[Sharepoint1] = None + # data type: WebCrawler1 + oneof_schema_21_validator: Optional[WebCrawler1] = None + # data type: FileUpload1 + oneof_schema_22_validator: Optional[FileUpload1] = None + # data type: Github1 + oneof_schema_23_validator: Optional[Github1] = None + # data type: Fireflies1 + oneof_schema_24_validator: Optional[Fireflies1] = None + actual_instance: Optional[Union[AmazonS31, AzureBlobStorage1, Confluence1, Discord1, Dropbox1, DropboxOauth1, DropboxOauthMulti1, DropboxOauthMultiCustom1, FileUpload1, Firecrawl1, Fireflies1, Github1, GoogleCloudStorage1, GoogleDrive1, GoogleDriveOAuth1, GoogleDriveOauthMulti1, GoogleDriveOauthMultiCustom1, Intercom1, Notion1, NotionOauthMulti1, NotionOauthMultiCustom1, OneDrive1, Sharepoint1, WebCrawler1]] = None + one_of_schemas: Set[str] = { "AmazonS31", "AzureBlobStorage1", "Confluence1", "Discord1", "Dropbox1", "DropboxOauth1", "DropboxOauthMulti1", "DropboxOauthMultiCustom1", "FileUpload1", "Firecrawl1", "Fireflies1", "Github1", "GoogleCloudStorage1", "GoogleDrive1", "GoogleDriveOAuth1", "GoogleDriveOauthMulti1", "GoogleDriveOauthMultiCustom1", "Intercom1", "Notion1", "NotionOauthMulti1", "NotionOauthMultiCustom1", "OneDrive1", "Sharepoint1", "WebCrawler1" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be 
used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = UpdateSourceConnectorRequest.model_construct() + error_messages = [] + match = 0 + # validate data type: AmazonS31 + if not isinstance(v, AmazonS31): + error_messages.append(f"Error! Input type `{type(v)}` is not `AmazonS31`") + else: + match += 1 + # validate data type: AzureBlobStorage1 + if not isinstance(v, AzureBlobStorage1): + error_messages.append(f"Error! Input type `{type(v)}` is not `AzureBlobStorage1`") + else: + match += 1 + # validate data type: Confluence1 + if not isinstance(v, Confluence1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Confluence1`") + else: + match += 1 + # validate data type: Discord1 + if not isinstance(v, Discord1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Discord1`") + else: + match += 1 + # validate data type: Dropbox1 + if not isinstance(v, Dropbox1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Dropbox1`") + else: + match += 1 + # validate data type: DropboxOauth1 + if not isinstance(v, DropboxOauth1): + error_messages.append(f"Error! Input type `{type(v)}` is not `DropboxOauth1`") + else: + match += 1 + # validate data type: DropboxOauthMulti1 + if not isinstance(v, DropboxOauthMulti1): + error_messages.append(f"Error! Input type `{type(v)}` is not `DropboxOauthMulti1`") + else: + match += 1 + # validate data type: DropboxOauthMultiCustom1 + if not isinstance(v, DropboxOauthMultiCustom1): + error_messages.append(f"Error! Input type `{type(v)}` is not `DropboxOauthMultiCustom1`") + else: + match += 1 + # validate data type: GoogleDriveOAuth1 + if not isinstance(v, GoogleDriveOAuth1): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `GoogleDriveOAuth1`") + else: + match += 1 + # validate data type: GoogleDrive1 + if not isinstance(v, GoogleDrive1): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDrive1`") + else: + match += 1 + # validate data type: GoogleDriveOauthMulti1 + if not isinstance(v, GoogleDriveOauthMulti1): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDriveOauthMulti1`") + else: + match += 1 + # validate data type: GoogleDriveOauthMultiCustom1 + if not isinstance(v, GoogleDriveOauthMultiCustom1): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleDriveOauthMultiCustom1`") + else: + match += 1 + # validate data type: Firecrawl1 + if not isinstance(v, Firecrawl1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Firecrawl1`") + else: + match += 1 + # validate data type: GoogleCloudStorage1 + if not isinstance(v, GoogleCloudStorage1): + error_messages.append(f"Error! Input type `{type(v)}` is not `GoogleCloudStorage1`") + else: + match += 1 + # validate data type: Intercom1 + if not isinstance(v, Intercom1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Intercom1`") + else: + match += 1 + # validate data type: Notion1 + if not isinstance(v, Notion1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Notion1`") + else: + match += 1 + # validate data type: NotionOauthMulti1 + if not isinstance(v, NotionOauthMulti1): + error_messages.append(f"Error! Input type `{type(v)}` is not `NotionOauthMulti1`") + else: + match += 1 + # validate data type: NotionOauthMultiCustom1 + if not isinstance(v, NotionOauthMultiCustom1): + error_messages.append(f"Error! Input type `{type(v)}` is not `NotionOauthMultiCustom1`") + else: + match += 1 + # validate data type: OneDrive1 + if not isinstance(v, OneDrive1): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `OneDrive1`") + else: + match += 1 + # validate data type: Sharepoint1 + if not isinstance(v, Sharepoint1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Sharepoint1`") + else: + match += 1 + # validate data type: WebCrawler1 + if not isinstance(v, WebCrawler1): + error_messages.append(f"Error! Input type `{type(v)}` is not `WebCrawler1`") + else: + match += 1 + # validate data type: FileUpload1 + if not isinstance(v, FileUpload1): + error_messages.append(f"Error! Input type `{type(v)}` is not `FileUpload1`") + else: + match += 1 + # validate data type: Github1 + if not isinstance(v, Github1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Github1`") + else: + match += 1 + # validate data type: Fireflies1 + if not isinstance(v, Fireflies1): + error_messages.append(f"Error! Input type `{type(v)}` is not `Fireflies1`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in UpdateSourceConnectorRequest with oneOf schemas: AmazonS31, AzureBlobStorage1, Confluence1, Discord1, Dropbox1, DropboxOauth1, DropboxOauthMulti1, DropboxOauthMultiCustom1, FileUpload1, Firecrawl1, Fireflies1, Github1, GoogleCloudStorage1, GoogleDrive1, GoogleDriveOAuth1, GoogleDriveOauthMulti1, GoogleDriveOauthMultiCustom1, Intercom1, Notion1, NotionOauthMulti1, NotionOauthMultiCustom1, OneDrive1, Sharepoint1, WebCrawler1. 
Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in UpdateSourceConnectorRequest with oneOf schemas: AmazonS31, AzureBlobStorage1, Confluence1, Discord1, Dropbox1, DropboxOauth1, DropboxOauthMulti1, DropboxOauthMultiCustom1, FileUpload1, Firecrawl1, Fireflies1, Github1, GoogleCloudStorage1, GoogleDrive1, GoogleDriveOAuth1, GoogleDriveOauthMulti1, GoogleDriveOauthMultiCustom1, Intercom1, Notion1, NotionOauthMulti1, NotionOauthMultiCustom1, OneDrive1, Sharepoint1, WebCrawler1. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into AmazonS31 + try: + instance.actual_instance = AmazonS31.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into AzureBlobStorage1 + try: + instance.actual_instance = AzureBlobStorage1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Confluence1 + try: + instance.actual_instance = Confluence1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Discord1 + try: + instance.actual_instance = Discord1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Dropbox1 + try: + instance.actual_instance = Dropbox1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DropboxOauth1 + try: + instance.actual_instance = 
DropboxOauth1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DropboxOauthMulti1 + try: + instance.actual_instance = DropboxOauthMulti1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DropboxOauthMultiCustom1 + try: + instance.actual_instance = DropboxOauthMultiCustom1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDriveOAuth1 + try: + instance.actual_instance = GoogleDriveOAuth1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDrive1 + try: + instance.actual_instance = GoogleDrive1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDriveOauthMulti1 + try: + instance.actual_instance = GoogleDriveOauthMulti1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleDriveOauthMultiCustom1 + try: + instance.actual_instance = GoogleDriveOauthMultiCustom1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Firecrawl1 + try: + instance.actual_instance = Firecrawl1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into GoogleCloudStorage1 + try: + instance.actual_instance = GoogleCloudStorage1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Intercom1 + try: + instance.actual_instance = Intercom1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + 
error_messages.append(str(e)) + # deserialize data into Notion1 + try: + instance.actual_instance = Notion1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into NotionOauthMulti1 + try: + instance.actual_instance = NotionOauthMulti1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into NotionOauthMultiCustom1 + try: + instance.actual_instance = NotionOauthMultiCustom1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into OneDrive1 + try: + instance.actual_instance = OneDrive1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Sharepoint1 + try: + instance.actual_instance = Sharepoint1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into WebCrawler1 + try: + instance.actual_instance = WebCrawler1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into FileUpload1 + try: + instance.actual_instance = FileUpload1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Github1 + try: + instance.actual_instance = Github1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Fireflies1 + try: + instance.actual_instance = Fireflies1.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into UpdateSourceConnectorRequest with oneOf schemas: AmazonS31, 
AzureBlobStorage1, Confluence1, Discord1, Dropbox1, DropboxOauth1, DropboxOauthMulti1, DropboxOauthMultiCustom1, FileUpload1, Firecrawl1, Fireflies1, Github1, GoogleCloudStorage1, GoogleDrive1, GoogleDriveOAuth1, GoogleDriveOauthMulti1, GoogleDriveOauthMultiCustom1, Intercom1, Notion1, NotionOauthMulti1, NotionOauthMultiCustom1, OneDrive1, Sharepoint1, WebCrawler1. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into UpdateSourceConnectorRequest with oneOf schemas: AmazonS31, AzureBlobStorage1, Confluence1, Discord1, Dropbox1, DropboxOauth1, DropboxOauthMulti1, DropboxOauthMultiCustom1, FileUpload1, Firecrawl1, Fireflies1, Github1, GoogleCloudStorage1, GoogleDrive1, GoogleDriveOAuth1, GoogleDriveOauthMulti1, GoogleDriveOauthMultiCustom1, Intercom1, Notion1, NotionOauthMulti1, NotionOauthMultiCustom1, OneDrive1, Sharepoint1, WebCrawler1. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], AmazonS31, AzureBlobStorage1, Confluence1, Discord1, Dropbox1, DropboxOauth1, DropboxOauthMulti1, DropboxOauthMultiCustom1, FileUpload1, Firecrawl1, Fireflies1, Github1, GoogleCloudStorage1, GoogleDrive1, GoogleDriveOAuth1, GoogleDriveOauthMulti1, GoogleDriveOauthMultiCustom1, Intercom1, Notion1, NotionOauthMulti1, NotionOauthMultiCustom1, OneDrive1, Sharepoint1, WebCrawler1]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return 
self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + + diff --git a/src/python/vectorize_client/models/update_source_connector_response.py b/vectorize_client/models/update_source_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/update_source_connector_response.py rename to vectorize_client/models/update_source_connector_response.py diff --git a/src/python/vectorize_client/models/update_source_connector_response_data.py b/vectorize_client/models/update_source_connector_response_data.py similarity index 100% rename from src/python/vectorize_client/models/update_source_connector_response_data.py rename to vectorize_client/models/update_source_connector_response_data.py diff --git a/src/python/vectorize_client/models/update_user_in_source_connector_request.py b/vectorize_client/models/update_user_in_source_connector_request.py similarity index 75% rename from src/python/vectorize_client/models/update_user_in_source_connector_request.py rename to vectorize_client/models/update_user_in_source_connector_request.py index fd194e2..619f644 100644 --- a/src/python/vectorize_client/models/update_user_in_source_connector_request.py +++ b/vectorize_client/models/update_user_in_source_connector_request.py @@ -19,7 +19,7 @@ from pydantic import BaseModel, ConfigDict, Field, StrictStr from typing import Any, ClassVar, Dict, List, Optional -from vectorize_client.models.add_user_to_source_connector_request_selected_files_value import AddUserToSourceConnectorRequestSelectedFilesValue +from vectorize_client.models.add_user_to_source_connector_request_selected_files import AddUserToSourceConnectorRequestSelectedFiles from typing import Optional, Set from typing_extensions import Self @@ -28,9 +28,10 @@ class UpdateUserInSourceConnectorRequest(BaseModel): 
UpdateUserInSourceConnectorRequest """ # noqa: E501 user_id: StrictStr = Field(alias="userId") - selected_files: Optional[Dict[str, AddUserToSourceConnectorRequestSelectedFilesValue]] = Field(default=None, alias="selectedFiles") + selected_files: Optional[AddUserToSourceConnectorRequestSelectedFiles] = Field(default=None, alias="selectedFiles") refresh_token: Optional[StrictStr] = Field(default=None, alias="refreshToken") - __properties: ClassVar[List[str]] = ["userId", "selectedFiles", "refreshToken"] + access_token: Optional[StrictStr] = Field(default=None, alias="accessToken") + __properties: ClassVar[List[str]] = ["userId", "selectedFiles", "refreshToken", "accessToken"] model_config = ConfigDict( populate_by_name=True, @@ -71,13 +72,9 @@ def to_dict(self) -> Dict[str, Any]: exclude=excluded_fields, exclude_none=True, ) - # override the default output from pydantic by calling `to_dict()` of each value in selected_files (dict) - _field_dict = {} + # override the default output from pydantic by calling `to_dict()` of selected_files if self.selected_files: - for _key_selected_files in self.selected_files: - if self.selected_files[_key_selected_files]: - _field_dict[_key_selected_files] = self.selected_files[_key_selected_files].to_dict() - _dict['selectedFiles'] = _field_dict + _dict['selectedFiles'] = self.selected_files.to_dict() return _dict @classmethod @@ -91,13 +88,9 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: _obj = cls.model_validate({ "userId": obj.get("userId"), - "selectedFiles": dict( - (_k, AddUserToSourceConnectorRequestSelectedFilesValue.from_dict(_v)) - for _k, _v in obj["selectedFiles"].items() - ) - if obj.get("selectedFiles") is not None - else None, - "refreshToken": obj.get("refreshToken") + "selectedFiles": AddUserToSourceConnectorRequestSelectedFiles.from_dict(obj["selectedFiles"]) if obj.get("selectedFiles") is not None else None, + "refreshToken": obj.get("refreshToken"), + "accessToken": obj.get("accessToken") 
}) return _obj diff --git a/src/python/vectorize_client/models/update_user_in_source_connector_response.py b/vectorize_client/models/update_user_in_source_connector_response.py similarity index 100% rename from src/python/vectorize_client/models/update_user_in_source_connector_response.py rename to vectorize_client/models/update_user_in_source_connector_response.py diff --git a/src/python/vectorize_client/models/updated_ai_platform_connector_data.py b/vectorize_client/models/updated_ai_platform_connector_data.py similarity index 100% rename from src/python/vectorize_client/models/updated_ai_platform_connector_data.py rename to vectorize_client/models/updated_ai_platform_connector_data.py diff --git a/src/python/vectorize_client/models/updated_destination_connector_data.py b/vectorize_client/models/updated_destination_connector_data.py similarity index 100% rename from src/python/vectorize_client/models/updated_destination_connector_data.py rename to vectorize_client/models/updated_destination_connector_data.py diff --git a/src/python/vectorize_client/models/upload_file.py b/vectorize_client/models/upload_file.py similarity index 100% rename from src/python/vectorize_client/models/upload_file.py rename to vectorize_client/models/upload_file.py diff --git a/vectorize_client/models/vertex.py b/vectorize_client/models/vertex.py new file mode 100644 index 0000000..1043e8e --- /dev/null +++ b/vectorize_client/models/vertex.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
class Vertex(BaseModel):
    """Connector creation payload for Google Vertex AI (fixed type "VERTEX")."""

    name: StrictStr = Field(description="Name of the connector")
    type: StrictStr = Field(description="Connector type (must be \"VERTEX\")")
    config: VERTEXAuthConfig
    __properties: ClassVar[List[str]] = ["name", "type", "config"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Reject any ``type`` other than the literal 'VERTEX'."""
        if value != 'VERTEX':
            raise ValueError("must be one of enum values ('VERTEX')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed dump of the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the model serialized as a JSON string with alias keys."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Vertex instance from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of the model.

        ``None`` values are dropped, and the nested ``config`` model is
        serialized through its own ``to_dict`` instead of pydantic's default.
        """
        serialized = self.model_dump(
            by_alias=True,
            exclude=set(),
            exclude_none=True,
        )
        if self.config:
            serialized['config'] = self.config.to_dict()
        return serialized

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Vertex instance from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "name": obj.get("name"),
            "type": obj.get("type"),
            "config": VERTEXAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None,
        })
class Vertex1(BaseModel):
    """Partial connector payload holding only an optional VERTEX auth config."""

    config: Optional[VERTEXAuthConfig] = None
    __properties: ClassVar[List[str]] = ["config"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed dump of the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the model serialized as a JSON string with alias keys."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Vertex1 instance from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of the model.

        ``None`` values are dropped, and the nested ``config`` model is
        serialized through its own ``to_dict`` instead of pydantic's default.
        """
        serialized = self.model_dump(
            by_alias=True,
            exclude=set(),
            exclude_none=True,
        )
        if self.config:
            serialized['config'] = self.config.to_dict()
        return serialized

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Vertex1 instance from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "config": VERTEXAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None,
        })
class Vertex2(BaseModel):
    """Connector payload carrying an identifier and the fixed type "VERTEX"."""

    id: StrictStr = Field(description="Unique identifier for the connector")
    type: StrictStr = Field(description="Connector type (must be \"VERTEX\")")
    __properties: ClassVar[List[str]] = ["id", "type"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Reject any ``type`` other than the literal 'VERTEX'."""
        if value != 'VERTEX':
            raise ValueError("must be one of enum values ('VERTEX')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed dump of the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the model serialized as a JSON string with alias keys."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Vertex2 instance from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of the model with ``None`` values dropped."""
        return self.model_dump(
            by_alias=True,
            exclude=set(),
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Vertex2 instance from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "id": obj.get("id"),
            "type": obj.get("type"),
        })
class VERTEXAuthConfig(BaseModel):
    """
    Authentication configuration for Google Vertex AI
    """ # noqa: E501

    name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Google Vertex AI integration")
    key: StrictStr = Field(description="Service Account Json. Example: Enter the contents of your Google Vertex AI Service Account JSON file")
    region: StrictStr = Field(description="Region. Example: Region Name, e.g. us-central1")
    __properties: ClassVar[List[str]] = ["name", "key", "region"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed dump of the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the model serialized as a JSON string with alias keys."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a VERTEXAuthConfig instance from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of the model with ``None`` values dropped."""
        return self.model_dump(
            by_alias=True,
            exclude=set(),
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a VERTEXAuthConfig from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "name": obj.get("name"),
            "key": obj.get("key"),
            "region": obj.get("region"),
        })
"""Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Voyage from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Voyage from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": VOYAGEAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/voyage1.py b/vectorize_client/models/voyage1.py new file mode 100644 index 0000000..b8d0819 --- /dev/null +++ b/vectorize_client/models/voyage1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator 
class Voyage1(BaseModel):
    """Partial connector payload holding only an optional VOYAGE auth config."""

    config: Optional[VOYAGEAuthConfig] = None
    __properties: ClassVar[List[str]] = ["config"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed dump of the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the model serialized as a JSON string with alias keys."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Voyage1 instance from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of the model.

        ``None`` values are dropped, and the nested ``config`` model is
        serialized through its own ``to_dict`` instead of pydantic's default.
        """
        serialized = self.model_dump(
            by_alias=True,
            exclude=set(),
            exclude_none=True,
        )
        if self.config:
            serialized['config'] = self.config.to_dict()
        return serialized

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Voyage1 instance from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "config": VOYAGEAuthConfig.from_dict(obj["config"]) if obj.get("config") is not None else None,
        })
class Voyage2(BaseModel):
    """Connector payload carrying an identifier and the fixed type "VOYAGE"."""

    id: StrictStr = Field(description="Unique identifier for the connector")
    type: StrictStr = Field(description="Connector type (must be \"VOYAGE\")")
    __properties: ClassVar[List[str]] = ["id", "type"]

    @field_validator('type')
    def type_validate_enum(cls, value):
        """Reject any ``type`` other than the literal 'VOYAGE'."""
        if value != 'VOYAGE':
            raise ValueError("must be one of enum values ('VOYAGE')")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed dump of the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the model serialized as a JSON string with alias keys."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Voyage2 instance from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of the model with ``None`` values dropped."""
        return self.model_dump(
            by_alias=True,
            exclude=set(),
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Voyage2 instance from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "id": obj.get("id"),
            "type": obj.get("type"),
        })
class VOYAGEAuthConfig(BaseModel):
    """
    Authentication configuration for Voyage AI
    """ # noqa: E501

    name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Voyage AI integration")
    key: Annotated[str, Field(strict=True)] = Field(description="API Key. Example: Enter your Voyage AI API Key")
    __properties: ClassVar[List[str]] = ["name", "key"]

    @field_validator('key')
    def key_validate_regular_expression(cls, value):
        """Require a key with no leading/trailing whitespace (non-empty)."""
        if not re.match(r"^\S.*\S$|^\S$", value):
            raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/")
        return value

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed dump of the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the model serialized as a JSON string with alias keys."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a VOYAGEAuthConfig instance from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of the model with ``None`` values dropped."""
        return self.model_dump(
            by_alias=True,
            exclude=set(),
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a VOYAGEAuthConfig from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "name": obj.get("name"),
            "key": obj.get("key"),
        })
representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Weaviate from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Weaviate from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": WEAVIATEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/weaviate1.py b/vectorize_client/models/weaviate1.py new file mode 100644 index 0000000..b4310f4 --- /dev/null +++ b/vectorize_client/models/weaviate1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do 
class Weaviate1(BaseModel):
    """Partial connector payload holding only an optional WEAVIATE config."""

    config: Optional[WEAVIATEConfig] = None
    __properties: ClassVar[List[str]] = ["config"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Return a pretty-printed dump of the model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the model serialized as a JSON string with alias keys."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Build a Weaviate1 instance from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return an alias-keyed dict of the model.

        ``None`` values are dropped, and the nested ``config`` model is
        serialized through its own ``to_dict`` instead of pydantic's default.
        """
        serialized = self.model_dump(
            by_alias=True,
            exclude=set(),
            exclude_none=True,
        )
        if self.config:
            serialized['config'] = self.config.to_dict()
        return serialized

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Build a Weaviate1 instance from a plain dict; ``None`` passes through."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict payloads are handed straight to pydantic validation.
            return cls.model_validate(obj)
        return cls.model_validate({
            "config": WEAVIATEConfig.from_dict(obj["config"]) if obj.get("config") is not None else None,
        })
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Weaviate2(BaseModel): + """ + Weaviate2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"WEAVIATE\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['WEAVIATE']): + raise ValueError("must be one of enum values ('WEAVIATE')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Weaviate2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Weaviate2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/weaviate_auth_config.py b/vectorize_client/models/weaviate_auth_config.py new file mode 100644 index 0000000..1d91dbb --- /dev/null +++ b/vectorize_client/models/weaviate_auth_config.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class WEAVIATEAuthConfig(BaseModel): + """ + Authentication configuration for Weaviate + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name for your Weaviate integration") + host: StrictStr = Field(description="Endpoint. Example: Enter your Weaviate Cluster REST Endpoint") + api_key: Annotated[str, Field(strict=True)] = Field(description="API Key. 
Example: Enter your API key", alias="api-key") + __properties: ClassVar[List[str]] = ["name", "host", "api-key"] + + @field_validator('api_key') + def api_key_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^\S.*\S$|^\S$", value): + raise ValueError(r"must validate the regular expression /^\S.*\S$|^\S$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WEAVIATEAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WEAVIATEAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "host": obj.get("host"), + "api-key": obj.get("api-key") + }) + return _obj + + diff --git a/vectorize_client/models/weaviate_config.py b/vectorize_client/models/weaviate_config.py new file mode 100644 index 0000000..379104e --- /dev/null +++ b/vectorize_client/models/weaviate_config.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, field_validator +from typing import Any, ClassVar, Dict, List +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class WEAVIATEConfig(BaseModel): + """ + Configuration for Weaviate connector + """ # noqa: E501 + collection: Annotated[str, Field(strict=True)] = Field(description="Collection Name. 
Example: Enter collection name") + __properties: ClassVar[List[str]] = ["collection"] + + @field_validator('collection') + def collection_validate_regular_expression(cls, value): + """Validates the regular expression""" + if not re.match(r"^[A-Z][_0-9A-Za-z]*$", value): + raise ValueError(r"must validate the regular expression /^[A-Z][_0-9A-Za-z]*$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WEAVIATEConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WEAVIATEConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "collection": obj.get("collection") + }) + return _obj + + diff --git a/vectorize_client/models/web_crawler.py b/vectorize_client/models/web_crawler.py new file mode 100644 index 0000000..c2f9b82 --- /dev/null +++ b/vectorize_client/models/web_crawler.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from vectorize_client.models.webcrawler_config import WEBCRAWLERConfig +from typing import Optional, Set +from typing_extensions import Self + +class WebCrawler(BaseModel): + """ + WebCrawler + """ # noqa: E501 + name: StrictStr = Field(description="Name of the connector") + type: StrictStr = Field(description="Connector type (must be \"WEB_CRAWLER\")") + config: WEBCRAWLERConfig + __properties: ClassVar[List[str]] = ["name", "type", "config"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['WEB_CRAWLER']): + raise ValueError("must be one of enum values ('WEB_CRAWLER')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns 
the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WebCrawler from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WebCrawler from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "type": obj.get("type"), + "config": WEBCRAWLERConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/web_crawler1.py b/vectorize_client/models/web_crawler1.py new file mode 100644 index 0000000..4221b49 --- /dev/null +++ b/vectorize_client/models/web_crawler1.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator 
(https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from vectorize_client.models.webcrawler_config import WEBCRAWLERConfig +from typing import Optional, Set +from typing_extensions import Self + +class WebCrawler1(BaseModel): + """ + WebCrawler1 + """ # noqa: E501 + config: Optional[WEBCRAWLERConfig] = None + __properties: ClassVar[List[str]] = ["config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WebCrawler1 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of config + if self.config: + _dict['config'] = self.config.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WebCrawler1 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "config": WEBCRAWLERConfig.from_dict(obj["config"]) if obj.get("config") is not None else None + }) + return _obj + + diff --git a/vectorize_client/models/web_crawler2.py b/vectorize_client/models/web_crawler2.py new file mode 100644 index 0000000..ec64978 --- /dev/null +++ b/vectorize_client/models/web_crawler2.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class WebCrawler2(BaseModel): + """ + WebCrawler2 + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for the connector") + type: StrictStr = Field(description="Connector type (must be \"WEB_CRAWLER\")") + __properties: ClassVar[List[str]] = ["id", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['WEB_CRAWLER']): + raise ValueError("must be one of enum values ('WEB_CRAWLER')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WebCrawler2 from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WebCrawler2 from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "type": obj.get("type") + }) + return _obj + + diff --git a/vectorize_client/models/webcrawler_auth_config.py b/vectorize_client/models/webcrawler_auth_config.py new file mode 100644 index 0000000..ed9670c --- /dev/null +++ b/vectorize_client/models/webcrawler_auth_config.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class WEBCRAWLERAuthConfig(BaseModel): + """ + Authentication configuration for Web Crawler + """ # noqa: E501 + name: StrictStr = Field(description="Name. Example: Enter a descriptive name") + seed_urls: StrictStr = Field(description="Seed URL(s). Add one or more seed URLs to crawl. The crawler will start from these URLs and follow links to other pages.. Example: (e.g. 
https://example.com)", alias="seed-urls") + __properties: ClassVar[List[str]] = ["name", "seed-urls"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WEBCRAWLERAuthConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WEBCRAWLERAuthConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "seed-urls": obj.get("seed-urls") + }) + return _obj + + diff --git a/vectorize_client/models/webcrawler_config.py b/vectorize_client/models/webcrawler_config.py new file mode 100644 index 0000000..1495ba1 --- /dev/null +++ b/vectorize_client/models/webcrawler_config.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Vectorize API (Beta) + + API for Vectorize services + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing_extensions import Annotated +from typing import Optional, Set +from typing_extensions import Self + +class WEBCRAWLERConfig(BaseModel): + """ + Configuration for Web Crawler connector + """ # noqa: E501 + allowed_domains_opt: Optional[StrictStr] = Field(default=None, description="Additional Allowed URLs or prefix(es). Add one or more allowed URLs or URL prefixes. The crawler will read URLs that match these patterns in addition to the seed URL(s).. Example: (e.g. https://docs.example.com)", alias="allowed-domains-opt") + forbidden_paths: Optional[Annotated[str, Field(strict=True)]] = Field(default=None, description="Forbidden Paths. Example: Enter forbidden paths (e.g. 
/admin)", alias="forbidden-paths") + min_time_between_requests: Optional[Union[StrictFloat, StrictInt]] = Field(default=500, description="Throttle (ms). Example: Enter minimum time between requests in milliseconds", alias="min-time-between-requests") + max_error_count: Optional[Union[StrictFloat, StrictInt]] = Field(default=5, description="Max Error Count. Example: Enter maximum error count", alias="max-error-count") + max_urls: Optional[Union[StrictFloat, StrictInt]] = Field(default=1000, description="Max URLs. Example: Enter maximum number of URLs to crawl", alias="max-urls") + max_depth: Optional[Union[StrictFloat, StrictInt]] = Field(default=50, description="Max Depth. Example: Enter maximum crawl depth", alias="max-depth") + reindex_interval_seconds: Optional[Union[StrictFloat, StrictInt]] = Field(default=3600, description="Reindex Interval (seconds). Example: Enter reindex interval in seconds", alias="reindex-interval-seconds") + __properties: ClassVar[List[str]] = ["allowed-domains-opt", "forbidden-paths", "min-time-between-requests", "max-error-count", "max-urls", "max-depth", "reindex-interval-seconds"] + + @field_validator('forbidden_paths') + def forbidden_paths_validate_regular_expression(cls, value): + """Validates the regular expression""" + if value is None: + return value + + if not re.match(r"^\/([a-zA-Z0-9-_]+(\/)?)+$", value): + raise ValueError(r"must validate the regular expression /^\/([a-zA-Z0-9-_]+(\/)?)+$/") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, 
json_str: str) -> Optional[Self]: + """Create an instance of WEBCRAWLERConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WEBCRAWLERConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "allowed-domains-opt": obj.get("allowed-domains-opt"), + "forbidden-paths": obj.get("forbidden-paths"), + "min-time-between-requests": obj.get("min-time-between-requests") if obj.get("min-time-between-requests") is not None else 500, + "max-error-count": obj.get("max-error-count") if obj.get("max-error-count") is not None else 5, + "max-urls": obj.get("max-urls") if obj.get("max-urls") is not None else 1000, + "max-depth": obj.get("max-depth") if obj.get("max-depth") is not None else 50, + "reindex-interval-seconds": obj.get("reindex-interval-seconds") if obj.get("reindex-interval-seconds") is not None else 3600 + }) + return _obj + + diff --git a/vectorize_client/py.typed b/vectorize_client/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/python/vectorize_client/rest.py b/vectorize_client/rest.py similarity index 99% rename from src/python/vectorize_client/rest.py rename to vectorize_client/rest.py index 174fed9..2d60aa0 100644 --- a/src/python/vectorize_client/rest.py +++ b/vectorize_client/rest.py @@ -76,6 +76,7 
@@ def __init__(self, configuration) -> None: "ca_certs": configuration.ssl_ca_cert, "cert_file": configuration.cert_file, "key_file": configuration.key_file, + "ca_cert_data": configuration.ca_cert_data, } if configuration.assert_hostname is not None: pool_args['assert_hostname'] = (