diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8414c5fe8..c5789dea9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,7 +21,10 @@ exclude: | ^.*?/build/.*$| ^.*?/dist/.*$| ^.*?/\.coverage$| - ^.*?/coverage\.xml$ + ^.*?/coverage\.xml$| + + ^airbyte_cdk/sources/declarative/models/declarative_component_schema\.py$| + ^airbyte_cdk/test/models/connector_metadata/generated/models\.py$ ) repos: diff --git a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index 35186ef71..421c6779f 100644 --- a/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -1,5 +1,3 @@ -# Copyright (c) 2025 Airbyte, Inc., all rights reserved. - # generated by datamodel-codegen: # filename: declarative_component_schema.yaml @@ -928,24 +926,28 @@ class OAuthConfigSpecification(BaseModel): class Config: extra = Extra.allow - oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = Field( - None, - description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", - examples=[ - {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, - { - "app_id": { - "type": "string", - "path_in_connector_config": ["info", "app_id"], - } - }, - ], - title="OAuth user input", + oauth_user_input_from_connector_config_specification: Optional[Dict[str, Any]] = ( + Field( + None, + description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations used as input to OAuth.\nMust be a valid non-nested JSON that refers to properties from ConnectorSpecification.connectionSpecification\nusing special annotation 'path_in_connector_config'.\nThese are input values the user is entering through the UI to authenticate to the connector, that might also shared\nas inputs for syncing data via the connector.\nExamples:\nif no connector values is shared during oauth flow, oauth_user_input_from_connector_config_specification=[]\nif connector values such as 'app_id' inside the top level are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['app_id']\n }\n }\nif connector values such as 'info.app_id' nested inside another object are used to generate the API url for the oauth flow,\n oauth_user_input_from_connector_config_specification={\n app_id: {\n type: string\n path_in_connector_config: ['info', 'app_id']\n }\n }", + examples=[ + {"app_id": {"type": "string", "path_in_connector_config": ["app_id"]}}, + { + "app_id": { + "type": "string", + "path_in_connector_config": ["info", "app_id"], + } + }, + ], + title="OAuth user input", + ) ) - oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = Field( - None, - description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to URL-like format, {{ 
https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', - title="DeclarativeOAuth Connector Specification", + oauth_connector_input_specification: Optional[OauthConnectorInputSpecification] = ( + Field( + None, + description='The DeclarativeOAuth specific blob.\nPertains to the fields defined by the connector relating to the OAuth flow.\n\nInterpolation capabilities:\n- The variables placeholders are declared as `{{my_var}}`.\n- The nested resolution variables like `{{ {{my_nested_var}} }}` is allowed as well.\n\n- The allowed interpolation context is:\n + base64Encoder - encode to `base64`, {{ {{my_var_a}}:{{my_var_b}} | base64Encoder }}\n + base64Decorer - decode from `base64` encoded string, {{ {{my_string_variable_or_string_value}} | base64Decoder }}\n + urlEncoder - encode the input string to 
URL-like format, {{ https://test.host.com/endpoint | urlEncoder}}\n + urlDecorer - decode the input url-encoded string into text format, {{ urlDecoder:https%3A%2F%2Fairbyte.io | urlDecoder}}\n + codeChallengeS256 - get the `codeChallenge` encoded value to provide additional data-provider specific authorisation values, {{ {{state_value}} | codeChallengeS256 }}\n\nExamples:\n - The TikTok Marketing DeclarativeOAuth spec:\n {\n "oauth_connector_input_specification": {\n "type": "object",\n "additionalProperties": false,\n "properties": {\n "consent_url": "https://ads.tiktok.com/marketing_api/auth?{{client_id_key}}={{client_id_value}}&{{redirect_uri_key}}={{ {{redirect_uri_value}} | urlEncoder}}&{{state_key}}={{state_value}}",\n "access_token_url": "https://business-api.tiktok.com/open_api/v1.3/oauth2/access_token/",\n "access_token_params": {\n "{{ auth_code_key }}": "{{ auth_code_value }}",\n "{{ client_id_key }}": "{{ client_id_value }}",\n "{{ client_secret_key }}": "{{ client_secret_value }}"\n },\n "access_token_headers": {\n "Content-Type": "application/json",\n "Accept": "application/json"\n },\n "extract_output": ["data.access_token"],\n "client_id_key": "app_id",\n "client_secret_key": "secret",\n "auth_code_key": "auth_code"\n }\n }\n }', + title="DeclarativeOAuth Connector Specification", + ) ) complete_oauth_output_specification: Optional[Dict[str, Any]] = Field( None, @@ -963,7 +965,9 @@ class Config: complete_oauth_server_input_specification: Optional[Dict[str, Any]] = Field( None, description="OAuth specific blob. 
This is a Json Schema used to validate Json configurations persisted as Airbyte Server configurations.\nMust be a valid non-nested JSON describing additional fields configured by the Airbyte Instance or Workspace Admins to be used by the\nserver when completing an OAuth flow (typically exchanging an auth code for refresh token).\nExamples:\n complete_oauth_server_input_specification={\n client_id: {\n type: string\n },\n client_secret: {\n type: string\n }\n }", - examples=[{"client_id": {"type": "string"}, "client_secret": {"type": "string"}}], + examples=[ + {"client_id": {"type": "string"}, "client_secret": {"type": "string"}} + ], title="OAuth input specification", ) complete_oauth_server_output_specification: Optional[Dict[str, Any]] = Field( @@ -1467,7 +1471,9 @@ class CustomConfigTransformation(BaseModel): class_name: str = Field( ..., description="Fully-qualified name of the class that will be implementing the custom config transformation. The format is `source_..`.", - examples=["source_declarative_manifest.components.MyCustomConfigTransformation"], + examples=[ + "source_declarative_manifest.components.MyCustomConfigTransformation" + ], ) parameters: Optional[Dict[str, Any]] = Field( None, @@ -1885,7 +1891,9 @@ class OAuthAuthenticator(BaseModel): scopes: Optional[List[str]] = Field( None, description="List of scopes that should be granted to the access token.", - examples=[["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"]], + examples=[ + ["crm.list.read", "crm.objects.contacts.read", "crm.schema.contacts.read"] + ], title="Scopes", ) token_expiry_date: Optional[str] = Field( @@ -2084,7 +2092,9 @@ class RecordSelector(BaseModel): description="Responsible for filtering records to be emitted by the Source.", title="Record Filter", ) - schema_normalization: Optional[Union[SchemaNormalization, CustomSchemaNormalization]] = Field( + schema_normalization: Optional[ + Union[SchemaNormalization, CustomSchemaNormalization] + ] = Field( 
None, description="Responsible for normalization according to the schema.", title="Schema Normalization", @@ -2126,10 +2136,12 @@ class DpathValidator(BaseModel): ], title="Field Path", ) - validation_strategy: Union[ValidateAdheresToSchema, CustomValidationStrategy] = Field( - ..., - description="The condition that the specified config value will be evaluated against", - title="Validation Strategy", + validation_strategy: Union[ValidateAdheresToSchema, CustomValidationStrategy] = ( + Field( + ..., + description="The condition that the specified config value will be evaluated against", + title="Validation Strategy", + ) ) @@ -2146,10 +2158,12 @@ class PredicateValidator(BaseModel): ], title="Value", ) - validation_strategy: Union[ValidateAdheresToSchema, CustomValidationStrategy] = Field( - ..., - description="The validation strategy to apply to the value.", - title="Validation Strategy", + validation_strategy: Union[ValidateAdheresToSchema, CustomValidationStrategy] = ( + Field( + ..., + description="The validation strategy to apply to the value.", + title="Validation Strategy", + ) ) @@ -2174,12 +2188,12 @@ class ConfigAddFields(BaseModel): class CompositeErrorHandler(BaseModel): type: Literal["CompositeErrorHandler"] - error_handlers: List[Union[CompositeErrorHandler, DefaultErrorHandler, CustomErrorHandler]] = ( - Field( - ..., - description="List of error handlers to iterate on to determine how to handle a failed response.", - title="Error Handlers", - ) + error_handlers: List[ + Union[CompositeErrorHandler, DefaultErrorHandler, CustomErrorHandler] + ] = Field( + ..., + description="List of error handlers to iterate on to determine how to handle a failed response.", + title="Error Handlers", ) parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters") @@ -2341,9 +2355,9 @@ class Config: type: Literal["DeclarativeSource"] check: Union[CheckStream, CheckDynamicStream] - streams: Optional[List[Union[ConditionalStreams, DeclarativeStream, 
StateDelegatingStream]]] = ( - None - ) + streams: Optional[ + List[Union[ConditionalStreams, DeclarativeStream, StateDelegatingStream]] + ] = None dynamic_streams: List[DynamicDeclarativeStream] version: str = Field( ..., @@ -2468,16 +2482,20 @@ class Config: extra = Extra.allow type: Literal["DeclarativeStream"] - name: Optional[str] = Field("", description="The stream name.", example=["Users"], title="Name") + name: Optional[str] = Field( + "", description="The stream name.", example=["Users"], title="Name" + ) retriever: Union[SimpleRetriever, AsyncRetriever, CustomRetriever] = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages.", title="Retriever", ) - incremental_sync: Optional[Union[DatetimeBasedCursor, IncrementingCountCursor]] = Field( - None, - description="Component used to fetch data incrementally based on a time field in the data.", - title="Incremental Sync", + incremental_sync: Optional[Union[DatetimeBasedCursor, IncrementingCountCursor]] = ( + Field( + None, + description="Component used to fetch data incrementally based on a time field in the data.", + title="Incremental Sync", + ) ) primary_key: Optional[PrimaryKey] = Field("", title="Primary Key") schema_loader: Optional[ @@ -2651,18 +2669,20 @@ class HttpRequester(BaseModelWithDeprecations): description="For APIs that require explicit specification of the properties to query for, this component will take a static or dynamic set of properties (which can be optionally split into chunks) and allow them to be injected into an outbound request by accessing stream_partition.extra_fields.", title="Query Properties", ) - request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = Field( - None, - description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", - examples=[ - {"unit": "day"}, - { - "query": 'last_event_time BETWEEN TIMESTAMP "{{ 
stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' - }, - {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, - {"sort_by[asc]": "updated_at"}, - ], - title="Query Parameters", + request_parameters: Optional[Union[Dict[str, Union[str, QueryProperties]], str]] = ( + Field( + None, + description="Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.", + examples=[ + {"unit": "day"}, + { + "query": 'last_event_time BETWEEN TIMESTAMP "{{ stream_interval.start_time }}" AND TIMESTAMP "{{ stream_interval.end_time }}"' + }, + {"searchIn": "{{ ','.join(config.get('search_in', [])) }}"}, + {"sort_by[asc]": "updated_at"}, + ], + title="Query Parameters", + ) ) request_headers: Optional[Union[Dict[str, str], str]] = Field( None, @@ -2834,7 +2854,9 @@ class QueryProperties(BaseModel): class StateDelegatingStream(BaseModel): type: Literal["StateDelegatingStream"] - name: str = Field(..., description="The stream name.", example=["Users"], title="Name") + name: str = Field( + ..., description="The stream name.", example=["Users"], title="Name" + ) full_refresh_stream: DeclarativeStream = Field( ..., description="Component used to coordinate how records are extracted across stream slices and request pages when the state is empty or not provided.", @@ -2921,13 +2943,17 @@ class AsyncRetriever(BaseModel): status_extractor: Union[DpathExtractor, CustomRecordExtractor] = Field( ..., description="Responsible for fetching the actual status of the async job." 
) - download_target_extractor: Optional[Union[DpathExtractor, CustomRecordExtractor]] = Field( + download_target_extractor: Optional[ + Union[DpathExtractor, CustomRecordExtractor] + ] = Field( None, description="Responsible for fetching the final result `urls` provided by the completed / finished / ready async job.", ) download_extractor: Optional[ Union[DpathExtractor, CustomRecordExtractor, ResponseToFileExtractor] - ] = Field(None, description="Responsible for fetching the records from provided urls.") + ] = Field( + None, description="Responsible for fetching the records from provided urls." + ) creation_requester: Union[HttpRequester, CustomRequester] = Field( ..., description="Requester component that describes how to prepare HTTP requests to send to the source API to create the async server-side job.", diff --git a/airbyte_cdk/test/models/__init__.py b/airbyte_cdk/test/models/__init__.py index 70e6a3600..14aba786e 100644 --- a/airbyte_cdk/test/models/__init__.py +++ b/airbyte_cdk/test/models/__init__.py @@ -1,10 +1,16 @@ # Copyright (c) 2025 Airbyte, Inc., all rights reserved. """Models used for standard tests.""" +from airbyte_cdk.test.models.connector_metadata import ( + ConnectorMetadataDefinitionV0, + ConnectorTestSuiteOptions, +) from airbyte_cdk.test.models.outcome import ExpectedOutcome from airbyte_cdk.test.models.scenario import ConnectorTestScenario __all__ = [ + "ConnectorMetadataDefinitionV0", "ConnectorTestScenario", + "ConnectorTestSuiteOptions", "ExpectedOutcome", ] diff --git a/airbyte_cdk/test/models/connector_metadata/__init__.py b/airbyte_cdk/test/models/connector_metadata/__init__.py new file mode 100644 index 000000000..ff18779f7 --- /dev/null +++ b/airbyte_cdk/test/models/connector_metadata/__init__.py @@ -0,0 +1,24 @@ +"""Pydantic and JSON Schema models for `metadata.yaml` validation and testing. 
+ +## Usage + +```python +from airbyte_cdk.test.models import ConnectorMetadataDefinitionV0 +import yaml + +metadata = ConnectorMetadataDefinitionV0(**yaml.safe_load(metadata_yaml)) +``` + +## Regenerating Models + +These models are auto-generated from JSON schemas in the airbytehq/airbyte repository. +For information on regenerating these models, see the Contributing Guide: +https://github.com/airbytehq/airbyte-python-cdk/blob/main/docs/CONTRIBUTING.md#regenerating-connector-metadata-models +""" + +from .generated.models import ConnectorMetadataDefinitionV0, ConnectorTestSuiteOptions + +__all__ = [ + "ConnectorMetadataDefinitionV0", + "ConnectorTestSuiteOptions", +] diff --git a/airbyte_cdk/test/models/connector_metadata/generated/__init__.py b/airbyte_cdk/test/models/connector_metadata/generated/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/airbyte_cdk/test/models/connector_metadata/generated/metadata_schema.json b/airbyte_cdk/test/models/connector_metadata/generated/metadata_schema.json new file mode 100644 index 000000000..aef8ce028 --- /dev/null +++ b/airbyte_cdk/test/models/connector_metadata/generated/metadata_schema.json @@ -0,0 +1,1193 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://github.com/airbytehq/airbyte/airbyte-ci/connectors_ci/metadata_service/lib/models/src/ConnectorMetadataDefinitionV0.yml", + "title": "ConnectorMetadataDefinitionV0", + "description": "describes the metadata of a connector", + "type": "object", + "required": [ + "metadataSpecVersion", + "data" + ], + "additionalProperties": false, + "properties": { + "metadataSpecVersion": { + "type": "string" + }, + "data": { + "type": "object", + "required": [ + "name", + "definitionId", + "connectorType", + "dockerRepository", + "dockerImageTag", + "license", + "documentationUrl", + "githubIssueLabel", + "connectorSubtype", + "releaseStage" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + 
"icon": { + "type": "string" + }, + "definitionId": { + "type": "string", + "format": "uuid" + }, + "connectorBuildOptions": { + "$ref": "#/definitions/ConnectorBuildOptions" + }, + "connectorTestSuitesOptions": { + "type": "array", + "items": { + "$ref": "#/definitions/ConnectorTestSuiteOptions" + } + }, + "connectorType": { + "type": "string", + "enum": [ + "destination", + "source" + ] + }, + "dockerRepository": { + "type": "string" + }, + "dockerImageTag": { + "type": "string" + }, + "supportsDbt": { + "type": "boolean" + }, + "supportsNormalization": { + "type": "boolean" + }, + "license": { + "type": "string" + }, + "documentationUrl": { + "type": "string", + "format": "uri" + }, + "githubIssueLabel": { + "type": "string" + }, + "maxSecondsBetweenMessages": { + "description": "Maximum delay between 2 airbyte protocol messages, in second. The source will timeout if this delay is reached", + "type": "integer" + }, + "releaseDate": { + "description": "The date when this connector was first released, in yyyy-mm-dd format.", + "type": "string", + "format": "date" + }, + "protocolVersion": { + "type": "string", + "description": "the Airbyte Protocol version supported by the connector" + }, + "erdUrl": { + "type": "string", + "description": "The URL where you can visualize the ERD" + }, + "connectorSubtype": { + "type": "string", + "enum": [ + "api", + "database", + "datalake", + "file", + "custom", + "message_queue", + "unknown", + "vectorstore" + ] + }, + "releaseStage": { + "$ref": "#/definitions/ReleaseStage" + }, + "supportLevel": { + "$ref": "#/definitions/SupportLevel" + }, + "tags": { + "type": "array", + "description": "An array of tags that describe the connector. 
E.g: language:python, keyword:rds, etc.", + "items": { + "type": "string" + }, + "default": [] + }, + "registryOverrides": { + "anyOf": [ + { + "type": "object", + "additionalProperties": false, + "properties": { + "oss": { + "anyOf": [ + { + "$ref": "#/definitions/RegistryOverrides" + } + ] + }, + "cloud": { + "anyOf": [ + { + "$ref": "#/definitions/RegistryOverrides" + } + ] + } + } + } + ] + }, + "allowedHosts": { + "$ref": "#/definitions/AllowedHosts" + }, + "releases": { + "$ref": "#/definitions/ConnectorReleases" + }, + "normalizationConfig": { + "$ref": "#/definitions/NormalizationDestinationDefinitionConfig" + }, + "suggestedStreams": { + "$ref": "#/definitions/SuggestedStreams" + }, + "resourceRequirements": { + "$ref": "#/definitions/ActorDefinitionResourceRequirements" + }, + "ab_internal": { + "$ref": "#/definitions/AirbyteInternal" + }, + "remoteRegistries": { + "$ref": "#/definitions/RemoteRegistries" + }, + "supportsRefreshes": { + "type": "boolean", + "default": false + }, + "generated": { + "$ref": "#/definitions/GeneratedFields" + }, + "supportsFileTransfer": { + "type": "boolean", + "default": false + }, + "supportsDataActivation": { + "type": "boolean", + "default": false + }, + "connectorIPCOptions": { + "$ref": "#/definitions/ConnectorIPCOptions" + } + } + } + }, + "definitions": { + "TestConnections": { + "title": "TestConnections", + "description": "List of sandbox cloud connections that tests can be run against", + "type": "object", + "required": [ + "name", + "id" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The connection name" + }, + "id": { + "type": "string", + "description": "The connection ID" + } + } + }, + "SupportLevel": { + "title": "SupportLevel", + "description": "enum that describes a connector's release stage", + "type": "string", + "enum": [ + "community", + "certified", + "archived" + ] + }, + "SuggestedStreams": { + "title": "SuggestedStreams", + "description": 
"A source's suggested streams. These will be suggested by default for new connections using this source. Otherwise, all streams will be selected. This is useful for when your source has a lot of streams, but the average user will only want a subset of them synced.", + "type": "object", + "additionalProperties": true, + "properties": { + "streams": { + "type": "array", + "description": "An array of streams that this connector suggests the average user will want. SuggestedStreams not being present for the source means that all streams are suggested. An empty list here means that no streams are suggested.", + "items": { + "type": "string" + } + } + } + }, + "SourceFileInfo": { + "title": "SourceFileInfo", + "description": "Information about the source file that generated the registry entry", + "type": "object", + "properties": { + "metadata_etag": { + "type": "string" + }, + "metadata_file_path": { + "type": "string" + }, + "metadata_bucket_name": { + "type": "string" + }, + "metadata_last_modified": { + "type": "string" + }, + "registry_entry_generated_at": { + "type": "string" + } + } + }, + "SecretStore": { + "title": "SecretStore", + "description": "An object describing a secret store metadata", + "type": "object", + "required": [ + "name", + "secretStore" + ], + "additionalProperties": false, + "properties": { + "alias": { + "type": "string", + "description": "The alias of the secret store which can map to its actual secret address" + }, + "type": { + "type": "string", + "description": "The type of the secret store", + "enum": [ + "GSM" + ] + } + } + }, + "Secret": { + "title": "Secret", + "description": "An object describing a secret's metadata", + "type": "object", + "required": [ + "name", + "secretStore" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The secret name in the secret store" + }, + "fileName": { + "type": "string", + "description": "The name of the file to which the secret value would be 
persisted" + }, + "secretStore": { + "$ref": "#/definitions/SecretStore" + } + } + }, + "RolloutConfiguration": { + "title": "RolloutConfiguration", + "description": "configuration for the rollout of a connector", + "type": "object", + "additionalProperties": false, + "properties": { + "enableProgressiveRollout": { + "type": "boolean", + "default": false, + "description": "Whether to enable progressive rollout for the connector." + }, + "initialPercentage": { + "type": "integer", + "minimum": 0, + "maximum": 100, + "default": 0, + "description": "The percentage of users that should receive the new version initially." + }, + "maxPercentage": { + "type": "integer", + "minimum": 0, + "maximum": 100, + "default": 50, + "description": "The percentage of users who should receive the release candidate during the test phase before full rollout." + }, + "advanceDelayMinutes": { + "type": "integer", + "minimum": 10, + "default": 10, + "description": "The number of minutes to wait before advancing the rollout percentage." + } + } + }, + "ResourceRequirements": { + "title": "ResourceRequirements", + "description": "generic configuration for pod source requirements", + "type": "object", + "additionalProperties": false, + "properties": { + "cpu_request": { + "type": "string" + }, + "cpu_limit": { + "type": "string" + }, + "memory_request": { + "type": "string" + }, + "memory_limit": { + "type": "string" + } + } + }, + "PyPi": { + "title": "PyPi", + "description": "describes the PyPi publishing options", + "type": "object", + "additionalProperties": false, + "required": [ + "enabled", + "packageName" + ], + "properties": { + "enabled": { + "type": "boolean" + }, + "packageName": { + "type": "string", + "description": "The name of the package on PyPi." 
+ } + } + }, + "RemoteRegistries": { + "title": "RemoteRegistries", + "description": "describes how the connector is published to remote registries", + "type": "object", + "additionalProperties": false, + "properties": { + "pypi": { + "$ref": "#/definitions/PyPi" + } + } + }, + "ReleaseStage": { + "title": "ReleaseStage", + "description": "enum that describes a connector's release stage", + "type": "string", + "enum": [ + "alpha", + "beta", + "generally_available", + "custom" + ] + }, + "RegistryOverrides": { + "title": "RegistryOverrides", + "description": "describes the overrides per registry of a connector", + "type": "object", + "additionalProperties": false, + "required": [ + "enabled" + ], + "properties": { + "enabled": { + "type": "boolean", + "default": false + }, + "name": { + "type": "string" + }, + "dockerRepository": { + "type": "string" + }, + "dockerImageTag": { + "type": "string" + }, + "supportsDbt": { + "type": "boolean" + }, + "supportsNormalization": { + "type": "boolean" + }, + "license": { + "type": "string" + }, + "documentationUrl": { + "type": "string", + "format": "uri" + }, + "connectorSubtype": { + "type": "string" + }, + "allowedHosts": { + "$ref": "#/definitions/AllowedHosts" + }, + "normalizationConfig": { + "$ref": "#/definitions/NormalizationDestinationDefinitionConfig" + }, + "suggestedStreams": { + "$ref": "#/definitions/SuggestedStreams" + }, + "resourceRequirements": { + "$ref": "#/definitions/ActorDefinitionResourceRequirements" + } + } + }, + "NormalizationDestinationDefinitionConfig": { + "title": "NormalizationDestinationDefinitionConfig", + "description": "describes a normalization config for destination definition", + "type": "object", + "required": [ + "normalizationRepository", + "normalizationTag", + "normalizationIntegrationType" + ], + "additionalProperties": true, + "properties": { + "normalizationRepository": { + "type": "string", + "description": "a field indicating the name of the repository to be used for 
normalization. If the value of the flag is NULL - normalization is not used." + }, + "normalizationTag": { + "type": "string", + "description": "a field indicating the tag of the docker repository to be used for normalization." + }, + "normalizationIntegrationType": { + "type": "string", + "description": "a field indicating the type of integration dialect to use for normalization." + } + } + }, + "JobType": { + "title": "JobType", + "description": "enum that describes the different types of jobs that the platform runs.", + "type": "string", + "enum": [ + "get_spec", + "check_connection", + "discover_schema", + "sync", + "reset_connection", + "connection_updater", + "replicate" + ] + }, + "GitInfo": { + "title": "GitInfo", + "description": "Information about the author of the last commit that modified this file. DO NOT DEFINE THIS FIELD MANUALLY. It will be overwritten by the CI.", + "type": "object", + "additionalProperties": false, + "properties": { + "commit_sha": { + "type": "string", + "description": "The git commit sha of the last commit that modified this file." + }, + "commit_timestamp": { + "type": "string", + "format": "date-time", + "description": "The git commit timestamp of the last commit that modified this file." + }, + "commit_author": { + "type": "string", + "description": "The git commit author of the last commit that modified this file." + }, + "commit_author_email": { + "type": "string", + "description": "The git commit author email of the last commit that modified this file." 
+ } + } + }, + "GeneratedFields": { + "title": "GeneratedFields", + "description": "Optional schema for fields generated at metadata upload time", + "type": "object", + "properties": { + "git": { + "$ref": "#/definitions/GitInfo" + }, + "source_file_info": { + "$ref": "#/definitions/SourceFileInfo" + }, + "metrics": { + "$ref": "#/definitions/ConnectorMetrics" + }, + "sbomUrl": { + "type": "string", + "description": "URL to the SBOM file" + } + } + }, + "ConnectorTestSuiteOptions": { + "title": "ConnectorTestSuiteOptions", + "description": "Options for a specific connector test suite.", + "type": "object", + "required": [ + "suite" + ], + "additionalProperties": false, + "properties": { + "suite": { + "description": "Name of the configured test suite", + "type": "string", + "enum": [ + "unitTests", + "integrationTests", + "acceptanceTests", + "liveTests" + ] + }, + "testSecrets": { + "description": "List of secrets required to run the test suite", + "type": "array", + "items": { + "$ref": "#/definitions/Secret" + } + }, + "testConnections": { + "description": "List of sandbox cloud connections that tests can be run against", + "type": "array", + "items": { + "$ref": "#/definitions/TestConnections" + } + } + } + }, + "ConnectorReleases": { + "title": "ConnectorReleases", + "description": "Contains information about different types of releases for a connector.", + "type": "object", + "additionalProperties": false, + "properties": { + "rolloutConfiguration": { + "$ref": "#/definitions/RolloutConfiguration" + }, + "breakingChanges": { + "$ref": "#/definitions/ConnectorBreakingChanges" + }, + "migrationDocumentationUrl": { + "description": "URL to documentation on how to migrate from the previous version to the current version. 
Defaults to ${documentationUrl}-migrations", + "type": "string", + "format": "uri" + } + } + }, + "ConnectorRegistryV0": { + "title": "ConnectorRegistryV0", + "description": "describes the collection of connectors retrieved from a registry", + "type": "object", + "required": [ + "destinations", + "sources" + ], + "properties": { + "destinations": { + "type": "array", + "items": { + "$ref": "#/definitions/ConnectorRegistryDestinationDefinition" + } + }, + "sources": { + "type": "array", + "items": { + "$ref": "#/definitions/ConnectorRegistrySourceDefinition" + } + } + } + }, + "ConnectorRegistrySourceDefinition": { + "title": "ConnectorRegistrySourceDefinition", + "description": "describes a source", + "type": "object", + "required": [ + "sourceDefinitionId", + "name", + "dockerRepository", + "dockerImageTag", + "documentationUrl", + "spec" + ], + "additionalProperties": true, + "properties": { + "sourceDefinitionId": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "dockerRepository": { + "type": "string" + }, + "dockerImageTag": { + "type": "string" + }, + "documentationUrl": { + "type": "string" + }, + "icon": { + "type": "string" + }, + "iconUrl": { + "type": "string" + }, + "sourceType": { + "type": "string", + "enum": [ + "api", + "file", + "database", + "custom" + ] + }, + "spec": { + "type": "object" + }, + "tombstone": { + "description": "if false, the configuration is active. 
if true, then this configuration is permanently off.", + "type": "boolean", + "default": false + }, + "public": { + "description": "true if this connector definition is available to all workspaces", + "type": "boolean", + "default": false + }, + "custom": { + "description": "whether this is a custom connector definition", + "type": "boolean", + "default": false + }, + "releaseStage": { + "$ref": "#/definitions/ReleaseStage" + }, + "supportLevel": { + "$ref": "#/definitions/SupportLevel" + }, + "releaseDate": { + "description": "The date when this connector was first released, in yyyy-mm-dd format.", + "type": "string", + "format": "date" + }, + "resourceRequirements": { + "$ref": "#/definitions/ActorDefinitionResourceRequirements" + }, + "protocolVersion": { + "type": "string", + "description": "the Airbyte Protocol version supported by the connector" + }, + "allowedHosts": { + "$ref": "#/definitions/AllowedHosts" + }, + "suggestedStreams": { + "$ref": "#/definitions/SuggestedStreams" + }, + "maxSecondsBetweenMessages": { + "description": "Number of seconds allowed between 2 airbyte protocol messages. 
The source will timeout if this delay is reach", + "type": "integer" + }, + "erdUrl": { + "type": "string", + "description": "The URL where you can visualize the ERD" + }, + "releases": { + "$ref": "#/definitions/ConnectorRegistryReleases" + }, + "ab_internal": { + "$ref": "#/definitions/AirbyteInternal" + }, + "generated": { + "$ref": "#/definitions/GeneratedFields" + }, + "packageInfo": { + "$ref": "#/definitions/ConnectorPackageInfo" + }, + "language": { + "type": "string", + "description": "The language the connector is written in" + }, + "supportsFileTransfer": { + "type": "boolean", + "default": false + }, + "supportsDataActivation": { + "type": "boolean", + "default": false + } + } + }, + "ConnectorReleaseCandidates": { + "description": "Each entry denotes a release candidate version of a connector.", + "type": "object", + "additionalProperties": false, + "minProperties": 1, + "maxProperties": 1, + "patternProperties": { + "^\\d+\\.\\d+\\.\\d+(-[0-9A-Za-z-.]+)?$": { + "$ref": "#/definitions/VersionReleaseCandidate" + } + } + }, + "VersionReleaseCandidate": { + "description": "Contains information about a release candidate version of a connector.", + "additionalProperties": false, + "type": "object", + "oneOf": [ + { + "$ref": "#/definitions/ConnectorRegistrySourceDefinition" + }, + { + "$ref": "#/definitions/ConnectorRegistryDestinationDefinition" + } + ] + }, + "ConnectorRegistryReleases": { + "title": "ConnectorRegistryReleases", + "description": "Contains information about different types of releases for a connector.", + "type": "object", + "additionalProperties": false, + "properties": { + "releaseCandidates": { + "$ref": "#/definitions/ConnectorReleaseCandidates" + }, + "rolloutConfiguration": { + "$ref": "#/definitions/RolloutConfiguration" + }, + "breakingChanges": { + "$ref": "#/definitions/ConnectorBreakingChanges" + }, + "migrationDocumentationUrl": { + "description": "URL to documentation on how to migrate from the previous version to the current 
version. Defaults to ${documentationUrl}-migrations", + "type": "string", + "format": "uri" + } + } + }, + "ConnectorRegistryDestinationDefinition": { + "title": "ConnectorRegistryDestinationDefinition", + "description": "describes a destination", + "type": "object", + "required": [ + "destinationDefinitionId", + "name", + "dockerRepository", + "dockerImageTag", + "documentationUrl", + "spec" + ], + "additionalProperties": true, + "properties": { + "destinationDefinitionId": { + "type": "string", + "format": "uuid" + }, + "name": { + "type": "string" + }, + "dockerRepository": { + "type": "string" + }, + "dockerImageTag": { + "type": "string" + }, + "documentationUrl": { + "type": "string" + }, + "icon": { + "type": "string" + }, + "iconUrl": { + "type": "string" + }, + "spec": { + "type": "object" + }, + "tombstone": { + "description": "if false, the configuration is active. if true, then this configuration is permanently off.", + "type": "boolean", + "default": false + }, + "public": { + "description": "true if this connector definition is available to all workspaces", + "type": "boolean", + "default": false + }, + "custom": { + "description": "whether this is a custom connector definition", + "type": "boolean", + "default": false + }, + "releaseStage": { + "$ref": "#/definitions/ReleaseStage" + }, + "supportLevel": { + "$ref": "#/definitions/SupportLevel" + }, + "releaseDate": { + "description": "The date when this connector was first released, in yyyy-mm-dd format.", + "type": "string", + "format": "date" + }, + "tags": { + "type": "array", + "description": "An array of tags that describe the connector. 
E.g: language:python, keyword:rds, etc.", + "items": { + "type": "string" + } + }, + "resourceRequirements": { + "$ref": "#/definitions/ActorDefinitionResourceRequirements" + }, + "protocolVersion": { + "type": "string", + "description": "the Airbyte Protocol version supported by the connector" + }, + "normalizationConfig": { + "$ref": "#/definitions/NormalizationDestinationDefinitionConfig" + }, + "supportsDbt": { + "type": "boolean", + "description": "an optional flag indicating whether DBT is used in the normalization. If the flag value is NULL - DBT is not used." + }, + "allowedHosts": { + "$ref": "#/definitions/AllowedHosts" + }, + "releases": { + "$ref": "#/definitions/ConnectorRegistryReleases" + }, + "ab_internal": { + "$ref": "#/definitions/AirbyteInternal" + }, + "supportsRefreshes": { + "type": "boolean", + "default": false + }, + "supportsFileTransfer": { + "type": "boolean", + "default": false + }, + "supportsDataActivation": { + "type": "boolean", + "default": false + }, + "generated": { + "$ref": "#/definitions/GeneratedFields" + }, + "packageInfo": { + "$ref": "#/definitions/ConnectorPackageInfo" + }, + "language": { + "type": "string", + "description": "The language the connector is written in" + } + } + }, + "ConnectorPackageInfo": { + "title": "ConnectorPackageInfo", + "description": "Information about the contents of the connector image", + "type": "object", + "properties": { + "cdk_version": { + "type": "string" + } + } + }, + "ConnectorMetric": { + "type": "object", + "properties": { + "usage": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "string", + "enum": [ + "low", + "medium", + "high" + ] + } + ] + }, + "sync_success_rate": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "string", + "enum": [ + "low", + "medium", + "high" + ] + } + ] + }, + "connector_version": { + "type": "string" + } + }, + "additionalProperties": true + }, + "ConnectorMetrics": { + "title": "ConnectorMetrics", + "description": "Information about 
the source file that generated the registry entry", + "type": "object", + "properties": { + "all": { + "$ref": "#/definitions/ConnectorMetric" + }, + "cloud": { + "$ref": "#/definitions/ConnectorMetric" + }, + "oss": { + "$ref": "#/definitions/ConnectorMetric" + } + } + }, + "ConnectorIPCOptions": { + "title": "ConnectorIPCOptions", + "type": "object", + "required": [ + "dataChannel" + ], + "additionalProperties": false, + "properties": { + "dataChannel": { + "type": "object", + "required": [ + "version", + "supportedSerialization", + "supportedTransport" + ], + "additionalProperties": false, + "properties": { + "version": { + "type": "string" + }, + "supportedSerialization": { + "type": "array", + "items": { + "type": "string", + "enum": [ + "JSONL", + "PROTOBUF", + "FLATBUFFERS" + ] + } + }, + "supportedTransport": { + "type": "array", + "items": { + "type": "string", + "enum": [ + "STDIO", + "SOCKET" + ] + } + } + } + } + } + }, + "ConnectorBuildOptions": { + "title": "ConnectorBuildOptions", + "description": "metadata specific to the build process.", + "type": "object", + "additionalProperties": false, + "properties": { + "baseImage": { + "type": "string" + } + } + }, + "VersionBreakingChange": { + "description": "Contains information about a breaking change, including the deadline to upgrade and a message detailing the change.", + "type": "object", + "additionalProperties": false, + "required": [ + "upgradeDeadline", + "message" + ], + "properties": { + "upgradeDeadline": { + "description": "The deadline by which to upgrade before the breaking change takes effect.", + "type": "string", + "format": "date" + }, + "message": { + "description": "Descriptive message detailing the breaking change.", + "type": "string" + }, + "deadlineAction": { + "description": "Action to do when the deadline is reached.", + "type": "string", + "enum": [ + "auto_upgrade", + "disable" + ] + }, + "migrationDocumentationUrl": { + "description": "URL to documentation on how to migrate 
to the current version. Defaults to ${documentationUrl}-migrations#${version}", + "type": "string", + "format": "uri" + }, + "scopedImpact": { + "description": "List of scopes that are impacted by the breaking change. If not specified, the breaking change cannot be scoped to reduce impact via the supported scope types.", + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/BreakingChangeScope" + } + } + } + }, + "BreakingChangeScope": { + "description": "A scope that can be used to limit the impact of a breaking change.", + "type": "object", + "oneOf": [ + { + "$ref": "#/definitions/StreamBreakingChangeScope" + } + ] + }, + "StreamBreakingChangeScope": { + "description": "A scope that can be used to limit the impact of a breaking change to specific streams.", + "type": "object", + "additionalProperties": false, + "required": [ + "scopeType", + "impactedScopes" + ], + "properties": { + "scopeType": { + "type": "const", + "const": "stream" + }, + "impactedScopes": { + "description": "List of streams that are impacted by the breaking change.", + "type": "array", + "minItems": 1, + "items": { + "type": "string" + } + } + } + }, + "ConnectorBreakingChanges": { + "title": "ConnectorBreakingChanges", + "description": "Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.", + "type": "object", + "additionalProperties": false, + "minProperties": 1, + "patternProperties": { + "^\\d+\\.\\d+\\.\\d+$": { + "$ref": "#/definitions/VersionBreakingChange" + } + } + }, + "AllowedHosts": { + "title": "AllowedHosts", + "description": "A connector's allowed hosts. If present, the platform will limit communication to only hosts which are listed in `AllowedHosts.hosts`.", + "type": "object", + "additionalProperties": true, + "properties": { + "hosts": { + "type": "array", + "description": "An array of hosts that this connector can connect to. 
AllowedHosts not being present for the source or destination means that access to all hosts is allowed. An empty list here means that no network access is granted.", + "items": { + "type": "string" + } + } + } + }, + "AirbyteInternal": { + "title": "AirbyteInternal", + "description": "Fields for internal use only", + "type": "object", + "additionalProperties": true, + "properties": { + "sl": { + "type": "integer", + "enum": [ + 0, + 100, + 200, + 300 + ] + }, + "ql": { + "type": "integer", + "enum": [ + 0, + 100, + 200, + 300, + 400, + 500, + 600 + ] + }, + "isEnterprise": { + "type": "boolean", + "default": false + }, + "requireVersionIncrementsInPullRequests": { + "type": "boolean", + "default": true, + "description": "When false, version increment checks will be skipped for this connector" + } + } + }, + "JobTypeResourceLimit": { + "description": "sets resource requirements for a specific job type for an actor definition. these values override the default, if both are set.", + "type": "object", + "additionalProperties": false, + "required": [ + "jobType", + "resourceRequirements" + ], + "properties": { + "jobType": { + "$ref": "#/definitions/JobType" + }, + "resourceRequirements": { + "$ref": "#/definitions/ResourceRequirements" + } + } + }, + "ActorDefinitionResourceRequirements": { + "title": "ActorDefinitionResourceRequirements", + "description": "actor definition specific resource requirements", + "type": "object", + "additionalProperties": false, + "properties": { + "default": { + "description": "if set, these are the requirements that should be set for ALL jobs run for this actor definition.", + "$ref": "#/definitions/ResourceRequirements" + }, + "jobSpecific": { + "type": "array", + "items": { + "$ref": "#/definitions/JobTypeResourceLimit" + } + } + } + } + } +} \ No newline at end of file diff --git a/airbyte_cdk/test/models/connector_metadata/generated/models.py b/airbyte_cdk/test/models/connector_metadata/generated/models.py new file mode 100644 index 
000000000..51227677c --- /dev/null +++ b/airbyte_cdk/test/models/connector_metadata/generated/models.py @@ -0,0 +1,663 @@ +# generated by datamodel-codegen: +# filename: metadata_schema.json + +from __future__ import annotations + +from datetime import date, datetime +from enum import Enum +from typing import Any, Dict, List, Literal, Optional, Union +from uuid import UUID + +from pydantic.v1 import AnyUrl, BaseModel, Extra, Field, conint, constr + + +class ConnectorType(Enum): + destination = "destination" + source = "source" + + +class ConnectorSubtype(Enum): + api = "api" + database = "database" + datalake = "datalake" + file = "file" + custom = "custom" + message_queue = "message_queue" + unknown = "unknown" + vectorstore = "vectorstore" + + +class TestConnections(BaseModel): + class Config: + extra = Extra.forbid + + name: str = Field(..., description="The connection name") + id: str = Field(..., description="The connection ID") + + +class SupportLevel(Enum): + community = "community" + certified = "certified" + archived = "archived" + + +class SuggestedStreams(BaseModel): + class Config: + extra = Extra.allow + + streams: Optional[List[str]] = Field( + None, + description="An array of streams that this connector suggests the average user will want. SuggestedStreams not being present for the source means that all streams are suggested. 
An empty list here means that no streams are suggested.", + ) + + +class SourceFileInfo(BaseModel): + metadata_etag: Optional[str] = None + metadata_file_path: Optional[str] = None + metadata_bucket_name: Optional[str] = None + metadata_last_modified: Optional[str] = None + registry_entry_generated_at: Optional[str] = None + + +class SecretStore(BaseModel): + class Config: + extra = Extra.forbid + + alias: Optional[str] = Field( + None, + description="The alias of the secret store which can map to its actual secret address", + ) + type: Optional[Literal["GSM"]] = Field( + None, description="The type of the secret store" + ) + + +class Secret(BaseModel): + class Config: + extra = Extra.forbid + + name: str = Field(..., description="The secret name in the secret store") + fileName: Optional[str] = Field( + None, + description="The name of the file to which the secret value would be persisted", + ) + secretStore: SecretStore + + +class RolloutConfiguration(BaseModel): + class Config: + extra = Extra.forbid + + enableProgressiveRollout: Optional[bool] = Field( + False, description="Whether to enable progressive rollout for the connector." 
+ ) + initialPercentage: Optional[conint(ge=0, le=100)] = Field( + 0, + description="The percentage of users that should receive the new version initially.", + ) + maxPercentage: Optional[conint(ge=0, le=100)] = Field( + 50, + description="The percentage of users who should receive the release candidate during the test phase before full rollout.", + ) + advanceDelayMinutes: Optional[conint(ge=10)] = Field( + 10, + description="The number of minutes to wait before advancing the rollout percentage.", + ) + + +class ResourceRequirements(BaseModel): + class Config: + extra = Extra.forbid + + cpu_request: Optional[str] = None + cpu_limit: Optional[str] = None + memory_request: Optional[str] = None + memory_limit: Optional[str] = None + + +class PyPi(BaseModel): + class Config: + extra = Extra.forbid + + enabled: bool + packageName: str = Field(..., description="The name of the package on PyPi.") + + +class RemoteRegistries(BaseModel): + class Config: + extra = Extra.forbid + + pypi: Optional[PyPi] = None + + +class ReleaseStage(Enum): + alpha = "alpha" + beta = "beta" + generally_available = "generally_available" + custom = "custom" + + +class NormalizationDestinationDefinitionConfig(BaseModel): + class Config: + extra = Extra.allow + + normalizationRepository: str = Field( + ..., + description="a field indicating the name of the repository to be used for normalization. 
If the value of the flag is NULL - normalization is not used.", + ) + normalizationTag: str = Field( + ..., + description="a field indicating the tag of the docker repository to be used for normalization.", + ) + normalizationIntegrationType: str = Field( + ..., + description="a field indicating the type of integration dialect to use for normalization.", + ) + + +class JobType(Enum): + get_spec = "get_spec" + check_connection = "check_connection" + discover_schema = "discover_schema" + sync = "sync" + reset_connection = "reset_connection" + connection_updater = "connection_updater" + replicate = "replicate" + + +class GitInfo(BaseModel): + class Config: + extra = Extra.forbid + + commit_sha: Optional[str] = Field( + None, + description="The git commit sha of the last commit that modified this file.", + ) + commit_timestamp: Optional[datetime] = Field( + None, + description="The git commit timestamp of the last commit that modified this file.", + ) + commit_author: Optional[str] = Field( + None, + description="The git commit author of the last commit that modified this file.", + ) + commit_author_email: Optional[str] = Field( + None, + description="The git commit author email of the last commit that modified this file.", + ) + + +class Suite(Enum): + unitTests = "unitTests" + integrationTests = "integrationTests" + acceptanceTests = "acceptanceTests" + liveTests = "liveTests" + + +class ConnectorTestSuiteOptions(BaseModel): + class Config: + extra = Extra.forbid + + suite: Suite = Field(..., description="Name of the configured test suite") + testSecrets: Optional[List[Secret]] = Field( + None, description="List of secrets required to run the test suite" + ) + testConnections: Optional[List[TestConnections]] = Field( + None, + description="List of sandbox cloud connections that tests can be run against", + ) + + +class SourceType(Enum): + api = "api" + file = "file" + database = "database" + custom = "custom" + + +class ConnectorPackageInfo(BaseModel): + cdk_version: 
Optional[str] = None + + +class Usage(Enum): + low = "low" + medium = "medium" + high = "high" + + +class SyncSuccessRate(Enum): + low = "low" + medium = "medium" + high = "high" + + +class ConnectorMetric(BaseModel): + class Config: + extra = Extra.allow + + usage: Optional[Union[str, Usage]] = None + sync_success_rate: Optional[Union[str, SyncSuccessRate]] = None + connector_version: Optional[str] = None + + +class ConnectorMetrics(BaseModel): + all: Optional[ConnectorMetric] = None + cloud: Optional[ConnectorMetric] = None + oss: Optional[ConnectorMetric] = None + + +class SupportedSerializationEnum(Enum): + JSONL = "JSONL" + PROTOBUF = "PROTOBUF" + FLATBUFFERS = "FLATBUFFERS" + + +class SupportedTransportEnum(Enum): + STDIO = "STDIO" + SOCKET = "SOCKET" + + +class DataChannel(BaseModel): + class Config: + extra = Extra.forbid + + version: str + supportedSerialization: List[SupportedSerializationEnum] + supportedTransport: List[SupportedTransportEnum] + + +class ConnectorIPCOptions(BaseModel): + class Config: + extra = Extra.forbid + + dataChannel: DataChannel + + +class ConnectorBuildOptions(BaseModel): + class Config: + extra = Extra.forbid + + baseImage: Optional[str] = None + + +class DeadlineAction(Enum): + auto_upgrade = "auto_upgrade" + disable = "disable" + + +class StreamBreakingChangeScope(BaseModel): + class Config: + extra = Extra.forbid + + scopeType: Any = Field("stream", const=True) + impactedScopes: List[str] = Field( + ..., + description="List of streams that are impacted by the breaking change.", + min_items=1, + ) + + +class AllowedHosts(BaseModel): + class Config: + extra = Extra.allow + + hosts: Optional[List[str]] = Field( + None, + description="An array of hosts that this connector can connect to. AllowedHosts not being present for the source or destination means that access to all hosts is allowed. 
An empty list here means that no network access is granted.", + ) + + +class Sl(Enum): + integer_0 = 0 + integer_100 = 100 + integer_200 = 200 + integer_300 = 300 + + +class Ql(Enum): + integer_0 = 0 + integer_100 = 100 + integer_200 = 200 + integer_300 = 300 + integer_400 = 400 + integer_500 = 500 + integer_600 = 600 + + +class AirbyteInternal(BaseModel): + class Config: + extra = Extra.allow + + sl: Optional[Sl] = None + ql: Optional[Ql] = None + isEnterprise: Optional[bool] = False + requireVersionIncrementsInPullRequests: Optional[bool] = Field( + True, + description="When false, version increment checks will be skipped for this connector", + ) + + +class JobTypeResourceLimit(BaseModel): + class Config: + extra = Extra.forbid + + jobType: JobType + resourceRequirements: ResourceRequirements + + +class ActorDefinitionResourceRequirements(BaseModel): + class Config: + extra = Extra.forbid + + default: Optional[ResourceRequirements] = Field( + None, + description="if set, these are the requirements that should be set for ALL jobs run for this actor definition.", + ) + jobSpecific: Optional[List[JobTypeResourceLimit]] = None + + +class RegistryOverrides(BaseModel): + class Config: + extra = Extra.forbid + + enabled: bool + name: Optional[str] = None + dockerRepository: Optional[str] = None + dockerImageTag: Optional[str] = None + supportsDbt: Optional[bool] = None + supportsNormalization: Optional[bool] = None + license: Optional[str] = None + documentationUrl: Optional[AnyUrl] = None + connectorSubtype: Optional[str] = None + allowedHosts: Optional[AllowedHosts] = None + normalizationConfig: Optional[NormalizationDestinationDefinitionConfig] = None + suggestedStreams: Optional[SuggestedStreams] = None + resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None + + +class GeneratedFields(BaseModel): + git: Optional[GitInfo] = None + source_file_info: Optional[SourceFileInfo] = None + metrics: Optional[ConnectorMetrics] = None + sbomUrl: 
Optional[str] = Field(None, description="URL to the SBOM file") + + +class BreakingChangeScope(BaseModel): + __root__: StreamBreakingChangeScope = Field( + ..., + description="A scope that can be used to limit the impact of a breaking change.", + ) + + +class RegistryOverridesModel(BaseModel): + class Config: + extra = Extra.forbid + + oss: Optional[RegistryOverrides] = None + cloud: Optional[RegistryOverrides] = None + + +class VersionBreakingChange(BaseModel): + class Config: + extra = Extra.forbid + + upgradeDeadline: date = Field( + ..., + description="The deadline by which to upgrade before the breaking change takes effect.", + ) + message: str = Field( + ..., description="Descriptive message detailing the breaking change." + ) + deadlineAction: Optional[DeadlineAction] = Field( + None, description="Action to do when the deadline is reached." + ) + migrationDocumentationUrl: Optional[AnyUrl] = Field( + None, + description="URL to documentation on how to migrate to the current version. Defaults to ${documentationUrl}-migrations#${version}", + ) + scopedImpact: Optional[List[BreakingChangeScope]] = Field( + None, + description="List of scopes that are impacted by the breaking change. 
If not specified, the breaking change cannot be scoped to reduce impact via the supported scope types.", + min_items=1, + ) + + +class ConnectorBreakingChanges(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Dict[constr(regex=r"^\d+\.\d+\.\d+$"), VersionBreakingChange] = Field( + ..., + description="Each entry denotes a breaking change in a specific version of a connector that requires user action to upgrade.", + title="ConnectorBreakingChanges", + ) + + +class ConnectorReleases(BaseModel): + class Config: + extra = Extra.forbid + + rolloutConfiguration: Optional[RolloutConfiguration] = None + breakingChanges: Optional[ConnectorBreakingChanges] = None + migrationDocumentationUrl: Optional[AnyUrl] = Field( + None, + description="URL to documentation on how to migrate from the previous version to the current version. Defaults to ${documentationUrl}-migrations", + ) + + +class Data(BaseModel): + class Config: + extra = Extra.forbid + + name: str + icon: Optional[str] = None + definitionId: UUID + connectorBuildOptions: Optional[ConnectorBuildOptions] = None + connectorTestSuitesOptions: Optional[List[ConnectorTestSuiteOptions]] = None + connectorType: ConnectorType + dockerRepository: str + dockerImageTag: str + supportsDbt: Optional[bool] = None + supportsNormalization: Optional[bool] = None + license: str + documentationUrl: AnyUrl + githubIssueLabel: str + maxSecondsBetweenMessages: Optional[int] = Field( + None, + description="Maximum delay between 2 airbyte protocol messages, in second. 
The source will timeout if this delay is reached", + ) + releaseDate: Optional[date] = Field( + None, + description="The date when this connector was first released, in yyyy-mm-dd format.", + ) + protocolVersion: Optional[str] = Field( + None, description="the Airbyte Protocol version supported by the connector" + ) + erdUrl: Optional[str] = Field( + None, description="The URL where you can visualize the ERD" + ) + connectorSubtype: ConnectorSubtype + releaseStage: ReleaseStage + supportLevel: Optional[SupportLevel] = None + tags: Optional[List[str]] = Field( + [], + description="An array of tags that describe the connector. E.g: language:python, keyword:rds, etc.", + ) + registryOverrides: Optional[RegistryOverridesModel] = None + allowedHosts: Optional[AllowedHosts] = None + releases: Optional[ConnectorReleases] = None + normalizationConfig: Optional[NormalizationDestinationDefinitionConfig] = None + suggestedStreams: Optional[SuggestedStreams] = None + resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None + ab_internal: Optional[AirbyteInternal] = None + remoteRegistries: Optional[RemoteRegistries] = None + supportsRefreshes: Optional[bool] = False + generated: Optional[GeneratedFields] = None + supportsFileTransfer: Optional[bool] = False + supportsDataActivation: Optional[bool] = False + connectorIPCOptions: Optional[ConnectorIPCOptions] = None + + +class ConnectorMetadataDefinitionV0(BaseModel): + class Config: + extra = Extra.forbid + + metadataSpecVersion: str + data: Data + + +class ConnectorRegistryV0(BaseModel): + destinations: List[ConnectorRegistryDestinationDefinition] + sources: List[ConnectorRegistrySourceDefinition] + + +class ConnectorRegistrySourceDefinition(BaseModel): + class Config: + extra = Extra.allow + + sourceDefinitionId: UUID + name: str + dockerRepository: str + dockerImageTag: str + documentationUrl: str + icon: Optional[str] = None + iconUrl: Optional[str] = None + sourceType: Optional[SourceType] = None + spec: 
Dict[str, Any] + tombstone: Optional[bool] = Field( + False, + description="if false, the configuration is active. if true, then this configuration is permanently off.", + ) + public: Optional[bool] = Field( + False, + description="true if this connector definition is available to all workspaces", + ) + custom: Optional[bool] = Field( + False, description="whether this is a custom connector definition" + ) + releaseStage: Optional[ReleaseStage] = None + supportLevel: Optional[SupportLevel] = None + releaseDate: Optional[date] = Field( + None, + description="The date when this connector was first released, in yyyy-mm-dd format.", + ) + resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None + protocolVersion: Optional[str] = Field( + None, description="the Airbyte Protocol version supported by the connector" + ) + allowedHosts: Optional[AllowedHosts] = None + suggestedStreams: Optional[SuggestedStreams] = None + maxSecondsBetweenMessages: Optional[int] = Field( + None, + description="Number of seconds allowed between 2 airbyte protocol messages. 
The source will timeout if this delay is reach", + ) + erdUrl: Optional[str] = Field( + None, description="The URL where you can visualize the ERD" + ) + releases: Optional[ConnectorRegistryReleases] = None + ab_internal: Optional[AirbyteInternal] = None + generated: Optional[GeneratedFields] = None + packageInfo: Optional[ConnectorPackageInfo] = None + language: Optional[str] = Field( + None, description="The language the connector is written in" + ) + supportsFileTransfer: Optional[bool] = False + supportsDataActivation: Optional[bool] = False + + +class ConnectorReleaseCandidates(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Dict[ + constr(regex=r"^\d+\.\d+\.\d+(-[0-9A-Za-z-.]+)?$"), VersionReleaseCandidate + ] = Field( + ..., + description="Each entry denotes a release candidate version of a connector.", + ) + + +class VersionReleaseCandidate(BaseModel): + class Config: + extra = Extra.forbid + + __root__: Union[ + ConnectorRegistrySourceDefinition, ConnectorRegistryDestinationDefinition + ] = Field( + ..., + description="Contains information about a release candidate version of a connector.", + ) + + +class ConnectorRegistryReleases(BaseModel): + class Config: + extra = Extra.forbid + + releaseCandidates: Optional[ConnectorReleaseCandidates] = None + rolloutConfiguration: Optional[RolloutConfiguration] = None + breakingChanges: Optional[ConnectorBreakingChanges] = None + migrationDocumentationUrl: Optional[AnyUrl] = Field( + None, + description="URL to documentation on how to migrate from the previous version to the current version. 
Defaults to ${documentationUrl}-migrations", + ) + + +class ConnectorRegistryDestinationDefinition(BaseModel): + class Config: + extra = Extra.allow + + destinationDefinitionId: UUID + name: str + dockerRepository: str + dockerImageTag: str + documentationUrl: str + icon: Optional[str] = None + iconUrl: Optional[str] = None + spec: Dict[str, Any] + tombstone: Optional[bool] = Field( + False, + description="if false, the configuration is active. if true, then this configuration is permanently off.", + ) + public: Optional[bool] = Field( + False, + description="true if this connector definition is available to all workspaces", + ) + custom: Optional[bool] = Field( + False, description="whether this is a custom connector definition" + ) + releaseStage: Optional[ReleaseStage] = None + supportLevel: Optional[SupportLevel] = None + releaseDate: Optional[date] = Field( + None, + description="The date when this connector was first released, in yyyy-mm-dd format.", + ) + tags: Optional[List[str]] = Field( + None, + description="An array of tags that describe the connector. E.g: language:python, keyword:rds, etc.", + ) + resourceRequirements: Optional[ActorDefinitionResourceRequirements] = None + protocolVersion: Optional[str] = Field( + None, description="the Airbyte Protocol version supported by the connector" + ) + normalizationConfig: Optional[NormalizationDestinationDefinitionConfig] = None + supportsDbt: Optional[bool] = Field( + None, + description="an optional flag indicating whether DBT is used in the normalization. 
If the flag value is NULL - DBT is not used.", + ) + allowedHosts: Optional[AllowedHosts] = None + releases: Optional[ConnectorRegistryReleases] = None + ab_internal: Optional[AirbyteInternal] = None + supportsRefreshes: Optional[bool] = False + supportsFileTransfer: Optional[bool] = False + supportsDataActivation: Optional[bool] = False + generated: Optional[GeneratedFields] = None + packageInfo: Optional[ConnectorPackageInfo] = None + language: Optional[str] = Field( + None, description="The language the connector is written in" + ) + + +ConnectorRegistryV0.update_forward_refs() +ConnectorRegistrySourceDefinition.update_forward_refs() +ConnectorReleaseCandidates.update_forward_refs() +VersionReleaseCandidate.update_forward_refs() diff --git a/bin/generate_connector_metadata_files.py b/bin/generate_connector_metadata_files.py new file mode 100755 index 000000000..3d24f0b52 --- /dev/null +++ b/bin/generate_connector_metadata_files.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python3 +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +""" +Generate Pydantic models and JSON schema for connector metadata validation. + +This script downloads metadata schema YAML files from the airbyte monorepo and generates: +1. A consolidated JSON schema file (metadata_schema.json) +2. A single Python file with all Pydantic models (models.py) generated from the JSON schema + +The generated files are used for validating connector metadata.yaml files. +""" + +import json +import re +import subprocess +import sys +import tempfile +from pathlib import Path +from typing import Any + +try: + import yaml +except ImportError: + print("Error: pyyaml is required. 
Install with: pip install pyyaml", file=sys.stderr) + sys.exit(1) + +OUTPUT_DIR_PATH = "airbyte_cdk/test/models/connector_metadata/generated" +AIRBYTE_REPO_URL = "https://github.com/airbytehq/airbyte.git" +SCHEMA_PATH = "airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src" +DATAMODEL_CODEGEN_VERSION = "0.26.3" + + +def clone_schemas_from_github(temp_dir: Path) -> Path: + """Clone metadata schema YAML files from GitHub using sparse checkout.""" + clone_dir = temp_dir / "airbyte" + + print("Cloning metadata schemas from airbyte repo...", file=sys.stderr) + + subprocess.run( + [ + "git", + "clone", + "--depth", + "1", + "--filter=blob:none", + "--sparse", + AIRBYTE_REPO_URL, + str(clone_dir), + ], + check=True, + capture_output=True, + ) + + subprocess.run( + ["git", "-C", str(clone_dir), "sparse-checkout", "set", SCHEMA_PATH], + check=True, + capture_output=True, + ) + + schemas_dir = clone_dir / SCHEMA_PATH + print(f"Cloned schemas to {schemas_dir}", file=sys.stderr) + + return schemas_dir + + +def consolidate_yaml_schemas_to_json(yaml_dir_path: Path, output_json_path: Path) -> None: + """Consolidate all YAML schemas into a single JSON schema file.""" + schemas = {} + + for yaml_file in yaml_dir_path.glob("*.yaml"): + schema_name = yaml_file.stem + schema_content = yaml.safe_load(yaml_file.read_text()) + schemas[schema_name] = schema_content + + all_schema_names = set(schemas.keys()) + json_primitives = {"string", "number", "integer", "boolean", "object", "array", "null"} + + for schema_content in schemas.values(): + if isinstance(schema_content, dict) and "definitions" in schema_content: + all_schema_names.update(schema_content["definitions"].keys()) + + def fix_refs(obj: Any, in_definition: bool = False) -> Any: + """Recursively fix $ref and type references in schema objects.""" + if isinstance(obj, dict): + new_obj = {} + for key, value in obj.items(): + if (key == "$id" or key == "$schema") and in_definition: + continue + elif key == "$ref" 
and isinstance(value, str):
+                    m = re.match(r"(?:.*/)?(?P<name>[^/#]+)\.yaml(?P<frag>#.*)?$", value)
+                    if m:
+                        schema_name = m.group("name")
+                        frag = m.group("frag") or ""
+                        new_obj[key] = f"#/definitions/{schema_name}{frag}"
+                    else:
+                        new_obj[key] = value
+                elif key == "type" and isinstance(value, str):
+                    if value in all_schema_names and value not in json_primitives:
+                        new_obj["$ref"] = f"#/definitions/{value}"
+                    else:
+                        new_obj[key] = value
+                elif key == "type" and value == "const":
+                    pass
+                else:
+                    new_obj[key] = fix_refs(value, in_definition=in_definition)
+            return new_obj
+        elif isinstance(obj, list):
+            return [fix_refs(item, in_definition=in_definition) for item in obj]
+        else:
+            return obj
+
+    # Find the main schema (ConnectorMetadataDefinitionV0)
+    main_schema = schemas.get("ConnectorMetadataDefinitionV0")
+
+    if main_schema:
+        # Create a consolidated schema preserving main schema structure
+        consolidated = dict(main_schema)  # shallow copy
+        consolidated.setdefault("$schema", "http://json-schema.org/draft-07/schema#")
+        consolidated.setdefault("title", "Connector Metadata Schema")
+        consolidated.setdefault(
+            "description", "Consolidated JSON schema for Airbyte connector metadata validation"
+        )
+
+        consolidated_definitions = dict(consolidated.get("definitions", {}))
+
+        # Add all schemas (including their internal definitions) as top-level definitions
+        for schema_name, schema_content in schemas.items():
+            if schema_name != "ConnectorMetadataDefinitionV0":
+                if isinstance(schema_content, dict) and "definitions" in schema_content:
+                    for def_name, def_content in schema_content["definitions"].items():
+                        consolidated_definitions[def_name] = fix_refs(
+                            def_content, in_definition=True
+                        )
+                    schema_without_defs = {
+                        k: v for k, v in schema_content.items() if k != "definitions"
+                    }
+                    consolidated_definitions[schema_name] = fix_refs(
+                        schema_without_defs, in_definition=True
+                    )
+                else:
+                    consolidated_definitions[schema_name] = fix_refs(
+                        schema_content, in_definition=True
+                    
) + + consolidated["definitions"] = consolidated_definitions + consolidated = fix_refs(consolidated, in_definition=False) + + output_json_path.write_text(json.dumps(consolidated, indent=2)) + print(f"Generated consolidated JSON schema: {output_json_path}", file=sys.stderr) + else: + print( + "Warning: ConnectorMetadataDefinitionV0 not found, generating simple consolidation", + file=sys.stderr, + ) + output_json_path.write_text(json.dumps(schemas, indent=2)) + + +def generate_models_from_json_schema(json_schema_path: Path, output_file_path: Path) -> None: + """Generate Pydantic models from consolidated JSON schema.""" + print("Running datamodel-codegen via uvx...", file=sys.stderr) + + subprocess.run( + [ + "uvx", + "--from", + f"datamodel-code-generator=={DATAMODEL_CODEGEN_VERSION}", + "datamodel-codegen", + "--input", + str(json_schema_path), + "--output", + str(output_file_path), + "--input-file-type", + "jsonschema", + "--disable-timestamp", + "--enum-field-as-literal", + "one", + "--set-default-enum-member", + "--use-double-quotes", + "--remove-special-field-name-prefix", + "--field-extra-keys", + "deprecated", + "deprecation_message", + ], + check=True, + ) + + content = output_file_path.read_text() + content = content.replace("from pydantic", "from pydantic.v1") + output_file_path.write_text(content) + + print(f"Generated models: {output_file_path}", file=sys.stderr) + + +def main() -> None: + print("Generating connector metadata models...", file=sys.stderr) + + with tempfile.TemporaryDirectory() as temp_dir: + temp_path = Path(temp_dir) + schemas_dir = clone_schemas_from_github(temp_path) + + output_dir = Path(OUTPUT_DIR_PATH) + output_dir.mkdir(parents=True, exist_ok=True) + + print("Consolidating YAML schemas into JSON...", file=sys.stderr) + json_schema_file = output_dir / "metadata_schema.json" + consolidate_yaml_schemas_to_json(schemas_dir, json_schema_file) + + print("Generating Python models from JSON schema...", file=sys.stderr) + output_file = 
output_dir / "models.py" + generate_models_from_json_schema(json_schema_file, output_file) + + print("Connector metadata model generation complete!", file=sys.stderr) + + +if __name__ == "__main__": + main() diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index bb7dc9b2c..359e8f1ce 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -85,6 +85,33 @@ poetry run poe build This will generate the code generator docker image and the component manifest files based on the schemas and templates. +## Regenerating Connector Metadata Models + +The CDK includes Pydantic models for validating connector `metadata.yaml` files. These models are automatically generated from JSON Schema YAML files maintained in the [airbytehq/airbyte repository](https://github.com/airbytehq/airbyte/tree/master/airbyte-ci/connectors/metadata_service/lib/metadata_service/models/src). + +To regenerate the metadata models, run: + +```bash +poetry run poe build +``` + +This command: +1. Downloads the latest schema YAML files from the airbyte repository +2. Generates all Pydantic models into a single file using `datamodel-code-generator` +3. Generates a consolidated JSON schema file for external validation tools +4. Outputs to `airbyte_cdk/test/models/connector_metadata/generated/`: + - `models.py` - All Pydantic models in a single file + - `metadata_schema.json` - Consolidated JSON schema + +The models can be imported and used for validation: + +```python +from airbyte_cdk.test.models import ConnectorMetadataDefinitionV0 +import yaml + +metadata = ConnectorMetadataDefinitionV0(**yaml.safe_load(metadata_yaml)) +``` + ## Generating API Reference Docs Documentation auto-gen code lives in the `/docs` folder. Based on the doc strings of public methods, we generate API documentation using [pdoc](https://pdoc.dev). 
diff --git a/mypy.ini b/mypy.ini index ff616f462..57612bc59 100644 --- a/mypy.ini +++ b/mypy.ini @@ -12,7 +12,8 @@ disallow_untyped_calls = True disallow_incomplete_defs = True disallow_untyped_defs = True warn_return_any = True -exclude = unit_tests/ +# Exclude tests and auto-generated files from type checking +exclude = (unit_tests/|airbyte_cdk/sources/declarative/models/declarative_component_schema\.py|airbyte_cdk/test/models/connector_metadata/generated/) # Only alert on the files we want to check follow_imports = silent @@ -25,3 +26,7 @@ plugins = ["pydantic.mypy", "pytest-mypy-plugins"] [mypy-airbyte_cdk.models] ignore_errors = True +[mypy-airbyte_cdk.test.models.connector_metadata.generated.*] +ignore_errors = True +[mypy-airbyte_cdk.sources.declarative.models.declarative_component_schema] +ignore_errors = True diff --git a/pyproject.toml b/pyproject.toml index a1fb961e5..71b02bcb5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -144,7 +144,9 @@ lock = { shell = "poetry lock", help = "Lock all dependencies." 
} pre-commit = {cmd = "poetry run pre-commit run --all-files", help = "Run all pre-commit hooks on all files."} # Build tasks -assemble = {cmd = "bin/generate-component-manifest-dagger.sh", help = "Generate component manifest files."} +assemble-declarative = {cmd = "python bin/generate_component_manifest_files.py", help = "Generate declarative component manifest files."} +assemble-metadata = {cmd = "python bin/generate_connector_metadata_files.py", help = "Generate connector metadata models."} +assemble = {sequence = ["assemble-declarative", "assemble-metadata"], help = "Generate all manifest files."} build-package = {cmd = "poetry build", help = "Build the python package: source and wheels archives."} build = {sequence = ["assemble", "openapi-generate", "build-package", "ruff-fix"], help = "Run all tasks to build the package."} diff --git a/ruff.toml b/ruff.toml index 5ed2f45e2..fca658f77 100644 --- a/ruff.toml +++ b/ruff.toml @@ -2,5 +2,10 @@ target-version = "py310" line-length = 100 +exclude = [ + "airbyte_cdk/sources/declarative/models/declarative_component_schema.py", + "airbyte_cdk/test/models/connector_metadata/generated/models.py", +] + [lint] select = ["I"]