diff --git a/.github/workflows/check-yaml-format.yml b/.github/workflows/check-yaml-format.yml index 5f593b62d..f485d6dc5 100644 --- a/.github/workflows/check-yaml-format.yml +++ b/.github/workflows/check-yaml-format.yml @@ -15,7 +15,7 @@ jobs: - name: Get changed files in the docs folder id: changed-files - uses: tj-actions/changed-files@v44.5.5 + uses: tj-actions/changed-files@v45.0.1 with: files: | _data/**/*.yml diff --git a/.github/workflows/test_dispatcher.yml b/.github/workflows/test_dispatcher.yml index aa4782d60..67280aeac 100644 --- a/.github/workflows/test_dispatcher.yml +++ b/.github/workflows/test_dispatcher.yml @@ -25,7 +25,7 @@ jobs: - name: Get changed files id: changed_files - uses: tj-actions/changed-files@v44.5.5 + uses: tj-actions/changed-files@v45.0.1 with: sha: ${{ github.event.pull_request.head.sha }} json: "true" diff --git a/.github/workflows/test_meltano_add_install.yml b/.github/workflows/test_meltano_add_install.yml index da29d097c..fb7bc6587 100644 --- a/.github/workflows/test_meltano_add_install.yml +++ b/.github/workflows/test_meltano_add_install.yml @@ -21,7 +21,7 @@ jobs: - name: Get changed plugins id: changed_plugins - uses: tj-actions/changed-files@v44.5.5 + uses: tj-actions/changed-files@v45.0.1 with: files: _data/meltano/*/*/*.yml matrix: true diff --git a/_data/default_variants.yml b/_data/default_variants.yml index ef8cdb9f9..4723e6e5a 100644 --- a/_data/default_variants.yml +++ b/_data/default_variants.yml @@ -239,6 +239,7 @@ extractors: tap-hotjar: epoch8 tap-hubplanner: airbyte tap-hubspot: singer-io + tap-iceberg: shaped-ai tap-idealo-click-report: horze-international tap-ilevel: singer-io tap-immuta: immuta @@ -491,6 +492,7 @@ extractors: tap-ssb-klass: storebrand tap-stackexchange: meltanolabs tap-stackoverflow-sampledata: buzzcutnorman + tap-staffwise: chartica tap-stamped: hotgluexyz tap-starshipit: zookal tap-statuspage: airbyte @@ -616,6 +618,7 @@ loaders: target-clickhouse: shaped-ai target-csv: meltanolabs target-datadotworld: datadotworld + target-db2: infostrux target-duckdb: jwills target-elasticsearch: dtmirizzi target-gcs: datateer diff --git a/_data/maintainers.yml b/_data/maintainers.yml index 92b3d6609..f6bb67eaf 100644 --- a/_data/maintainers.yml +++ b/_data/maintainers.yml @@ -166,6 +166,10 @@ cguimont: label: cguimont name: cguimont url: https://github.com/cguimont +chartica: + label: chartica + name: chartica + url: https://github.com/chartica checkr: label: Checkr name: checkr @@ -553,6 +557,10 @@ immuta: label: Immuta, Inc. 
name: immuta url: http://www.immuta.com/ +infostrux: + label: Infostrux Solutions + name: infostrux + url: https://www.infostrux.com/ integress-inc: label: Integress Inc name: integress-inc diff --git a/_data/meltano/extractors/tap-iceberg/shaped-ai.yml b/_data/meltano/extractors/tap-iceberg/shaped-ai.yml new file mode 100644 index 000000000..434296961 --- /dev/null +++ b/_data/meltano/extractors/tap-iceberg/shaped-ai.yml @@ -0,0 +1,112 @@ +capabilities: +- about +- batch +- catalog +- discover +- schema-flattening +- state +- stream-maps +description: Meltano Tap for ingesting Apache Iceberg data +domain_url: https://iceberg.apache.org +executable: tap-iceberg +keywords: +- meltano_sdk +label: Iceberg +logo_url: /assets/logos/extractors/iceberg.png +maintenance_status: active +name: tap-iceberg +namespace: tap_iceberg +next_steps: '' +pip_url: git+https://github.com/shaped-ai/tap-iceberg.git +quality: unknown +repo: https://github.com/shaped-ai/tap-iceberg +settings: +- description: Compression format to use for batch files. + kind: options + label: Batch Config Encoding Compression + name: batch_config.encoding.compression + options: + - label: Gzip + value: gzip + - label: None + value: none +- description: Format to use for batch files. + kind: options + label: Batch Config Encoding Format + name: batch_config.encoding.format + options: + - label: Jsonl + value: jsonl + - label: Parquet + value: parquet +- description: Prefix to use when writing batch files. + kind: string + label: Batch Config Storage Prefix + name: batch_config.storage.prefix +- description: Root path to use when writing batch files. + kind: string + label: Batch Config Storage Root + name: batch_config.storage.root +- description: The name of the catalog to connect to. + label: Catalog Name + name: catalog_name +- description: The type of catalog to connect to. + label: Catalog Type + name: catalog_type +- description: The URI of the catalog to connect to. + label: Catalog URI + name: catalog_uri +- description: Additional properties to pass to the catalog connection. + kind: object + label: Catalog Properties + name: catalog_properties +- description: The access key of the AWS Glue Data Catalog. + label: Access Key + name: client_access_key_id + sensitive: true +- description: The secret access key of the AWS Glue Data Catalog. + label: Secret Access Key + name: client_secret_access_key + sensitive: true +- description: The session token of the AWS Glue Data Catalog. + label: Session Token + name: client_session_token + sensitive: true +- description: The region of the AWS Glue Data Catalog. + label: Region + name: client_region +- description: The IAM role ARN to assume when connecting to the AWS Glue Data Catalog. + label: Role ARN + name: client_iam_role_arn +- description: 'One or more LCID locale strings to produce localized output for: https://faker.readthedocs.io/en/master/#localization' + kind: array + label: Faker Config Locale + name: faker_config.locale +- description: 'Value to seed the Faker generator for deterministic output: https://faker.readthedocs.io/en/master/#seeding-the-generator' + kind: string + label: Faker Config Seed + name: faker_config.seed +- description: "'True' to enable schema flattening and automatically expand nested + properties." + kind: boolean + label: Flattening Enabled + name: flattening_enabled +- description: The max depth to flatten schemas. 
+ kind: integer + label: Flattening Max Depth + name: flattening_max_depth +- description: User-defined config values to be used within map expressions. + kind: object + label: Stream Map Config + name: stream_map_config +- description: Config object for stream maps capability. For more information check + out [Stream Maps](https://sdk.meltano.com/en/latest/stream_maps.html). + kind: object + label: Stream Maps + name: stream_maps +settings_group_validation: +- - catalog_type + - catalog_uri +settings_preamble: '' +usage: '' +variant: shaped-ai diff --git a/_data/meltano/extractors/tap-plausible/airbyte.yml b/_data/meltano/extractors/tap-plausible/airbyte.yml index a0b02d482..d21813203 100644 --- a/_data/meltano/extractors/tap-plausible/airbyte.yml +++ b/_data/meltano/extractors/tap-plausible/airbyte.yml @@ -25,9 +25,15 @@ settings: - description: Plausible API Key. See the docs for information on how to generate this key. kind: password - label: Airbyte Config Api Key + label: Airbyte Config API Key name: airbyte_config.api_key sensitive: true +- description: Plausible API URL. The API URL of your plausible instance. + Change this if you self-host plausible. The default is https://plausible.io/api/v1/stats + kind: string + label: Airbyte Config API URL + name: airbyte_config.api_url + value: https://plausible.io/api/v1/stats - description: The domain of the site you want to retrieve data for. Enter the name of your site as configured on Plausible, i.e., excluding "https://" and "www". Can be retrieved from the 'domain' field in your Plausible site settings. @@ -75,6 +81,7 @@ settings: name: stream_maps settings_group_validation: - - airbyte_config.api_key +- - airbyte_config.api_url - airbyte_config.site_id - airbyte_spec - airbyte_spec.image diff --git a/_data/meltano/extractors/tap-sftp/singer-io.yml b/_data/meltano/extractors/tap-sftp/singer-io.yml index 9090e1701..608e69f09 100644 --- a/_data/meltano/extractors/tap-sftp/singer-io.yml +++ b/_data/meltano/extractors/tap-sftp/singer-io.yml @@ -56,5 +56,4 @@ settings_group_validation: - start_date - tables - username - - username variant: singer-io diff --git a/_data/meltano/extractors/tap-staffwise/chartica.yml b/_data/meltano/extractors/tap-staffwise/chartica.yml new file mode 100644 index 000000000..967eb558e --- /dev/null +++ b/_data/meltano/extractors/tap-staffwise/chartica.yml @@ -0,0 +1,106 @@ +capabilities: +- about +- batch +- catalog +- discover +- schema-flattening +- state +- stream-maps +description: Healthcare Productivity Tools +domain_url: https://staffwi.se/user-guide/api/reporting +executable: tap-staffwise +keywords: +- health +- payroll +- meltano_sdk +label: Staffwise +logo_url: /assets/logos/extractors/staffwise.svg +maintenance_status: active +name: tap-staffwise +namespace: tap_staffwise +next_steps: '' +pip_url: git+https://github.com/chartica/tap-staffwise.git +quality: silver +repo: https://github.com/chartica/tap-staffwise +settings: +- description: API Key to authenticate requests and access the API + kind: password + label: API Key + name: api_key + sensitive: true +- description: Subdomain you would like to pull data from + kind: string + label: Subdomain + name: subdomain +- description: Reporting ID of each report + kind: password + label: Reporting ID + name: reporting_id + sensitive: true +- description: The start date for filtering results in the API call + kind: string + label: Start Date (Optional) + name: start_date +- description: The end date for filtering results in the API call + kind: string + 
label: Stop Date (Optional) + name: stop_date +- description: Compression format to use for batch files. + kind: options + label: Batch Config Encoding Compression + name: batch_config.encoding.compression + options: + - label: Gzip + value: gzip + - label: None + value: none +- description: Format to use for batch files. + kind: options + label: Batch Config Encoding Format + name: batch_config.encoding.format + options: + - label: Jsonl + value: jsonl + - label: Parquet + value: parquet +- description: Prefix to use when writing batch files. + kind: string + label: Batch Config Storage Prefix + name: batch_config.storage.prefix +- description: Root path to use when writing batch files. + kind: string + label: Batch Config Storage Root + name: batch_config.storage.root +- description: 'One or more LCID locale strings to produce localized output for: https://faker.readthedocs.io/en/master/#localization' + kind: array + label: Faker Config Locale + name: faker_config.locale +- description: 'Value to seed the Faker generator for deterministic output: https://faker.readthedocs.io/en/master/#seeding-the-generator' + kind: string + label: Faker Config Seed + name: faker_config.seed +- description: "'True' to enable schema flattening and automatically expand nested + properties." + kind: boolean + label: Flattening Enabled + name: flattening_enabled +- description: The max depth to flatten schemas. + kind: integer + label: Flattening Max Depth + name: flattening_max_depth +- description: User-defined config values to be used within map expressions. + kind: object + label: Stream Map Config + name: stream_map_config +- description: Config object for stream maps capability. For more information check + out [Stream Maps](https://sdk.meltano.com/en/latest/stream_maps.html). + kind: object + label: Stream Maps + name: stream_maps +settings_group_validation: +- - api_key + - reporting_id + - subdomain +settings_preamble: '' +usage: '' +variant: chartica diff --git a/_data/meltano/loaders/target-db2/infostrux.yml b/_data/meltano/loaders/target-db2/infostrux.yml new file mode 100644 index 000000000..0d564a09a --- /dev/null +++ b/_data/meltano/loaders/target-db2/infostrux.yml @@ -0,0 +1,111 @@ +capabilities: +- about +- schema-flattening +- stream-maps +- validate-records +description: Singer Target for IBM Db2 +domain_url: https://www.ibm.com/db2 +executable: target-db2 +keywords: +- meltano_sdk +label: IBM Db2 +logo_url: /assets/logos/loaders/db2.png +maintenance_status: active +name: target-db2 +namespace: target_db2 +next_steps: '' +pip_url: target-db2 +quality: unknown +repo: https://github.com/Infostrux-Solutions/target-db2 +settings: +- description: Add metadata to records. + kind: boolean + label: Add Record Metadata + name: add_record_metadata +- description: Maximum number of rows in each batch. + kind: integer + label: Batch Size Rows + name: batch_size_rows +- description: IBM Db2 Database Name + kind: string + label: Database + name: database +- description: 'One or more LCID locale strings to produce localized output for: https://faker.readthedocs.io/en/master/#localization' + kind: array + label: Faker Config Locale + name: faker_config.locale +- description: 'Value to seed the Faker generator for deterministic output: https://faker.readthedocs.io/en/master/#seeding-the-generator' + kind: string + label: Faker Config Seed + name: faker_config.seed +- description: "'True' to enable schema flattening and automatically expand nested properties." 
+ kind: boolean + label: Flattening Enabled + name: flattening_enabled +- description: The max depth to flatten schemas. + kind: integer + label: Flattening Max Depth + name: flattening_max_depth +- description: IBM Db2 Database Host + kind: string + label: Host + name: host +- description: | + The method to use when loading data into the destination. `append-only` + will always write all input records whether that records already exists or not. + `upsert` will update existing records and insert new records. `overwrite` will + delete all existing records and insert all input records. + kind: options + label: Load Method + name: load_method + options: + - label: Append Only + value: append-only + - label: Upsert + value: upsert + - label: Overwrite + value: overwrite + value: append-only +- description: IBM Db2 Database User Password + kind: password + label: Password + name: password + sensitive: true +- description: IBM Db2 Database Port + kind: integer + label: Port + name: port +- description: User-defined config values to be used within map expressions. + kind: object + label: Stream Map Config + name: stream_map_config +- description: | + Config object for stream maps capability. For more information check + out [Stream Maps](https://sdk.meltano.com/en/latest/stream_maps.html). + kind: object + label: Stream Maps + name: stream_maps +- description: IBM Db2 Database User Name + kind: string + label: User + name: user +- description: Whether to validate the schema of the incoming streams. + kind: boolean + label: Validate Records + name: validate_records + value: true +- description: | + Field size for Varchar type. Default 10000. Since JSON values are serialized + to varchar, it may be necessary to increase this value. Max possible value 32764 + kind: integer + label: Varchar Size + name: varchar_size +settings_group_validation: +- - database + - host + - password + - port + - user +settings_preamble: '' +usage: '' +variant: infostrux diff --git a/_data/meltano/loaders/target-redshift/ticketswap.yml b/_data/meltano/loaders/target-redshift/ticketswap.yml index 281d654cd..215d6e482 100644 --- a/_data/meltano/loaders/target-redshift/ticketswap.yml +++ b/_data/meltano/loaders/target-redshift/ticketswap.yml @@ -40,13 +40,13 @@ settings: kind: integer label: Batch Size Rows name: batch_size_rows -- description: Redshift cluster identifier. Note if sqlalchemy_url is set or enable_iam_authentication +- description: Redshift cluster identifier. Note if enable_iam_authentication is false this will be ignored. kind: password label: Cluster IDentifier name: cluster_identifier sensitive: true -- description: Database name. Note if sqlalchemy_url is set this will be ignored. +- description: Database name. kind: string label: Database name name: dbname @@ -55,16 +55,8 @@ settings: label: Default Target Schema name: default_target_schema value: $MELTANO_EXTRACT__LOAD_SCHEMA -- description: Dialect+driver see - https://aws.amazon.com/blogs/big-data/use-the-amazon-redshift-sqlalchemy-dialect-to-interact-with-amazon-redshift. - Generally just leave this alone. Note if sqlalchemy_url is set this will be ignored. - kind: string - label: Dialect+Driver - name: dialect+driver - value: redshift+redshift_connector - description: If true, use temporary credentials (https://docs.aws.amazon.com/redshift/latest/mgmt/generating-iam-credentials-cli-api.html). - Note if sqlalchemy_url is set this will be ignored. 
kind: boolean label: Enable Iam Authentication name: enable_iam_authentication @@ -92,8 +84,7 @@ settings: label: Hard Delete name: hard_delete value: false -- description: Hostname for redshift instance. Note if sqlalchemy_url is set this - will be ignored. +- description: Hostname for redshift instance. kind: string label: Host name: host @@ -112,14 +103,12 @@ settings: - label: Overwrite value: overwrite value: append-only -- description: Password used to authenticate. Note if sqlalchemy_url is set this will - be ignored. +- description: Password used to authenticate. kind: password label: Password name: password sensitive: true -- description: The port on which redshift is awaiting connection. Note if sqlalchemy_url - is set this will be ignored. +- description: The port on which redshift is awaiting connection. kind: string label: Port name: port @@ -139,17 +128,14 @@ settings: name: s3_key_prefix sensitive: true value: '' -- description: SQLAlchemy connection string. This will override using host, user, - password, port, dialect, and all ssl settings. Note that you must escape password - special characters properly. See - https://docs.sqlalchemy.org/en/20/core/engines.html#escaping-special-characters-such-as-signs-in-passwords +- description: AWS region for S3 bucket. If not specified, region will be detected by boto config resolution. + See https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html. kind: string - label: Sqlalchemy URL - name: sqlalchemy_url + label: S3 Region + name: s3_region - description: Whether or not to use ssl to verify the server's identity. Use ssl_certificate_authority and ssl_mode for further customization. To use a client certificate to authenticate - yourself to the server, use ssl_client_certificate_enable instead. Note if sqlalchemy_url - is set this will be ignored. + yourself to the server, use ssl_client_certificate_enable instead. kind: boolean label: SSL Enable name: ssl_enable @@ -157,7 +143,7 @@ settings: - description: SSL Protection method, see [postgres documentation](https://www.postgresql.org/docs/current/libpq-ssl.html#LIBPQ-SSL-PROTECTION) for more information. Must be one of disable, allow, prefer, require, verify-ca, - or verify-full. Note if sqlalchemy_url is set this will be ignored. + or verify-full. kind: string label: SSL Mode name: ssl_mode @@ -176,8 +162,7 @@ settings: label: Temp Dir name: temp_dir value: temp -- description: User name used to authenticate. Note if sqlalchemy_url is set this - will be ignored. +- description: User name used to authenticate. 
kind: string label: User name: user diff --git a/_data/meltano/utilities/dbt-athena/dbt-athena.yml b/_data/meltano/utilities/dbt-athena/dbt-athena.yml index de029023b..c16594ec0 100644 --- a/_data/meltano/utilities/dbt-athena/dbt-athena.yml +++ b/_data/meltano/utilities/dbt-athena/dbt-athena.yml @@ -69,7 +69,7 @@ next_steps: |- # create a starter dbt_project.yml file, a profiles.yml file, and models directory meltano invoke dbt-athena:initialize ``` -pip_url: dbt-core dbt-athena-community git+https://github.com/meltano/dbt-ext.git@main +pip_url: dbt-core dbt-athena-community meltano-dbt-ext~=0.3.0 repo: https://github.com/dbt-athena/dbt-athena settings: - description: | diff --git a/_data/meltano/utilities/dbt-bigquery/dbt-labs.yml b/_data/meltano/utilities/dbt-bigquery/dbt-labs.yml index 79e1b7835..d03f28476 100644 --- a/_data/meltano/utilities/dbt-bigquery/dbt-labs.yml +++ b/_data/meltano/utilities/dbt-bigquery/dbt-labs.yml @@ -69,7 +69,7 @@ next_steps: |- # create a starter dbt_project.yml file, a profiles.yml file, and models directory meltano invoke dbt-bigquery:initialize ``` -pip_url: dbt-core dbt-bigquery git+https://github.com/meltano/dbt-ext.git@main +pip_url: dbt-core dbt-bigquery meltano-dbt-ext~=0.3.0 repo: https://github.com/dbt-labs/dbt-bigquery settings: - description: | diff --git a/_data/meltano/utilities/dbt-duckdb/jwills.yml b/_data/meltano/utilities/dbt-duckdb/jwills.yml index 2814c14e3..f389ea64e 100644 --- a/_data/meltano/utilities/dbt-duckdb/jwills.yml +++ b/_data/meltano/utilities/dbt-duckdb/jwills.yml @@ -70,7 +70,7 @@ next_steps: | # create a starter dbt_project.yml file, a profiles.yml file, and models directory meltano invoke dbt-duckdb:initialize ``` -pip_url: dbt-core dbt-duckdb git+https://github.com/meltano/dbt-ext.git@main +pip_url: dbt-core dbt-duckdb meltano-dbt-ext~=0.3.0 repo: https://github.com/jwills/dbt-duckdb settings: - description: The path on your local filesystem where you would like the DuckDB database diff --git a/_data/meltano/utilities/dbt-postgres/dbt-labs.yml b/_data/meltano/utilities/dbt-postgres/dbt-labs.yml index 7fd595975..84bbb6e57 100644 --- a/_data/meltano/utilities/dbt-postgres/dbt-labs.yml +++ b/_data/meltano/utilities/dbt-postgres/dbt-labs.yml @@ -68,7 +68,7 @@ next_steps: |- # create a starter dbt_project.yml file, a profiles.yml file, and models directory meltano invoke dbt-postgres:initialize ``` -pip_url: dbt-core dbt-postgres git+https://github.com/meltano/dbt-ext.git@main +pip_url: dbt-core dbt-postgres meltano-dbt-ext~=0.3.0 repo: https://github.com/dbt-labs/dbt-core settings: - aliases: diff --git a/_data/meltano/utilities/dbt-redshift/dbt-labs.yml b/_data/meltano/utilities/dbt-redshift/dbt-labs.yml index 9e1ee570c..921b6854f 100644 --- a/_data/meltano/utilities/dbt-redshift/dbt-labs.yml +++ b/_data/meltano/utilities/dbt-redshift/dbt-labs.yml @@ -69,7 +69,7 @@ next_steps: |- # create a starter dbt_project.yml file, a profiles.yml file, and models directory meltano invoke dbt-redshift:initialize ``` -pip_url: dbt-core dbt-redshift git+https://github.com/meltano/dbt-ext.git@main +pip_url: dbt-core dbt-redshift meltano-dbt-ext~=0.3.0 repo: https://github.com/dbt-labs/dbt-redshift settings: - description: | diff --git a/_data/meltano/utilities/dbt-snowflake/dbt-labs.yml b/_data/meltano/utilities/dbt-snowflake/dbt-labs.yml index df7ea9c31..db9851368 100644 --- a/_data/meltano/utilities/dbt-snowflake/dbt-labs.yml +++ b/_data/meltano/utilities/dbt-snowflake/dbt-labs.yml @@ -69,7 +69,7 @@ next_steps: |- # create a starter 
dbt_project.yml file, a profiles.yml file, and models directory meltano invoke dbt-snowflake:initialize ``` -pip_url: dbt-core dbt-snowflake git+https://github.com/meltano/dbt-ext.git@main +pip_url: dbt-core dbt-snowflake meltano-dbt-ext~=0.3.0 repo: https://github.com/dbt-labs/dbt-snowflake settings: - description: The snowflake account to connect to. diff --git a/_data/meltano/utilities/matatika/matatika.yml b/_data/meltano/utilities/matatika/matatika.yml index 68d75ace9..29f077df9 100644 --- a/_data/meltano/utilities/matatika/matatika.yml +++ b/_data/meltano/utilities/matatika/matatika.yml @@ -50,7 +50,7 @@ next_steps: | If you're running into problems with this extension, or just want to chat about all things data, [join the Matatika Slack community](https://join.slack.com/t/matatika/shared_invite/zt-19n1bfokx-F31DNitTpSxWCFO2aFlgxg).

-pip_url: git+https://github.com/Matatika/matatika-ext.git@v0.34.0 +pip_url: git+https://github.com/Matatika/matatika-ext.git@v0.36.0 repo: https://github.com/Matatika/matatika-ce usage: For help, try `meltano invoke matatika --help` (or `--help` on any subcommand). variant: matatika diff --git a/_data/variant_metrics.yml b/_data/variant_metrics.yml index 825c2da9d..20aa09c31 100644 --- a/_data/variant_metrics.yml +++ b/_data/variant_metrics.yml @@ -9531,7 +9531,7 @@ metrics: all_projects_unstruct_by_name: 9 all_projects_unstruct_by_variant: 9 name: dbt-bigquery - pip_url: dbt-core~=1.3.0 dbt-bigquery~=1.3.0 git+https://github.com/meltano/dbt-ext.git@main + pip_url: dbt-core dbt-bigquery meltano-dbt-ext~=0.3.0 plugin_type: utilities success_execs_by_name: 0 success_execs_unstruct_by_name: 650 @@ -9550,7 +9550,7 @@ metrics: all_projects_unstruct_by_name: 141 all_projects_unstruct_by_variant: 141 name: dbt-postgres - pip_url: dbt-core~=1.3.0 dbt-postgres~=1.3.0 + pip_url: dbt-core dbt-postgres meltano-dbt-ext~=0.3.0 plugin_type: transformers success_execs_by_name: 0 success_execs_unstruct_by_name: 208849 @@ -9569,7 +9569,7 @@ metrics: all_projects_unstruct_by_name: 7 all_projects_unstruct_by_variant: 7 name: dbt-redshift - pip_url: dbt-core~=1.3.0 dbt-redshift~=1.3.0 git+https://github.com/meltano/dbt-ext.git@main + pip_url: dbt-core dbt-redshift meltano-dbt-ext~=0.3.0 plugin_type: utilities success_execs_by_name: 0 success_execs_unstruct_by_name: 35 @@ -9588,7 +9588,7 @@ metrics: all_projects_unstruct_by_name: 19 all_projects_unstruct_by_variant: 19 name: dbt-snowflake - pip_url: dbt-core~=1.3.0 dbt-snowflake~=1.3.0 + pip_url: dbt-core dbt-snowflake meltano-dbt-ext~=0.3.0 plugin_type: transformers success_execs_by_name: 0 success_execs_unstruct_by_name: 6889 @@ -12799,7 +12799,7 @@ metrics: all_projects_unstruct_by_name: 70 all_projects_unstruct_by_variant: 70 name: dbt-duckdb - pip_url: dbt-core~=1.2.0 dbt-duckdb~=1.2.0 + pip_url: dbt-core dbt-duckdb meltano-dbt-ext~=0.3.0 plugin_type: transformers success_execs_by_name: 0 success_execs_unstruct_by_name: 368 diff --git a/poetry.lock b/poetry.lock index e8cd0f4f7..5efb6b4b5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "attrs" @@ -19,33 +19,33 @@ tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy" [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = 
"black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] @@ -499,4 +499,4 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "f6c947e1045cd6da7b090313bfd1f9ca7fb5c3d57fbc04e4f85660af787ce1a1" +content-hash = "341bd84d3e62bd543bf902b2c8bb6647487b0d0885689a3bb6b46bd80e7b5803" diff --git a/pyproject.toml b/pyproject.toml index 81ce1dcdd..5d3cbbba7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ yamllint = "^1.35.1" requests = "^2.32.3" [tool.poetry.dev-dependencies] -black 
= "^24.4.2" +black = "^24.8.0" isort = "^5.13.2" [build-system] diff --git a/static/assets/logos/extractors/iceberg.png b/static/assets/logos/extractors/iceberg.png new file mode 100644 index 000000000..e4a99c395 Binary files /dev/null and b/static/assets/logos/extractors/iceberg.png differ diff --git a/static/assets/logos/extractors/staffwise.svg b/static/assets/logos/extractors/staffwise.svg new file mode 100644 index 000000000..a6aa4529e --- /dev/null +++ b/static/assets/logos/extractors/staffwise.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/static/assets/logos/loaders/db2.png b/static/assets/logos/loaders/db2.png new file mode 100644 index 000000000..cd272ed1e Binary files /dev/null and b/static/assets/logos/loaders/db2.png differ