From a6329457b8ebfb4cb6b1f25679fe8a0ddd7315a8 Mon Sep 17 00:00:00 2001 From: prateekrai-atlan Date: Thu, 21 Nov 2024 14:44:29 +0530 Subject: [PATCH 01/19] replacement of `ApplicationContainer` by `Application` --- docs/asset/applicationcontainer.rst | 10 - docs/assets.rst | 2 +- ...on_container.jinja2 => application.jinja2} | 4 +- ...on_container.jinja2 => application.jinja2} | 4 +- pyatlan/model/assets/__init__.py | 4 +- pyatlan/model/assets/__init__.pyi | 6 +- pyatlan/model/assets/a_d_l_s.py | 33 --- pyatlan/model/assets/core/__init__.py | 6 +- pyatlan/model/assets/core/app.py | 61 +++++ pyatlan/model/assets/core/application.py | 80 +++++-- .../assets/core/application_container.py | 113 ---------- pyatlan/model/assets/core/asset.py | 41 ++++ pyatlan/model/assets/core/catalog.py | 56 +---- pyatlan/model/assets/core/dbt_metric.py | 28 --- .../assets/core/dynamo_d_b_secondary_index.py | 28 --- pyatlan/model/assets/core/snowflake_tag.py | 28 --- pyatlan/model/assets/data_studio.py | 53 ----- pyatlan/model/assets/data_studio_asset.py | 28 --- pyatlan/model/assets/dbt_column_process.py | 28 --- pyatlan/model/assets/dbt_process.py | 28 --- pyatlan/model/assets/dbt_tag.py | 28 --- pyatlan/model/assets/dynamo_dbtable.py | 28 --- pyatlan/model/assets/g_c_s.py | 53 ----- pyatlan/model/assets/s3.py | 28 --- pyatlan/model/enums.py | 4 +- pyatlan/model/typedef.py | 2 +- tests/integration/app_asset_test.py | 211 +++++++++++++++++ tests/integration/application_asset_test.py | 212 ------------------ tests/unit/model/application_asset_test.py | 73 ------ tests/unit/model/application_test.py | 73 ++++++ tests/unit/model/constants.py | 10 +- tests/unit/test_model.py | 4 +- 32 files changed, 474 insertions(+), 893 deletions(-) delete mode 100644 docs/asset/applicationcontainer.rst rename pyatlan/generator/templates/methods/asset/{application_container.jinja2 => application.jinja2} (81%) rename pyatlan/generator/templates/methods/attribute/{application_container.jinja2 => 
application.jinja2} (86%) create mode 100644 pyatlan/model/assets/core/app.py delete mode 100644 pyatlan/model/assets/core/application_container.py create mode 100644 tests/integration/app_asset_test.py delete mode 100644 tests/integration/application_asset_test.py delete mode 100644 tests/unit/model/application_asset_test.py create mode 100644 tests/unit/model/application_test.py diff --git a/docs/asset/applicationcontainer.rst b/docs/asset/applicationcontainer.rst deleted file mode 100644 index 45dfe9125..000000000 --- a/docs/asset/applicationcontainer.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. _applicationcontainer: - -ApplicationContainer -==================== - -.. module:: pyatlan.model.assets - :no-index: - -.. autoclass:: ApplicationContainer - :members: diff --git a/docs/assets.rst b/docs/assets.rst index 296d571bc..d832cca0a 100644 --- a/docs/assets.rst +++ b/docs/assets.rst @@ -31,8 +31,8 @@ You can interact with all of the following different kinds of assets: asset/airflowtask asset/anomalo asset/anomalocheck + asset/app asset/application - asset/applicationcontainer asset/asset asset/atlasglossary asset/atlasglossarycategory diff --git a/pyatlan/generator/templates/methods/asset/application_container.jinja2 b/pyatlan/generator/templates/methods/asset/application.jinja2 similarity index 81% rename from pyatlan/generator/templates/methods/asset/application_container.jinja2 rename to pyatlan/generator/templates/methods/asset/application.jinja2 index 417af5c4c..4b3d9b6e1 100644 --- a/pyatlan/generator/templates/methods/asset/application_container.jinja2 +++ b/pyatlan/generator/templates/methods/asset/application.jinja2 @@ -6,11 +6,11 @@ *, name: str, connection_qualified_name: str, - ) -> ApplicationContainer: + ) -> Application: validate_required_fields( ["name", "connection_qualified_name"], [name, connection_qualified_name] ) - attributes = ApplicationContainer.Attributes.creator( + attributes = Application.Attributes.creator( name=name, 
connection_qualified_name=connection_qualified_name, ) diff --git a/pyatlan/generator/templates/methods/attribute/application_container.jinja2 b/pyatlan/generator/templates/methods/attribute/application.jinja2 similarity index 86% rename from pyatlan/generator/templates/methods/attribute/application_container.jinja2 rename to pyatlan/generator/templates/methods/attribute/application.jinja2 index aa2d838db..b09cbf042 100644 --- a/pyatlan/generator/templates/methods/attribute/application_container.jinja2 +++ b/pyatlan/generator/templates/methods/attribute/application.jinja2 @@ -6,11 +6,11 @@ *, name: str, connection_qualified_name: str, - ) -> ApplicationContainer.Attributes: + ) -> Application.Attributes: validate_required_fields( ["name", "connection_qualified_name"], [name, connection_qualified_name] ) - return ApplicationContainer.Attributes( + return Application.Attributes( name=name, qualified_name=f"{connection_qualified_name}/{name}", connection_qualified_name=connection_qualified_name, diff --git a/pyatlan/model/assets/__init__.py b/pyatlan/model/assets/__init__.py index c0aa20b5c..b04732018 100644 --- a/pyatlan/model/assets/__init__.py +++ b/pyatlan/model/assets/__init__.py @@ -21,6 +21,7 @@ "Folder", "Airflow", "DataContract", + "App", "ADF", "DataQuality", "BI", @@ -35,11 +36,11 @@ "Spark", "Tag", "SchemaRegistry", - "Application", "Fivetran", "Stakeholder", "AirflowDag", "AirflowTask", + "Application", "AdfDataflow", "AdfDataset", "AdfPipeline", @@ -87,7 +88,6 @@ "ModelDataModel", "SparkJob", "SchemaRegistrySubject", - "ApplicationContainer", "FivetranConnector", "AnomaloCheck", "MCIncident", diff --git a/pyatlan/model/assets/__init__.pyi b/pyatlan/model/assets/__init__.pyi index bd05ee1f7..6b03e8fbe 100644 --- a/pyatlan/model/assets/__init__.pyi +++ b/pyatlan/model/assets/__init__.pyi @@ -18,6 +18,7 @@ __all__ = [ "Folder", "Airflow", "DataContract", + "App", "ADF", "DataQuality", "BI", @@ -32,11 +33,11 @@ __all__ = [ "Spark", "Tag", "SchemaRegistry", - 
"Application", "Fivetran", "Stakeholder", "AirflowDag", "AirflowTask", + "Application", "AdfDataflow", "AdfDataset", "AdfPipeline", @@ -84,7 +85,6 @@ __all__ = [ "ModelDataModel", "SparkJob", "SchemaRegistrySubject", - "ApplicationContainer", "FivetranConnector", "AnomaloCheck", "MCIncident", @@ -352,8 +352,8 @@ from .core.airflow_dag import AirflowDag from .core.airflow_task import AirflowTask from .core.anomalo import Anomalo from .core.anomalo_check import AnomaloCheck +from .core.app import App from .core.application import Application -from .core.application_container import ApplicationContainer from .core.asset import Asset from .core.atlas_glossary import AtlasGlossary from .core.atlas_glossary_category import AtlasGlossaryCategory diff --git a/pyatlan/model/assets/a_d_l_s.py b/pyatlan/model/assets/a_d_l_s.py index 3336dc99c..23ee2a72c 100644 --- a/pyatlan/model/assets/a_d_l_s.py +++ b/pyatlan/model/assets/a_d_l_s.py @@ -60,12 +60,6 @@ def __setattr__(self, name, value): """ Tags that have been applied to this asset in Azure. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. 
- """ _convenience_properties: ClassVar[List[str]] = [ "adls_account_qualified_name", @@ -73,7 +67,6 @@ def __setattr__(self, name, value): "azure_location", "adls_account_secondary_location", "azure_tags", - "asset_application_qualified_name", ] @property @@ -138,38 +131,14 @@ def azure_tags(self, azure_tags: Optional[List[AzureTag]]): self.attributes = self.Attributes() self.attributes.azure_tags = azure_tags - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - class Attributes(ObjectStore.Attributes): adls_account_qualified_name: Optional[str] = Field(default=None, description="") - application_asset_qualified_name: Optional[str] = Field( - default=None, description="" - ) azure_resource_id: Optional[str] = Field(default=None, description="") azure_location: Optional[str] = Field(default=None, description="") adls_account_secondary_location: Optional[str] = Field( default=None, description="" ) azure_tags: Optional[List[AzureTag]] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) attributes: ADLS.Attributes = Field( default_factory=lambda: ADLS.Attributes(), @@ -181,6 +150,4 @@ class Attributes(ObjectStore.Attributes): ) -from .core.application_container import ApplicationContainer # noqa - ADLS.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/core/__init__.py b/pyatlan/model/assets/core/__init__.py index 474bf8369..e4350b02f 100644 --- a/pyatlan/model/assets/core/__init__.py +++ b/pyatlan/model/assets/core/__init__.py @@ -14,8 +14,8 
@@ from .airflow_task import AirflowTask from .anomalo import Anomalo from .anomalo_check import AnomaloCheck +from .app import App from .application import Application -from .application_container import ApplicationContainer from .asset import Asset from .atlas_glossary import AtlasGlossary from .atlas_glossary_category import AtlasGlossaryCategory @@ -128,6 +128,7 @@ Folder.Attributes.update_forward_refs(**localns) Airflow.Attributes.update_forward_refs(**localns) DataContract.Attributes.update_forward_refs(**localns) +App.Attributes.update_forward_refs(**localns) ADF.Attributes.update_forward_refs(**localns) DataQuality.Attributes.update_forward_refs(**localns) BI.Attributes.update_forward_refs(**localns) @@ -142,11 +143,11 @@ Spark.Attributes.update_forward_refs(**localns) Tag.Attributes.update_forward_refs(**localns) SchemaRegistry.Attributes.update_forward_refs(**localns) -Application.Attributes.update_forward_refs(**localns) Fivetran.Attributes.update_forward_refs(**localns) Stakeholder.Attributes.update_forward_refs(**localns) AirflowDag.Attributes.update_forward_refs(**localns) AirflowTask.Attributes.update_forward_refs(**localns) +Application.Attributes.update_forward_refs(**localns) AdfDataflow.Attributes.update_forward_refs(**localns) AdfDataset.Attributes.update_forward_refs(**localns) AdfPipeline.Attributes.update_forward_refs(**localns) @@ -194,7 +195,6 @@ ModelDataModel.Attributes.update_forward_refs(**localns) SparkJob.Attributes.update_forward_refs(**localns) SchemaRegistrySubject.Attributes.update_forward_refs(**localns) -ApplicationContainer.Attributes.update_forward_refs(**localns) FivetranConnector.Attributes.update_forward_refs(**localns) AnomaloCheck.Attributes.update_forward_refs(**localns) MCIncident.Attributes.update_forward_refs(**localns) diff --git a/pyatlan/model/assets/core/app.py b/pyatlan/model/assets/core/app.py new file mode 100644 index 000000000..ea602994b --- /dev/null +++ b/pyatlan/model/assets/core/app.py @@ -0,0 +1,61 @@ +# 
SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, List, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField + +from .catalog import Catalog + + +class App(Catalog): + """Description""" + + type_name: str = Field(default="App", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "App": + raise ValueError("must be App") + return v + + def __setattr__(self, name, value): + if name in App._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + APP_ID: ClassVar[KeywordField] = KeywordField("appId", "appId") + """ + Unique identifier for the App asset from the source system. + """ + + _convenience_properties: ClassVar[List[str]] = [ + "app_id", + ] + + @property + def app_id(self) -> Optional[str]: + return None if self.attributes is None else self.attributes.app_id + + @app_id.setter + def app_id(self, app_id: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.app_id = app_id + + class Attributes(Catalog.Attributes): + app_id: Optional[str] = Field(default=None, description="") + + attributes: App.Attributes = Field( + default_factory=lambda: App.Attributes(), + description=( + "Map of attributes in the instance and their values. " + "The specific keys of this map will vary by type, " + "so are described in the sub-types of this schema." 
+ ), + ) diff --git a/pyatlan/model/assets/core/application.py b/pyatlan/model/assets/core/application.py index 53a16906a..8d1f9b450 100644 --- a/pyatlan/model/assets/core/application.py +++ b/pyatlan/model/assets/core/application.py @@ -8,14 +8,33 @@ from pydantic.v1 import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField +from pyatlan.model.enums import AtlanConnectorType +from pyatlan.model.fields.atlan_fields import RelationField +from pyatlan.utils import init_guid, validate_required_fields -from .catalog import Catalog +from .app import App -class Application(Catalog): +class Application(App): """Description""" + @classmethod + @init_guid + def creator( + cls, + *, + name: str, + connection_qualified_name: str, + ) -> Application: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + attributes = Application.Attributes.creator( + name=name, + connection_qualified_name=connection_qualified_name, + ) + return cls(attributes=attributes) + type_name: str = Field(default="Application", allow_mutation=False) @validator("type_name") @@ -29,29 +48,55 @@ def __setattr__(self, name, value): return object.__setattr__(self, name, value) super().__setattr__(name, value) - APPLICATION_ID: ClassVar[KeywordField] = KeywordField( - "applicationId", "applicationId" + APPLICATION_OWNED_ASSETS: ClassVar[RelationField] = RelationField( + "applicationOwnedAssets" ) """ - Unique identifier for the Application asset from the source system. 
+ TBC """ _convenience_properties: ClassVar[List[str]] = [ - "application_id", + "application_owned_assets", ] @property - def application_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.application_id - - @application_id.setter - def application_id(self, application_id: Optional[str]): + def application_owned_assets(self) -> Optional[List[Asset]]: + return ( + None + if self.attributes is None + else self.attributes.application_owned_assets + ) + + @application_owned_assets.setter + def application_owned_assets(self, application_owned_assets: Optional[List[Asset]]): if self.attributes is None: self.attributes = self.Attributes() - self.attributes.application_id = application_id - - class Attributes(Catalog.Attributes): - application_id: Optional[str] = Field(default=None, description="") + self.attributes.application_owned_assets = application_owned_assets + + class Attributes(App.Attributes): + application_owned_assets: Optional[List[Asset]] = Field( + default=None, description="" + ) # relationship + + @classmethod + @init_guid + def creator( + cls, + *, + name: str, + connection_qualified_name: str, + ) -> Application.Attributes: + validate_required_fields( + ["name", "connection_qualified_name"], [name, connection_qualified_name] + ) + return Application.Attributes( + name=name, + qualified_name=f"{connection_qualified_name}/{name}", + connection_qualified_name=connection_qualified_name, + connector_name=AtlanConnectorType.get_connector_name( + connection_qualified_name + ), + ) attributes: Application.Attributes = Field( default_factory=lambda: Application.Attributes(), @@ -61,3 +106,6 @@ class Attributes(Catalog.Attributes): "so are described in the sub-types of this schema." 
), ) + + +from .asset import Asset # noqa diff --git a/pyatlan/model/assets/core/application_container.py b/pyatlan/model/assets/core/application_container.py deleted file mode 100644 index 0f856372c..000000000 --- a/pyatlan/model/assets/core/application_container.py +++ /dev/null @@ -1,113 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -from typing import ClassVar, List, Optional - -from pydantic.v1 import Field, validator - -from pyatlan.model.enums import AtlanConnectorType -from pyatlan.model.fields.atlan_fields import RelationField -from pyatlan.utils import init_guid, validate_required_fields - -from .application import Application - - -class ApplicationContainer(Application): - """Description""" - - @classmethod - @init_guid - def creator( - cls, - *, - name: str, - connection_qualified_name: str, - ) -> ApplicationContainer: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - attributes = ApplicationContainer.Attributes.creator( - name=name, - connection_qualified_name=connection_qualified_name, - ) - return cls(attributes=attributes) - - type_name: str = Field(default="ApplicationContainer", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "ApplicationContainer": - raise ValueError("must be ApplicationContainer") - return v - - def __setattr__(self, name, value): - if name in ApplicationContainer._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - APPLICATION_OWNED_ASSETS: ClassVar[RelationField] = RelationField( - "applicationOwnedAssets" - ) - """ - TBC - """ - - _convenience_properties: ClassVar[List[str]] = [ - "application_owned_assets", - ] - - @property - def application_owned_assets(self) -> Optional[List[Catalog]]: - return ( - None - if self.attributes is None - else self.attributes.application_owned_assets - ) - 
- @application_owned_assets.setter - def application_owned_assets( - self, application_owned_assets: Optional[List[Catalog]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.application_owned_assets = application_owned_assets - - class Attributes(Application.Attributes): - application_owned_assets: Optional[List[Catalog]] = Field( - default=None, description="" - ) # relationship - - @classmethod - @init_guid - def creator( - cls, - *, - name: str, - connection_qualified_name: str, - ) -> ApplicationContainer.Attributes: - validate_required_fields( - ["name", "connection_qualified_name"], [name, connection_qualified_name] - ) - return ApplicationContainer.Attributes( - name=name, - qualified_name=f"{connection_qualified_name}/{name}", - connection_qualified_name=connection_qualified_name, - connector_name=AtlanConnectorType.get_connector_name( - connection_qualified_name - ), - ) - - attributes: ApplicationContainer.Attributes = Field( - default_factory=lambda: ApplicationContainer.Attributes(), - description=( - "Map of attributes in the instance and their values. " - "The specific keys of this map will vary by type, " - "so are described in the sub-types of this schema." - ), - ) - - -from .catalog import Catalog # noqa diff --git a/pyatlan/model/assets/core/asset.py b/pyatlan/model/assets/core/asset.py index c1c8c5897..ad7139292 100644 --- a/pyatlan/model/assets/core/asset.py +++ b/pyatlan/model/assets/core/asset.py @@ -1077,6 +1077,12 @@ def __setattr__(self, name, value): """ Array of policy ids non-compliant to this asset """ + APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( + "applicationQualifiedName", "applicationQualifiedName" + ) + """ + Qualified name of the Application that contains this asset. 
+ """ SCHEMA_REGISTRY_SUBJECTS: ClassVar[RelationField] = RelationField( "schemaRegistrySubjects" @@ -1124,6 +1130,10 @@ def __setattr__(self, name, value): """ TBC """ + APPLICATION: ClassVar[RelationField] = RelationField("application") + """ + TBC + """ FILES: ClassVar[RelationField] = RelationField("files") """ TBC @@ -1294,6 +1304,7 @@ def __setattr__(self, name, value): "asset_policies_count", "domain_g_u_i_ds", "non_compliant_asset_policy_g_u_i_ds", + "application_qualified_name", "schema_registry_subjects", "data_contract_latest_certified", "anomalo_checks", @@ -1304,6 +1315,7 @@ def __setattr__(self, name, value): "data_contract_latest", "assigned_terms", "mc_monitors", + "application", "files", "mc_incidents", "links", @@ -3203,6 +3215,20 @@ def non_compliant_asset_policy_g_u_i_ds( non_compliant_asset_policy_g_u_i_ds ) + @property + def application_qualified_name(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.application_qualified_name + ) + + @application_qualified_name.setter + def application_qualified_name(self, application_qualified_name: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.application_qualified_name = application_qualified_name + @property def schema_registry_subjects(self) -> Optional[List[SchemaRegistrySubject]]: return ( @@ -3333,6 +3359,16 @@ def mc_monitors(self, mc_monitors: Optional[List[MCMonitor]]): self.attributes = self.Attributes() self.attributes.mc_monitors = mc_monitors + @property + def application(self) -> Optional[Application]: + return None if self.attributes is None else self.attributes.application + + @application.setter + def application(self, application: Optional[Application]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.application = application + @property def files(self) -> Optional[List[File]]: return None if self.attributes is None else self.attributes.files @@ -3660,6 
+3696,7 @@ class Attributes(Referenceable.Attributes): non_compliant_asset_policy_g_u_i_ds: Optional[Set[str]] = Field( default=None, description="" ) + application_qualified_name: Optional[str] = Field(default=None, description="") schema_registry_subjects: Optional[List[SchemaRegistrySubject]] = Field( default=None, description="" ) # relationship @@ -3688,6 +3725,9 @@ class Attributes(Referenceable.Attributes): mc_monitors: Optional[List[MCMonitor]] = Field( default=None, description="" ) # relationship + application: Optional[Application] = Field( + default=None, description="" + ) # relationship files: Optional[List[File]] = Field( default=None, description="" ) # relationship @@ -3737,6 +3777,7 @@ def remove_announcement(self): from .anomalo_check import AnomaloCheck # noqa +from .application import Application # noqa from .atlas_glossary_term import AtlasGlossaryTerm # noqa from .data_contract import DataContract # noqa from .data_product import DataProduct # noqa diff --git a/pyatlan/model/assets/core/catalog.py b/pyatlan/model/assets/core/catalog.py index d456b9392..8ccf9c952 100644 --- a/pyatlan/model/assets/core/catalog.py +++ b/pyatlan/model/assets/core/catalog.py @@ -8,7 +8,7 @@ from pydantic.v1 import Field, validator -from pyatlan.model.fields.atlan_fields import KeywordField, RelationField +from pyatlan.model.fields.atlan_fields import RelationField from .asset import Asset @@ -29,13 +29,6 @@ def __setattr__(self, name, value): return object.__setattr__(self, name, value) super().__setattr__(name, value) - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. 
- """ - INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") """ TBC @@ -46,12 +39,6 @@ def __setattr__(self, name, value): """ TBC """ - APPLICATION_CONTAINER: ClassVar[RelationField] = RelationField( - "applicationContainer" - ) - """ - TBC - """ INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs") """ TBC @@ -82,10 +69,8 @@ def __setattr__(self, name, value): """ _convenience_properties: ClassVar[List[str]] = [ - "asset_application_qualified_name", "input_to_processes", "output_from_airflow_tasks", - "application_container", "input_to_spark_jobs", "output_from_spark_jobs", "model_implemented_entities", @@ -93,24 +78,6 @@ def __setattr__(self, name, value): "output_from_processes", ] - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def input_to_processes(self) -> Optional[List[Process]]: return None if self.attributes is None else self.attributes.input_to_processes @@ -137,20 +104,6 @@ def output_from_airflow_tasks( self.attributes = self.Attributes() self.attributes.output_from_airflow_tasks = output_from_airflow_tasks - @property - def application_container(self) -> Optional[ApplicationContainer]: - return ( - None if self.attributes is None else self.attributes.application_container - ) - - @application_container.setter - def application_container( - self, application_container: Optional[ApplicationContainer] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.application_container = application_container - @property def 
input_to_spark_jobs(self) -> Optional[List[SparkJob]]: return None if self.attributes is None else self.attributes.input_to_spark_jobs @@ -216,18 +169,12 @@ def output_from_processes(self, output_from_processes: Optional[List[Process]]): self.attributes.output_from_processes = output_from_processes class Attributes(Asset.Attributes): - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) input_to_processes: Optional[List[Process]] = Field( default=None, description="" ) # relationship output_from_airflow_tasks: Optional[List[AirflowTask]] = Field( default=None, description="" ) # relationship - application_container: Optional[ApplicationContainer] = Field( - default=None, description="" - ) # relationship input_to_spark_jobs: Optional[List[SparkJob]] = Field( default=None, description="" ) # relationship @@ -256,6 +203,5 @@ class Attributes(Asset.Attributes): from .airflow_task import AirflowTask # noqa from .model_entity import ModelEntity # noqa -from .application_container import ApplicationContainer # noqa from .process import Process # noqa from .spark_job import SparkJob # noqa diff --git a/pyatlan/model/assets/core/dbt_metric.py b/pyatlan/model/assets/core/dbt_metric.py index 16b1199ee..49a22b866 100644 --- a/pyatlan/model/assets/core/dbt_metric.py +++ b/pyatlan/model/assets/core/dbt_metric.py @@ -157,12 +157,6 @@ def __setattr__(self, name, value): """ List of latest DBT job runs across all environments """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ METRIC_TYPE: ClassVar[KeywordField] = KeywordField("metricType", "metricType") """ Type of the metric. 
@@ -230,7 +224,6 @@ def __setattr__(self, name, value): "dbt_connection_context", "dbt_semantic_layer_proxy_url", "dbt_job_runs", - "asset_application_qualified_name", "metric_type", "metric_s_q_l", "metric_filters", @@ -464,24 +457,6 @@ def dbt_job_runs(self, dbt_job_runs: Optional[List[DbtJobRun]]): self.attributes = self.Attributes() self.attributes.dbt_job_runs = dbt_job_runs - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def metric_type(self) -> Optional[str]: return None if self.attributes is None else self.attributes.metric_type @@ -613,9 +588,6 @@ class Attributes(Dbt.Attributes): default=None, description="" ) dbt_job_runs: Optional[List[DbtJobRun]] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) metric_type: Optional[str] = Field(default=None, description="") metric_s_q_l: Optional[str] = Field(default=None, description="") metric_filters: Optional[str] = Field(default=None, description="") diff --git a/pyatlan/model/assets/core/dynamo_d_b_secondary_index.py b/pyatlan/model/assets/core/dynamo_d_b_secondary_index.py index 18a3581fb..bc577b1c8 100644 --- a/pyatlan/model/assets/core/dynamo_d_b_secondary_index.py +++ b/pyatlan/model/assets/core/dynamo_d_b_secondary_index.py @@ -267,12 +267,6 @@ def __setattr__(self, name, value): """ Time (epoch) at which this asset was last profiled, in milliseconds. 
""" - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ DYNAMO_DB_STATUS: ClassVar[KeywordField] = KeywordField( "dynamoDBStatus", "dynamoDBStatus" ) @@ -352,7 +346,6 @@ def __setattr__(self, name, value): "calculation_view_qualified_name", "is_profiled", "last_profiled_at", - "asset_application_qualified_name", "dynamo_d_b_status", "dynamo_d_b_partition_key", "dynamo_d_b_sort_key", @@ -830,24 +823,6 @@ def last_profiled_at(self, last_profiled_at: Optional[datetime]): self.attributes = self.Attributes() self.attributes.last_profiled_at = last_profiled_at - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def dynamo_d_b_status(self) -> Optional[DynamoDBStatus]: return None if self.attributes is None else self.attributes.dynamo_d_b_status @@ -980,9 +955,6 @@ class Attributes(Table.Attributes): ) is_profiled: Optional[bool] = Field(default=None, description="") last_profiled_at: Optional[datetime] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) dynamo_d_b_status: Optional[DynamoDBStatus] = Field( default=None, description="" ) diff --git a/pyatlan/model/assets/core/snowflake_tag.py b/pyatlan/model/assets/core/snowflake_tag.py index 1b66d7847..e04428a26 100644 --- a/pyatlan/model/assets/core/snowflake_tag.py +++ b/pyatlan/model/assets/core/snowflake_tag.py @@ -59,12 
+59,6 @@ def __setattr__(self, name, value): """ Name of the classification in Atlan that is mapped to this tag. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") """ Number of times this asset has been queried. @@ -188,7 +182,6 @@ def __setattr__(self, name, value): "tag_attributes", "tag_allowed_values", "mapped_atlan_tag_name", - "asset_application_qualified_name", "query_count", "query_user_count", "query_user_map", @@ -255,24 +248,6 @@ def mapped_atlan_tag_name(self, mapped_atlan_tag_name: Optional[str]): self.attributes = self.Attributes() self.attributes.mapped_atlan_tag_name = mapped_atlan_tag_name - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def query_count(self) -> Optional[int]: return None if self.attributes is None else self.attributes.query_count @@ -516,9 +491,6 @@ class Attributes(Tag.Attributes): ) tag_allowed_values: Optional[Set[str]] = Field(default=None, description="") mapped_atlan_tag_name: Optional[str] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) query_count: Optional[int] = Field(default=None, description="") query_user_count: Optional[int] = Field(default=None, description="") query_user_map: Optional[Dict[str, int]] = Field(default=None, 
description="") diff --git a/pyatlan/model/assets/data_studio.py b/pyatlan/model/assets/data_studio.py index 1d4d0d702..f1701fedb 100644 --- a/pyatlan/model/assets/data_studio.py +++ b/pyatlan/model/assets/data_studio.py @@ -79,12 +79,6 @@ def __setattr__(self, name, value): """ List of tags that have been applied to the asset in Google. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") """ @@ -96,12 +90,6 @@ def __setattr__(self, name, value): """ TBC """ - APPLICATION_CONTAINER: ClassVar[RelationField] = RelationField( - "applicationContainer" - ) - """ - TBC - """ INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs") """ TBC @@ -140,10 +128,8 @@ def __setattr__(self, name, value): "google_location_type", "google_labels", "google_tags", - "asset_application_qualified_name", "input_to_processes", "output_from_airflow_tasks", - "application_container", "input_to_spark_jobs", "output_from_spark_jobs", "input_to_airflow_tasks", @@ -232,24 +218,6 @@ def google_tags(self, google_tags: Optional[List[GoogleTag]]): self.attributes = self.Attributes() self.attributes.google_tags = google_tags - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def input_to_processes(self) -> Optional[List[Process]]: return None if self.attributes is None else 
self.attributes.input_to_processes @@ -276,20 +244,6 @@ def output_from_airflow_tasks( self.attributes = self.Attributes() self.attributes.output_from_airflow_tasks = output_from_airflow_tasks - @property - def application_container(self) -> Optional[ApplicationContainer]: - return ( - None if self.attributes is None else self.attributes.application_container - ) - - @application_container.setter - def application_container( - self, application_container: Optional[ApplicationContainer] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.application_container = application_container - @property def input_to_spark_jobs(self) -> Optional[List[SparkJob]]: return None if self.attributes is None else self.attributes.input_to_spark_jobs @@ -363,18 +317,12 @@ class Attributes(Google.Attributes): google_location_type: Optional[str] = Field(default=None, description="") google_labels: Optional[List[GoogleLabel]] = Field(default=None, description="") google_tags: Optional[List[GoogleTag]] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) input_to_processes: Optional[List[Process]] = Field( default=None, description="" ) # relationship output_from_airflow_tasks: Optional[List[AirflowTask]] = Field( default=None, description="" ) # relationship - application_container: Optional[ApplicationContainer] = Field( - default=None, description="" - ) # relationship input_to_spark_jobs: Optional[List[SparkJob]] = Field( default=None, description="" ) # relationship @@ -403,7 +351,6 @@ class Attributes(Google.Attributes): from .core.airflow_task import AirflowTask # noqa from .core.model_entity import ModelEntity # noqa -from .core.application_container import ApplicationContainer # noqa from .core.process import Process # noqa from .core.spark_job import SparkJob # noqa diff --git a/pyatlan/model/assets/data_studio_asset.py b/pyatlan/model/assets/data_studio_asset.py 
index a6e5521ee..de4c806c4 100644 --- a/pyatlan/model/assets/data_studio_asset.py +++ b/pyatlan/model/assets/data_studio_asset.py @@ -162,12 +162,6 @@ def __setattr__(self, name, value): """ List of tags that have been applied to the asset in Google. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ _convenience_properties: ClassVar[List[str]] = [ "data_studio_asset_type", @@ -182,7 +176,6 @@ def __setattr__(self, name, value): "google_location_type", "google_labels", "google_tags", - "asset_application_qualified_name", ] @property @@ -321,24 +314,6 @@ def google_tags(self, google_tags: Optional[List[GoogleTag]]): self.attributes = self.Attributes() self.attributes.google_tags = google_tags - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - class Attributes(DataStudio.Attributes): data_studio_asset_type: Optional[GoogleDatastudioAssetType] = Field( default=None, description="" @@ -356,9 +331,6 @@ class Attributes(DataStudio.Attributes): google_location_type: Optional[str] = Field(default=None, description="") google_labels: Optional[List[GoogleLabel]] = Field(default=None, description="") google_tags: Optional[List[GoogleTag]] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) @classmethod @init_guid diff --git a/pyatlan/model/assets/dbt_column_process.py 
b/pyatlan/model/assets/dbt_column_process.py index 1b099b3d6..3686fb408 100644 --- a/pyatlan/model/assets/dbt_column_process.py +++ b/pyatlan/model/assets/dbt_column_process.py @@ -157,12 +157,6 @@ def __setattr__(self, name, value): """ List of latest DBT job runs across all environments """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ CODE: ClassVar[TextField] = TextField("code", "code") """ Code that ran within the process. @@ -236,7 +230,6 @@ def __setattr__(self, name, value): "dbt_connection_context", "dbt_semantic_layer_proxy_url", "dbt_job_runs", - "asset_application_qualified_name", "inputs", "outputs", "code", @@ -481,24 +474,6 @@ def dbt_job_runs(self, dbt_job_runs: Optional[List[DbtJobRun]]): self.attributes = self.Attributes() self.attributes.dbt_job_runs = dbt_job_runs - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def inputs(self) -> Optional[List[Catalog]]: return None if self.attributes is None else self.attributes.inputs @@ -668,9 +643,6 @@ class Attributes(Dbt.Attributes): default=None, description="" ) dbt_job_runs: Optional[List[DbtJobRun]] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) inputs: Optional[List[Catalog]] = Field(default=None, description="") outputs: Optional[List[Catalog]] = Field(default=None, description="") code: 
Optional[str] = Field(default=None, description="") diff --git a/pyatlan/model/assets/dbt_process.py b/pyatlan/model/assets/dbt_process.py index b0edfbdf4..d8f293acc 100644 --- a/pyatlan/model/assets/dbt_process.py +++ b/pyatlan/model/assets/dbt_process.py @@ -157,12 +157,6 @@ def __setattr__(self, name, value): """ List of latest DBT job runs across all environments """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ CODE: ClassVar[TextField] = TextField("code", "code") """ Code that ran within the process. @@ -232,7 +226,6 @@ def __setattr__(self, name, value): "dbt_connection_context", "dbt_semantic_layer_proxy_url", "dbt_job_runs", - "asset_application_qualified_name", "inputs", "outputs", "code", @@ -472,24 +465,6 @@ def dbt_job_runs(self, dbt_job_runs: Optional[List[DbtJobRun]]): self.attributes = self.Attributes() self.attributes.dbt_job_runs = dbt_job_runs - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def inputs(self) -> Optional[List[Catalog]]: return None if self.attributes is None else self.attributes.inputs @@ -647,9 +622,6 @@ class Attributes(Dbt.Attributes): default=None, description="" ) dbt_job_runs: Optional[List[DbtJobRun]] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) inputs: Optional[List[Catalog]] = Field(default=None, description="") 
outputs: Optional[List[Catalog]] = Field(default=None, description="") code: Optional[str] = Field(default=None, description="") diff --git a/pyatlan/model/assets/dbt_tag.py b/pyatlan/model/assets/dbt_tag.py index c236ceba1..22b02d83d 100644 --- a/pyatlan/model/assets/dbt_tag.py +++ b/pyatlan/model/assets/dbt_tag.py @@ -150,12 +150,6 @@ def __setattr__(self, name, value): """ List of latest DBT job runs across all environments """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ TAG_ID: ClassVar[KeywordField] = KeywordField("tagId", "tagId") """ Unique identifier of the tag in the source system. @@ -199,7 +193,6 @@ def __setattr__(self, name, value): "dbt_connection_context", "dbt_semantic_layer_proxy_url", "dbt_job_runs", - "asset_application_qualified_name", "tag_id", "tag_attributes", "tag_allowed_values", @@ -418,24 +411,6 @@ def dbt_job_runs(self, dbt_job_runs: Optional[List[DbtJobRun]]): self.attributes = self.Attributes() self.attributes.dbt_job_runs = dbt_job_runs - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def tag_id(self) -> Optional[str]: return None if self.attributes is None else self.attributes.tag_id @@ -502,9 +477,6 @@ class Attributes(Dbt.Attributes): default=None, description="" ) dbt_job_runs: Optional[List[DbtJobRun]] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - 
default=None, description="" - ) tag_id: Optional[str] = Field(default=None, description="") tag_attributes: Optional[List[SourceTagAttribute]] = Field( default=None, description="" diff --git a/pyatlan/model/assets/dynamo_dbtable.py b/pyatlan/model/assets/dynamo_dbtable.py index 75e0811c5..7039d77e4 100644 --- a/pyatlan/model/assets/dynamo_dbtable.py +++ b/pyatlan/model/assets/dynamo_dbtable.py @@ -270,12 +270,6 @@ def __setattr__(self, name, value): """ Time (epoch) at which this asset was last profiled, in milliseconds. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ DYNAMO_DB_STATUS: ClassVar[KeywordField] = KeywordField( "dynamoDBStatus", "dynamoDBStatus" ) @@ -369,7 +363,6 @@ def __setattr__(self, name, value): "calculation_view_qualified_name", "is_profiled", "last_profiled_at", - "asset_application_qualified_name", "dynamo_d_b_status", "dynamo_d_b_partition_key", "dynamo_d_b_sort_key", @@ -854,24 +847,6 @@ def last_profiled_at(self, last_profiled_at: Optional[datetime]): self.attributes = self.Attributes() self.attributes.last_profiled_at = last_profiled_at - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def dynamo_d_b_status(self) -> Optional[DynamoDBStatus]: return None if self.attributes is None else self.attributes.dynamo_d_b_status @@ -1047,9 +1022,6 @@ class Attributes(Table.Attributes): ) is_profiled: Optional[bool] = 
Field(default=None, description="") last_profiled_at: Optional[datetime] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) dynamo_d_b_status: Optional[DynamoDBStatus] = Field( default=None, description="" ) diff --git a/pyatlan/model/assets/g_c_s.py b/pyatlan/model/assets/g_c_s.py index 813e42d21..6d96962c7 100644 --- a/pyatlan/model/assets/g_c_s.py +++ b/pyatlan/model/assets/g_c_s.py @@ -114,12 +114,6 @@ def __setattr__(self, name, value): """ List of tags that have been applied to the asset in Google. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") """ @@ -131,12 +125,6 @@ def __setattr__(self, name, value): """ TBC """ - APPLICATION_CONTAINER: ClassVar[RelationField] = RelationField( - "applicationContainer" - ) - """ - TBC - """ INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs") """ TBC @@ -181,10 +169,8 @@ def __setattr__(self, name, value): "google_location_type", "google_labels", "google_tags", - "asset_application_qualified_name", "input_to_processes", "output_from_airflow_tasks", - "application_container", "input_to_spark_jobs", "output_from_spark_jobs", "model_implemented_entities", @@ -336,24 +322,6 @@ def google_tags(self, google_tags: Optional[List[GoogleTag]]): self.attributes = self.Attributes() self.attributes.google_tags = google_tags - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes 
is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def input_to_processes(self) -> Optional[List[Process]]: return None if self.attributes is None else self.attributes.input_to_processes @@ -380,20 +348,6 @@ def output_from_airflow_tasks( self.attributes = self.Attributes() self.attributes.output_from_airflow_tasks = output_from_airflow_tasks - @property - def application_container(self) -> Optional[ApplicationContainer]: - return ( - None if self.attributes is None else self.attributes.application_container - ) - - @application_container.setter - def application_container( - self, application_container: Optional[ApplicationContainer] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.application_container = application_container - @property def input_to_spark_jobs(self) -> Optional[List[SparkJob]]: return None if self.attributes is None else self.attributes.input_to_spark_jobs @@ -473,18 +427,12 @@ class Attributes(Google.Attributes): google_location_type: Optional[str] = Field(default=None, description="") google_labels: Optional[List[GoogleLabel]] = Field(default=None, description="") google_tags: Optional[List[GoogleTag]] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) input_to_processes: Optional[List[Process]] = Field( default=None, description="" ) # relationship output_from_airflow_tasks: Optional[List[AirflowTask]] = Field( default=None, description="" ) # relationship - application_container: Optional[ApplicationContainer] = Field( - default=None, description="" - ) # relationship input_to_spark_jobs: Optional[List[SparkJob]] = Field( default=None, description="" ) # relationship @@ -513,7 +461,6 @@ class Attributes(Google.Attributes): from .core.airflow_task import AirflowTask # noqa from .core.model_entity import ModelEntity # noqa 
-from .core.application_container import ApplicationContainer # noqa from .core.process import Process # noqa from .core.spark_job import SparkJob # noqa diff --git a/pyatlan/model/assets/s3.py b/pyatlan/model/assets/s3.py index 022ab0add..8e4891389 100644 --- a/pyatlan/model/assets/s3.py +++ b/pyatlan/model/assets/s3.py @@ -39,12 +39,6 @@ def __setattr__(self, name, value): S3ENCRYPTION: ClassVar[KeywordField] = KeywordField("s3Encryption", "s3Encryption") """ - """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. """ AWS_ARN: ClassVar[KeywordTextField] = KeywordTextField( "awsArn", "awsArn", "awsArn.text" @@ -94,7 +88,6 @@ def __setattr__(self, name, value): _convenience_properties: ClassVar[List[str]] = [ "s3_e_tag", "s3_encryption", - "asset_application_qualified_name", "aws_arn", "aws_partition", "aws_service", @@ -126,24 +119,6 @@ def s3_encryption(self, s3_encryption: Optional[str]): self.attributes = self.Attributes() self.attributes.s3_encryption = s3_encryption - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def aws_arn(self) -> Optional[str]: return None if self.attributes is None else self.attributes.aws_arn @@ -237,9 +212,6 @@ def aws_tags(self, aws_tags: Optional[List[AwsTag]]): class Attributes(ObjectStore.Attributes): s3_e_tag: Optional[str] = Field(default=None, description="") s3_encryption: Optional[str] = Field(default=None, 
description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) aws_arn: Optional[str] = Field(default=None, description="") aws_partition: Optional[str] = Field(default=None, description="") aws_service: Optional[str] = Field(default=None, description="") diff --git a/pyatlan/model/enums.py b/pyatlan/model/enums.py index 11c1724bf..49f22cbc1 100644 --- a/pyatlan/model/enums.py +++ b/pyatlan/model/enums.py @@ -136,7 +136,7 @@ class AtlanConnectionCategory(str, Enum): EVENT_BUS = "eventbus" DATA_QUALITY = "data-quality" SCHEMA_REGISTRY = "schema-registry" - APPLICATION = "application" + APP = "app" class AtlanConnectorType(str, Enum): @@ -334,7 +334,7 @@ def get_connector_name( SAP_GIGYA = ("sap-gigya", AtlanConnectionCategory.SAAS) SAP_HYBRIS = ("sap-hybris", AtlanConnectionCategory.SAAS) IBM_DB2 = ("ibmdb2", AtlanConnectionCategory.DATABASE) - APPLICATION = ("application", AtlanConnectionCategory.APPLICATION) + APP = ("app", AtlanConnectionCategory.APP) class AtlanCustomAttributePrimitiveType(str, Enum): diff --git a/pyatlan/model/typedef.py b/pyatlan/model/typedef.py index 178441e28..60a8b926d 100644 --- a/pyatlan/model/typedef.py +++ b/pyatlan/model/typedef.py @@ -35,7 +35,7 @@ "APIField", "APIPath", "APISpec", - "ApplicationContainer", + "Application", "Collection", "Query", "BIProcess", diff --git a/tests/integration/app_asset_test.py b/tests/integration/app_asset_test.py new file mode 100644 index 000000000..4a1d250fc --- /dev/null +++ b/tests/integration/app_asset_test.py @@ -0,0 +1,211 @@ +from typing import Generator + +import pytest + +from pyatlan.client.atlan import AtlanClient +from pyatlan.model.assets import Application, Connection +from pyatlan.model.core import Announcement +from pyatlan.model.enums import ( + AnnouncementType, + AtlanConnectorType, + CertificateStatus, + EntityStatus, +) +from pyatlan.model.response import AssetMutationResponse +from tests.integration.client import TestId, delete_asset 
+from tests.integration.connection_test import create_connection +from tests.integration.utils import block + +MODULE_NAME = TestId.make_unique("APP") + +CONNECTOR_TYPE = AtlanConnectorType.APP +APPLICATION_NAME = f"{MODULE_NAME}-application" +CERTIFICATE_STATUS = CertificateStatus.VERIFIED +CERTIFICATE_MESSAGE = "Automated testing of the Python SDK." +ANNOUNCEMENT_TYPE = AnnouncementType.INFORMATION +ANNOUNCEMENT_TITLE = "Python SDK testing." +ANNOUNCEMENT_MESSAGE = "Automated testing of the Python SDK." + + +response = block(AtlanClient(), AssetMutationResponse()) + + +@pytest.fixture(scope="module") +def connection(client: AtlanClient) -> Generator[Connection, None, None]: + result = create_connection( + client=client, name=MODULE_NAME, connector_type=CONNECTOR_TYPE + ) + yield result + delete_asset(client, guid=result.guid, asset_type=Connection) + + +@pytest.fixture(scope="module") +def application( + client: AtlanClient, connection: Connection +) -> Generator[Application, None, None]: + assert connection.qualified_name + to_create = Application.create( + name=APPLICATION_NAME, + connection_qualified_name=connection.qualified_name, + ) + response = client.asset.save(to_create) + result = response.assets_created(asset_type=Application)[0] + yield result + delete_asset(client, guid=result.guid, asset_type=Application) + + +def test_application( + client: AtlanClient, + connection: Connection, + application: Application, +): + assert application + assert application.guid + assert application.qualified_name + assert application.name == APPLICATION_NAME + assert application.connection_qualified_name == connection.qualified_name + assert application.connector_name == AtlanConnectorType.APP.value + + +# here +def test_update_application( + client: AtlanClient, + connection: Connection, + application: Application, +): + assert application.qualified_name + assert application.name + updated = client.asset.update_certificate( + asset_type=Application, + 
qualified_name=application.qualified_name, + name=application.name, + certificate_status=CERTIFICATE_STATUS, + message=CERTIFICATE_MESSAGE, + ) + assert updated + assert updated.certificate_status_message == CERTIFICATE_MESSAGE + assert application.qualified_name + assert application.name + updated = client.asset.update_announcement( + asset_type=Application, + qualified_name=application.qualified_name, + name=application.name, + announcement=Announcement( + announcement_type=ANNOUNCEMENT_TYPE, + announcement_title=ANNOUNCEMENT_TITLE, + announcement_message=ANNOUNCEMENT_MESSAGE, + ), + ) + assert updated + assert updated.announcement_type == ANNOUNCEMENT_TYPE.value + assert updated.announcement_title == ANNOUNCEMENT_TITLE + assert updated.announcement_message == ANNOUNCEMENT_MESSAGE + + +@pytest.mark.order(after="test_update_application") +def test_retrieve_application( + client: AtlanClient, + connection: Connection, + application: Application, +): + b = client.asset.get_by_guid( + application.guid, asset_type=Application + ) + assert b + assert not b.is_incomplete + assert b.guid == application.guid + assert b.qualified_name == application.qualified_name + assert b.name == application.name + assert b.connector_name == application.connector_name + assert ( + b.connection_qualified_name == application.connection_qualified_name + ) + assert b.certificate_status == CERTIFICATE_STATUS + assert b.certificate_status_message == CERTIFICATE_MESSAGE + + +@pytest.mark.order(after="test_retrieve_application") +def test_update_application_again( + client: AtlanClient, + connection: Connection, + application: Application, +): + assert application.qualified_name + assert application.name + updated = client.asset.remove_certificate( + asset_type=Application, + qualified_name=application.qualified_name, + name=application.name, + ) + assert updated + assert not updated.certificate_status + assert not updated.certificate_status_message + assert updated.announcement_type == 
ANNOUNCEMENT_TYPE.value + assert updated.announcement_title == ANNOUNCEMENT_TITLE + assert updated.announcement_message == ANNOUNCEMENT_MESSAGE + assert application.qualified_name + updated = client.asset.remove_announcement( + asset_type=Application, + qualified_name=application.qualified_name, + name=application.name, + ) + assert updated + assert not updated.announcement_type + assert not updated.announcement_title + assert not updated.announcement_message + + +@pytest.mark.order(after="test_update_application_again") +def test_delete_application( + client: AtlanClient, + connection: Connection, + application: Application, +): + response = client.asset.delete_by_guid(application.guid) + assert response + assert not response.assets_created(asset_type=Application) + assert not response.assets_updated(asset_type=Application) + deleted = response.assets_deleted(asset_type=Application) + assert deleted + assert len(deleted) == 1 + assert deleted[0].guid == application.guid + assert deleted[0].qualified_name == application.qualified_name + assert deleted[0].delete_handler == "SOFT" + assert deleted[0].status == EntityStatus.DELETED + + +@pytest.mark.order(after="test_delete_application") +def test_read_deleted_application( + client: AtlanClient, + connection: Connection, + application: Application, +): + deleted = client.asset.get_by_guid( + application.guid, asset_type=Application + ) + assert deleted + assert deleted.guid == application.guid + assert deleted.qualified_name == application.qualified_name + assert deleted.status == EntityStatus.DELETED + + +@pytest.mark.order(after="test_read_deleted_application") +def test_restore_application( + client: AtlanClient, + connection: Connection, + application: Application, +): + assert application.qualified_name + assert client.asset.restore( + asset_type=Application, + qualified_name=application.qualified_name, + ) + assert application.qualified_name + restored = client.asset.get_by_qualified_name( + 
asset_type=Application, + qualified_name=application.qualified_name, + ) + assert restored + assert restored.guid == application.guid + assert restored.qualified_name == application.qualified_name + assert restored.status == EntityStatus.ACTIVE diff --git a/tests/integration/application_asset_test.py b/tests/integration/application_asset_test.py deleted file mode 100644 index b3283764a..000000000 --- a/tests/integration/application_asset_test.py +++ /dev/null @@ -1,212 +0,0 @@ -from typing import Generator - -import pytest - -from pyatlan.client.atlan import AtlanClient -from pyatlan.model.assets import ApplicationContainer, Connection -from pyatlan.model.core import Announcement -from pyatlan.model.enums import ( - AnnouncementType, - AtlanConnectorType, - CertificateStatus, - EntityStatus, -) -from pyatlan.model.response import AssetMutationResponse -from tests.integration.client import TestId, delete_asset -from tests.integration.connection_test import create_connection -from tests.integration.utils import block - -MODULE_NAME = TestId.make_unique("APPLICATION") - -CONNECTOR_TYPE = AtlanConnectorType.APPLICATION -APPLICATION_CONTAINER_NAME = f"{MODULE_NAME}-application-container" -APPLICATION_CONTAINER_ID = "1234" -CERTIFICATE_STATUS = CertificateStatus.VERIFIED -CERTIFICATE_MESSAGE = "Automated testing of the Python SDK." -ANNOUNCEMENT_TYPE = AnnouncementType.INFORMATION -ANNOUNCEMENT_TITLE = "Python SDK testing." -ANNOUNCEMENT_MESSAGE = "Automated testing of the Python SDK." 
- - -response = block(AtlanClient(), AssetMutationResponse()) - - -@pytest.fixture(scope="module") -def connection(client: AtlanClient) -> Generator[Connection, None, None]: - result = create_connection( - client=client, name=MODULE_NAME, connector_type=CONNECTOR_TYPE - ) - yield result - delete_asset(client, guid=result.guid, asset_type=Connection) - - -@pytest.fixture(scope="module") -def application_container( - client: AtlanClient, connection: Connection -) -> Generator[ApplicationContainer, None, None]: - assert connection.qualified_name - to_create = ApplicationContainer.create( - name=APPLICATION_CONTAINER_NAME, - connection_qualified_name=connection.qualified_name, - ) - response = client.asset.save(to_create) - result = response.assets_created(asset_type=ApplicationContainer)[0] - yield result - delete_asset(client, guid=result.guid, asset_type=ApplicationContainer) - - -def test_application_container( - client: AtlanClient, - connection: Connection, - application_container: ApplicationContainer, -): - assert application_container - assert application_container.guid - assert application_container.qualified_name - assert application_container.name == APPLICATION_CONTAINER_NAME - assert application_container.connection_qualified_name == connection.qualified_name - assert application_container.connector_name == AtlanConnectorType.APPLICATION.value - - -# here -def test_update_application_container( - client: AtlanClient, - connection: Connection, - application_container: ApplicationContainer, -): - assert application_container.qualified_name - assert application_container.name - updated = client.asset.update_certificate( - asset_type=ApplicationContainer, - qualified_name=application_container.qualified_name, - name=application_container.name, - certificate_status=CERTIFICATE_STATUS, - message=CERTIFICATE_MESSAGE, - ) - assert updated - assert updated.certificate_status_message == CERTIFICATE_MESSAGE - assert application_container.qualified_name - assert 
application_container.name - updated = client.asset.update_announcement( - asset_type=ApplicationContainer, - qualified_name=application_container.qualified_name, - name=application_container.name, - announcement=Announcement( - announcement_type=ANNOUNCEMENT_TYPE, - announcement_title=ANNOUNCEMENT_TITLE, - announcement_message=ANNOUNCEMENT_MESSAGE, - ), - ) - assert updated - assert updated.announcement_type == ANNOUNCEMENT_TYPE.value - assert updated.announcement_title == ANNOUNCEMENT_TITLE - assert updated.announcement_message == ANNOUNCEMENT_MESSAGE - - -@pytest.mark.order(after="test_update_application_container") -def test_retrieve_application_container( - client: AtlanClient, - connection: Connection, - application_container: ApplicationContainer, -): - b = client.asset.get_by_guid( - application_container.guid, asset_type=ApplicationContainer - ) - assert b - assert not b.is_incomplete - assert b.guid == application_container.guid - assert b.qualified_name == application_container.qualified_name - assert b.name == application_container.name - assert b.connector_name == application_container.connector_name - assert ( - b.connection_qualified_name == application_container.connection_qualified_name - ) - assert b.certificate_status == CERTIFICATE_STATUS - assert b.certificate_status_message == CERTIFICATE_MESSAGE - - -@pytest.mark.order(after="test_retrieve_application_container") -def test_update_application_container_again( - client: AtlanClient, - connection: Connection, - application_container: ApplicationContainer, -): - assert application_container.qualified_name - assert application_container.name - updated = client.asset.remove_certificate( - asset_type=ApplicationContainer, - qualified_name=application_container.qualified_name, - name=application_container.name, - ) - assert updated - assert not updated.certificate_status - assert not updated.certificate_status_message - assert updated.announcement_type == ANNOUNCEMENT_TYPE.value - assert 
updated.announcement_title == ANNOUNCEMENT_TITLE - assert updated.announcement_message == ANNOUNCEMENT_MESSAGE - assert application_container.qualified_name - updated = client.asset.remove_announcement( - asset_type=ApplicationContainer, - qualified_name=application_container.qualified_name, - name=application_container.name, - ) - assert updated - assert not updated.announcement_type - assert not updated.announcement_title - assert not updated.announcement_message - - -@pytest.mark.order(after="test_update_application_container_again") -def test_delete_application_container( - client: AtlanClient, - connection: Connection, - application_container: ApplicationContainer, -): - response = client.asset.delete_by_guid(application_container.guid) - assert response - assert not response.assets_created(asset_type=ApplicationContainer) - assert not response.assets_updated(asset_type=ApplicationContainer) - deleted = response.assets_deleted(asset_type=ApplicationContainer) - assert deleted - assert len(deleted) == 1 - assert deleted[0].guid == application_container.guid - assert deleted[0].qualified_name == application_container.qualified_name - assert deleted[0].delete_handler == "SOFT" - assert deleted[0].status == EntityStatus.DELETED - - -@pytest.mark.order(after="test_delete_application_container") -def test_read_deleted_application_container( - client: AtlanClient, - connection: Connection, - application_container: ApplicationContainer, -): - deleted = client.asset.get_by_guid( - application_container.guid, asset_type=ApplicationContainer - ) - assert deleted - assert deleted.guid == application_container.guid - assert deleted.qualified_name == application_container.qualified_name - assert deleted.status == EntityStatus.DELETED - - -@pytest.mark.order(after="test_read_deleted_application_container") -def test_restore_application_container( - client: AtlanClient, - connection: Connection, - application_container: ApplicationContainer, -): - assert 
application_container.qualified_name - assert client.asset.restore( - asset_type=ApplicationContainer, - qualified_name=application_container.qualified_name, - ) - assert application_container.qualified_name - restored = client.asset.get_by_qualified_name( - asset_type=ApplicationContainer, - qualified_name=application_container.qualified_name, - ) - assert restored - assert restored.guid == application_container.guid - assert restored.qualified_name == application_container.qualified_name - assert restored.status == EntityStatus.ACTIVE diff --git a/tests/unit/model/application_asset_test.py b/tests/unit/model/application_asset_test.py deleted file mode 100644 index d4d82d77d..000000000 --- a/tests/unit/model/application_asset_test.py +++ /dev/null @@ -1,73 +0,0 @@ -import pytest - -from pyatlan.model.assets import ApplicationContainer -from tests.unit.model.constants import ( - APPLICATION_CONTAINER_NAME, - APPLICATION_CONTAINER_QUALIFIED_NAME, - APPLICATION_CONNECTION_QUALIFIED_NAME, - APPLICATION_CONNECTOR_TYPE, -) - - -@pytest.mark.parametrize( - "name, connection_qualified_name, message", - [ - (None, "connection/name", "name is required"), - (APPLICATION_CONTAINER_NAME, None, "connection_qualified_name is required"), - ], -) -def test_create_with_missing_parameters_raise_value_error( - name: str, connection_qualified_name: str, message: str -): - with pytest.raises(ValueError, match=message): - ApplicationContainer.creator( - name=name, connection_qualified_name=connection_qualified_name - ) - - -def test_create(): - sut = ApplicationContainer.creator( - name=APPLICATION_CONTAINER_NAME, - connection_qualified_name=APPLICATION_CONNECTION_QUALIFIED_NAME, - ) - - assert sut.name == APPLICATION_CONTAINER_NAME - assert sut.connection_qualified_name == APPLICATION_CONNECTION_QUALIFIED_NAME - assert sut.qualified_name == APPLICATION_CONTAINER_QUALIFIED_NAME - assert sut.connector_name == APPLICATION_CONNECTOR_TYPE - - -@pytest.mark.parametrize( - "qualified_name, 
name, message", - [ - (None, APPLICATION_CONTAINER_QUALIFIED_NAME, "qualified_name is required"), - (APPLICATION_CONTAINER_NAME, None, "name is required"), - ], -) -def test_create_for_modification_with_invalid_parameter_raises_value_error( - qualified_name: str, name: str, message: str -): - with pytest.raises(ValueError, match=message): - ApplicationContainer.create_for_modification( - qualified_name=qualified_name, name=name - ) - - -def test_create_for_modification(): - sut = ApplicationContainer.create_for_modification( - qualified_name=APPLICATION_CONTAINER_QUALIFIED_NAME, - name=APPLICATION_CONTAINER_NAME, - ) - - assert sut.qualified_name == APPLICATION_CONTAINER_QUALIFIED_NAME - assert sut.name == APPLICATION_CONTAINER_NAME - - -def test_trim_to_required(): - sut = ApplicationContainer.create_for_modification( - name=APPLICATION_CONTAINER_NAME, - qualified_name=APPLICATION_CONTAINER_QUALIFIED_NAME, - ).trim_to_required() - - assert sut.name == APPLICATION_CONTAINER_NAME - assert sut.qualified_name == APPLICATION_CONTAINER_QUALIFIED_NAME diff --git a/tests/unit/model/application_test.py b/tests/unit/model/application_test.py new file mode 100644 index 000000000..b42ce8f6a --- /dev/null +++ b/tests/unit/model/application_test.py @@ -0,0 +1,73 @@ +import pytest + +from pyatlan.model.assets import Application +from tests.unit.model.constants import ( + APPLICATION_NAME, + APPLICATION_QUALIFIED_NAME, + APP_CONNECTION_QUALIFIED_NAME, + APP_CONNECTOR_TYPE, +) + + +@pytest.mark.parametrize( + "name, connection_qualified_name, message", + [ + (None, "connection/name", "name is required"), + (APPLICATION_NAME, None, "connection_qualified_name is required"), + ], +) +def test_create_with_missing_parameters_raise_value_error( + name: str, connection_qualified_name: str, message: str +): + with pytest.raises(ValueError, match=message): + Application.creator( + name=name, connection_qualified_name=connection_qualified_name + ) + + +def test_create(): + sut = 
Application.creator( + name=APPLICATION_NAME, + connection_qualified_name=APP_CONNECTION_QUALIFIED_NAME, + ) + + assert sut.name == APPLICATION_NAME + assert sut.connection_qualified_name == APP_CONNECTION_QUALIFIED_NAME + assert sut.qualified_name == APPLICATION_QUALIFIED_NAME + assert sut.connector_name == APP_CONNECTOR_TYPE + + +@pytest.mark.parametrize( + "qualified_name, name, message", + [ + (None, APPLICATION_QUALIFIED_NAME, "qualified_name is required"), + (APPLICATION_NAME, None, "name is required"), + ], +) +def test_create_for_modification_with_invalid_parameter_raises_value_error( + qualified_name: str, name: str, message: str +): + with pytest.raises(ValueError, match=message): + Application.create_for_modification( + qualified_name=qualified_name, name=name + ) + + +def test_create_for_modification(): + sut = Application.create_for_modification( + qualified_name=APPLICATION_QUALIFIED_NAME, + name=APPLICATION_NAME, + ) + + assert sut.qualified_name == APPLICATION_QUALIFIED_NAME + assert sut.name == APPLICATION_NAME + + +def test_trim_to_required(): + sut = Application.create_for_modification( + name=APPLICATION_NAME, + qualified_name=APPLICATION_QUALIFIED_NAME, + ).trim_to_required() + + assert sut.name == APPLICATION_NAME + assert sut.qualified_name == APPLICATION_QUALIFIED_NAME diff --git a/tests/unit/model/constants.py b/tests/unit/model/constants.py index 23f6fcab5..27c30a189 100644 --- a/tests/unit/model/constants.py +++ b/tests/unit/model/constants.py @@ -68,11 +68,11 @@ f"{API_CONNECTION_QUALIFIED_NAME}/{API_QUERY_NAME}" ) API_FIELD_REFERENCE_OBJECT_QN = f"{API_CONNECTION_QUALIFIED_NAME}/{API_OBJECT_REF_NAME}" -APPLICATION_CONTAINER_NAME = "application-container" -APPLICATION_CONNECTOR_TYPE = "application" -APPLICATION_CONNECTION_QUALIFIED_NAME = "default/application/123456789" -APPLICATION_CONTAINER_QUALIFIED_NAME = ( - "default/application/123456789/application-container" +APPLICATION_NAME = "application" +APP_CONNECTOR_TYPE = "app" 
+APP_CONNECTION_QUALIFIED_NAME = "default/app/123456789" +APPLICATION_QUALIFIED_NAME = ( + "default/app/123456789/application" ) GCS_BUCKET_NAME = "mybucket" GCS_CONNECTION_QUALIFIED_NAME = "default/gcs/123456789" diff --git a/tests/unit/test_model.py b/tests/unit/test_model.py index 366395e09..b523dc0f1 100644 --- a/tests/unit/test_model.py +++ b/tests/unit/test_model.py @@ -27,7 +27,7 @@ APIPath, APIQuery, APISpec, - ApplicationContainer, + Application, Asset, AtlasGlossary, AtlasGlossaryCategory, @@ -438,7 +438,7 @@ "Optional[APIObject]": APIObject(), "Optional[APIQuery]": APIQuery(), "Optional[List[APIField]]": [APIField()], - "Optional[ApplicationContainer]": [ApplicationContainer], + "Optional[Application]": [Application], "Optional[Schema]": Schema(), "Optional[List[DbtModel]]": [DbtModel()], "Optional[List[DbtSource]]": [DbtSource()], From 0ad1a6c715e621818e3dbfeeab74c61fd07b0694 Mon Sep 17 00:00:00 2001 From: prateekrai-atlan Date: Thu, 21 Nov 2024 16:34:53 +0530 Subject: [PATCH 02/19] qa fix --- pyatlan/model/assets/a_d_l_s.py | 25 ----------------- .../core/cosmos_mongo_d_b_collection.py | 28 ------------------- .../assets/core/cosmos_mongo_d_b_database.py | 28 ------------------- .../model/assets/core/mongo_d_b_collection.py | 28 ------------------- .../model/assets/core/mongo_d_b_database.py | 28 ------------------- pyatlan/model/constants.py | 2 +- tests/integration/app_asset_test.py | 14 +++------- 7 files changed, 5 insertions(+), 148 deletions(-) diff --git a/pyatlan/model/assets/a_d_l_s.py b/pyatlan/model/assets/a_d_l_s.py index 72354fda4..23ee2a72c 100644 --- a/pyatlan/model/assets/a_d_l_s.py +++ b/pyatlan/model/assets/a_d_l_s.py @@ -38,12 +38,6 @@ def __setattr__(self, name, value): """ Unique name of the account for this ADLS asset. 
""" - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ AZURE_RESOURCE_ID: ClassVar[KeywordTextField] = KeywordTextField( "azureResourceId", "azureResourceId", "azureResourceId.text" ) @@ -69,7 +63,6 @@ def __setattr__(self, name, value): _convenience_properties: ClassVar[List[str]] = [ "adls_account_qualified_name", - "asset_application_qualified_name", "azure_resource_id", "azure_location", "adls_account_secondary_location", @@ -90,24 +83,6 @@ def adls_account_qualified_name(self, adls_account_qualified_name: Optional[str] self.attributes = self.Attributes() self.attributes.adls_account_qualified_name = adls_account_qualified_name - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def azure_resource_id(self) -> Optional[str]: return None if self.attributes is None else self.attributes.azure_resource_id diff --git a/pyatlan/model/assets/core/cosmos_mongo_d_b_collection.py b/pyatlan/model/assets/core/cosmos_mongo_d_b_collection.py index 15d505679..142fb1c5a 100644 --- a/pyatlan/model/assets/core/cosmos_mongo_d_b_collection.py +++ b/pyatlan/model/assets/core/cosmos_mongo_d_b_collection.py @@ -54,12 +54,6 @@ def __setattr__(self, name, value): """ Represents attributes for describing the key schema for the table and indexes. 
""" - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ MONGO_DB_COLLECTION_SUBTYPE: ClassVar[KeywordTextField] = KeywordTextField( "mongoDBCollectionSubtype", "mongoDBCollectionSubtype", @@ -413,7 +407,6 @@ def __setattr__(self, name, value): _convenience_properties: ClassVar[List[str]] = [ "cosmos_mongo_d_b_database_qualified_name", "no_s_q_l_schema_definition", - "asset_application_qualified_name", "mongo_d_b_collection_subtype", "mongo_d_b_collection_is_capped", "mongo_d_b_collection_time_field", @@ -513,24 +506,6 @@ def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): self.attributes = self.Attributes() self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def mongo_d_b_collection_subtype(self) -> Optional[str]: return ( @@ -1326,9 +1301,6 @@ class Attributes(CosmosMongoDB.Attributes): default=None, description="" ) no_s_q_l_schema_definition: Optional[str] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) mongo_d_b_collection_subtype: Optional[str] = Field( default=None, description="" ) diff --git a/pyatlan/model/assets/core/cosmos_mongo_d_b_database.py b/pyatlan/model/assets/core/cosmos_mongo_d_b_database.py index 9745893a1..b378ae861 100644 --- 
a/pyatlan/model/assets/core/cosmos_mongo_d_b_database.py +++ b/pyatlan/model/assets/core/cosmos_mongo_d_b_database.py @@ -53,12 +53,6 @@ def __setattr__(self, name, value): """ Represents attributes for describing the key schema for the table and indexes. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ MONGO_DB_DATABASE_COLLECTION_COUNT: ClassVar[NumericField] = NumericField( "mongoDBDatabaseCollectionCount", "mongoDBDatabaseCollectionCount" ) @@ -206,7 +200,6 @@ def __setattr__(self, name, value): _convenience_properties: ClassVar[List[str]] = [ "cosmos_mongo_d_b_account_qualified_name", "no_s_q_l_schema_definition", - "asset_application_qualified_name", "mongo_d_b_database_collection_count", "schema_count", "query_count", @@ -268,24 +261,6 @@ def no_s_q_l_schema_definition(self, no_s_q_l_schema_definition: Optional[str]): self.attributes = self.Attributes() self.attributes.no_s_q_l_schema_definition = no_s_q_l_schema_definition - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def mongo_d_b_database_collection_count(self) -> Optional[int]: return ( @@ -601,9 +576,6 @@ class Attributes(CosmosMongoDB.Attributes): default=None, description="" ) no_s_q_l_schema_definition: Optional[str] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) 
mongo_d_b_database_collection_count: Optional[int] = Field( default=None, description="" ) diff --git a/pyatlan/model/assets/core/mongo_d_b_collection.py b/pyatlan/model/assets/core/mongo_d_b_collection.py index 7d318b8d3..109567078 100644 --- a/pyatlan/model/assets/core/mongo_d_b_collection.py +++ b/pyatlan/model/assets/core/mongo_d_b_collection.py @@ -332,12 +332,6 @@ def __setattr__(self, name, value): """ Time (epoch) at which this asset was last profiled, in milliseconds. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ NO_SQL_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( "noSQLSchemaDefinition", "noSQLSchemaDefinition" ) @@ -403,7 +397,6 @@ def __setattr__(self, name, value): "calculation_view_qualified_name", "is_profiled", "last_profiled_at", - "asset_application_qualified_name", "no_s_q_l_schema_definition", "mongo_d_b_database", ] @@ -1062,24 +1055,6 @@ def last_profiled_at(self, last_profiled_at: Optional[datetime]): self.attributes = self.Attributes() self.attributes.last_profiled_at = last_profiled_at - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def no_s_q_l_schema_definition(self) -> Optional[str]: return ( @@ -1187,9 +1162,6 @@ class Attributes(Table.Attributes): ) is_profiled: Optional[bool] = Field(default=None, description="") last_profiled_at: Optional[datetime] = Field(default=None, description="") - 
asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) no_s_q_l_schema_definition: Optional[str] = Field(default=None, description="") mongo_d_b_database: Optional[MongoDBDatabase] = Field( default=None, description="" diff --git a/pyatlan/model/assets/core/mongo_d_b_database.py b/pyatlan/model/assets/core/mongo_d_b_database.py index 31de73545..6a9af1da1 100644 --- a/pyatlan/model/assets/core/mongo_d_b_database.py +++ b/pyatlan/model/assets/core/mongo_d_b_database.py @@ -139,12 +139,6 @@ def __setattr__(self, name, value): """ Time (epoch) at which this asset was last profiled, in milliseconds. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ NO_SQL_SCHEMA_DEFINITION: ClassVar[TextField] = TextField( "noSQLSchemaDefinition", "noSQLSchemaDefinition" ) @@ -176,7 +170,6 @@ def __setattr__(self, name, value): "calculation_view_qualified_name", "is_profiled", "last_profiled_at", - "asset_application_qualified_name", "no_s_q_l_schema_definition", "mongo_d_b_collections", ] @@ -385,24 +378,6 @@ def last_profiled_at(self, last_profiled_at: Optional[datetime]): self.attributes = self.Attributes() self.attributes.last_profiled_at = last_profiled_at - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def no_s_q_l_schema_definition(self) -> Optional[str]: return ( @@ -454,9 +429,6 @@ class 
Attributes(Database.Attributes): ) is_profiled: Optional[bool] = Field(default=None, description="") last_profiled_at: Optional[datetime] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) no_s_q_l_schema_definition: Optional[str] = Field(default=None, description="") mongo_d_b_collections: Optional[List[MongoDBCollection]] = Field( default=None, description="" diff --git a/pyatlan/model/constants.py b/pyatlan/model/constants.py index c56cb2184..2bfdc19ab 100644 --- a/pyatlan/model/constants.py +++ b/pyatlan/model/constants.py @@ -25,7 +25,7 @@ "APIPath", "APIQuery", "APISpec", - "ApplicationContainer", + "Application", "Collection", "Query", "BIProcess", diff --git a/tests/integration/app_asset_test.py b/tests/integration/app_asset_test.py index 4a1d250fc..4031ca458 100644 --- a/tests/integration/app_asset_test.py +++ b/tests/integration/app_asset_test.py @@ -64,7 +64,7 @@ def test_application( assert application.qualified_name assert application.name == APPLICATION_NAME assert application.connection_qualified_name == connection.qualified_name - assert application.connector_name == AtlanConnectorType.APPLICATION.value + assert application.connector_name == AtlanConnectorType.APP.value # here @@ -108,18 +108,14 @@ def test_retrieve_application( connection: Connection, application: Application, ): - b = client.asset.get_by_guid( - application.guid, asset_type=Application - ) + b = client.asset.get_by_guid(application.guid, asset_type=Application) assert b assert not b.is_incomplete assert b.guid == application.guid assert b.qualified_name == application.qualified_name assert b.name == application.name assert b.connector_name == application.connector_name - assert ( - b.connection_qualified_name == application.connection_qualified_name - ) + assert b.connection_qualified_name == application.connection_qualified_name assert b.certificate_status == CERTIFICATE_STATUS assert 
b.certificate_status_message == CERTIFICATE_MESSAGE @@ -180,9 +176,7 @@ def test_read_deleted_application( connection: Connection, application: Application, ): - deleted = client.asset.get_by_guid( - application.guid, asset_type=Application - ) + deleted = client.asset.get_by_guid(application.guid, asset_type=Application) assert deleted assert deleted.guid == application.guid assert deleted.qualified_name == application.qualified_name From ded2e75710c491d0e52a54da08a3e50babf91388 Mon Sep 17 00:00:00 2001 From: prateekrai-atlan Date: Thu, 21 Nov 2024 16:38:00 +0530 Subject: [PATCH 03/19] formatter fix --- tests/unit/model/application_test.py | 4 +--- tests/unit/model/constants.py | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/unit/model/application_test.py b/tests/unit/model/application_test.py index b42ce8f6a..bf1aaa817 100644 --- a/tests/unit/model/application_test.py +++ b/tests/unit/model/application_test.py @@ -48,9 +48,7 @@ def test_create_for_modification_with_invalid_parameter_raises_value_error( qualified_name: str, name: str, message: str ): with pytest.raises(ValueError, match=message): - Application.create_for_modification( - qualified_name=qualified_name, name=name - ) + Application.create_for_modification(qualified_name=qualified_name, name=name) def test_create_for_modification(): diff --git a/tests/unit/model/constants.py b/tests/unit/model/constants.py index 27c30a189..f2c9cdad7 100644 --- a/tests/unit/model/constants.py +++ b/tests/unit/model/constants.py @@ -71,9 +71,7 @@ APPLICATION_NAME = "application" APP_CONNECTOR_TYPE = "app" APP_CONNECTION_QUALIFIED_NAME = "default/app/123456789" -APPLICATION_QUALIFIED_NAME = ( - "default/app/123456789/application" -) +APPLICATION_QUALIFIED_NAME = "default/app/123456789/application" GCS_BUCKET_NAME = "mybucket" GCS_CONNECTION_QUALIFIED_NAME = "default/gcs/123456789" GCS_QUALIFIED_NAME = f"{GCS_CONNECTION_QUALIFIED_NAME}/{GCS_BUCKET_NAME}" From 
10f651ed334cea168acc92dba01983adc09dcae2 Mon Sep 17 00:00:00 2001 From: vaibhavatlan Date: Thu, 21 Nov 2024 15:38:17 +0530 Subject: [PATCH 04/19] FT-778: Endpoint code added --- pyatlan/client/constants.py | 6 ++++++ pyatlan/client/credential.py | 30 +++++++++++++++++++++++++++++- pyatlan/model/credential.py | 12 +++++++++++- 3 files changed, 46 insertions(+), 2 deletions(-) diff --git a/pyatlan/client/constants.py b/pyatlan/client/constants.py index 5edd57b46..b62c61d84 100644 --- a/pyatlan/client/constants.py +++ b/pyatlan/client/constants.py @@ -488,6 +488,12 @@ HTTPStatus.OK, endpoint=EndPoint.HERACLES, ) +GET_ALL_CREDENTIALS = API( + CREDENTIALS_API, + HTTPMethod.GET, + HTTPStatus.OK, + endpoint=EndPoint.HERACLES +) UPDATE_CREDENTIAL_BY_GUID = API( CREDENTIALS_API + "/{credential_guid}", HTTPMethod.POST, diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index 6b81fe0e7..ec91c4466 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -4,12 +4,14 @@ from pyatlan.client.constants import ( GET_CREDENTIAL_BY_GUID, TEST_CREDENTIAL, - UPDATE_CREDENTIAL_BY_GUID, + GET_ALL_CREDENTIALS, + UPDATE_CREDENTIAL_BY_GUID ) from pyatlan.errors import ErrorCode from pyatlan.model.credential import ( Credential, CredentialResponse, + CredentialResponseList, CredentialTestResponse, ) @@ -46,6 +48,32 @@ def get(self, guid: str) -> CredentialResponse: if not isinstance(raw_json, dict): return raw_json return CredentialResponse(**raw_json) + + @validate_arguments + def get_all(self, filter: dict, limit: int = None, offset: int = None) -> CredentialResponseList: + """ + Retrieves all credentials based on the provided filter and optional pagination parameters. + + :param filter: A dictionary specifying the filter criteria (required). + :param limit: Maximum number of credentials to retrieve (optional). + :param offset: Number of credentials to skip before starting retrieval (optional). + :returns: A CredentialResponseList instance. 
+ :raises: AtlanError on any error during API invocation. + """ + params = {"filter": filter} + if limit is not None: + params["limit"] = limit + if offset is not None: + params["offset"] = offset + + raw_json = self._client._call_api(GET_ALL_CREDENTIALS, query_params=params) + + if not isinstance(raw_json, dict) or "records" not in raw_json: + raise ErrorCode.INVALID_RESPONSE.exception_with_parameters( + "Expected a dictionary containing 'records'" + ) + + return CredentialResponseList(**raw_json) @validate_arguments def test(self, credential: Credential) -> CredentialTestResponse: diff --git a/pyatlan/model/credential.py b/pyatlan/model/credential.py index cb677d8e3..ef1e013fa 100644 --- a/pyatlan/model/credential.py +++ b/pyatlan/model/credential.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional +from typing import Any, Dict, Optional,List from pydantic.v1 import Field @@ -94,6 +94,16 @@ def to_credential(self) -> Credential: ) +class CredentialResponseList(AtlanObject): + """ + Model representing a response containing a list of CredentialResponse objects. + + Attributes: + records (List[CredentialResponse]): The list of credential records returned. 
+ """ + + records: List[CredentialResponse] = Field(..., description="The list of credential records returned.") + class CredentialTestResponse(AtlanObject): code: Optional[int] error: Optional[str] From 4235b4eae27f916352be9f527ea896bf15677b2a Mon Sep 17 00:00:00 2001 From: Vaibhav Chopra Date: Thu, 21 Nov 2024 17:53:25 +0530 Subject: [PATCH 05/19] Update pyatlan/client/credential.py Co-authored-by: Aryaman <56113566+Aryamanz29@users.noreply.github.com> --- pyatlan/client/credential.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index ec91c4466..2c7e3104e 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -50,7 +50,7 @@ def get(self, guid: str) -> CredentialResponse: return CredentialResponse(**raw_json) @validate_arguments - def get_all(self, filter: dict, limit: int = None, offset: int = None) -> CredentialResponseList: + def get_all(self, filter: Dict[str, Any], limit: int = None, offset: int = None) -> CredentialResponseList: """ Retrieves all credentials based on the provided filter and optional pagination parameters. From 74cca36c93f704387046179c559bb7c1b30a54e3 Mon Sep 17 00:00:00 2001 From: Vaibhav Chopra Date: Thu, 21 Nov 2024 17:54:04 +0530 Subject: [PATCH 06/19] Update pyatlan/client/credential.py Co-authored-by: Aryaman <56113566+Aryamanz29@users.noreply.github.com> --- pyatlan/client/credential.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index 2c7e3104e..8dbe04a20 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -54,10 +54,10 @@ def get_all(self, filter: Dict[str, Any], limit: int = None, offset: int = None) """ Retrieves all credentials based on the provided filter and optional pagination parameters. - :param filter: A dictionary specifying the filter criteria (required). 
- :param limit: Maximum number of credentials to retrieve (optional). - :param offset: Number of credentials to skip before starting retrieval (optional). - :returns: A CredentialResponseList instance. + :param filter: dictionary specifying the filter criteria. + :param limit: (optional) maximum number of credentials to retrieve. + :param offset: (optional) number of credentials to skip before starting retrieval. + :returns: CredentialResponseList instance. :raises: AtlanError on any error during API invocation. """ params = {"filter": filter} From 328258df30babe30d63a65460135695720464290 Mon Sep 17 00:00:00 2001 From: Vaibhav Chopra Date: Thu, 21 Nov 2024 18:08:44 +0530 Subject: [PATCH 07/19] Update pyatlan/client/credential.py Co-authored-by: Aryaman <56113566+Aryamanz29@users.noreply.github.com> --- pyatlan/client/credential.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index 8dbe04a20..eb01d34b2 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -60,7 +60,8 @@ def get_all(self, filter: Dict[str, Any], limit: int = None, offset: int = None) :returns: CredentialResponseList instance. :raises: AtlanError on any error during API invocation. 
""" - params = {"filter": filter} + if filter is not None: + params["filter"] = filter if limit is not None: params["limit"] = limit if offset is not None: From 6f9cc40967471e46e51b1f5412585fb2e7def901 Mon Sep 17 00:00:00 2001 From: Vaibhav Chopra Date: Thu, 21 Nov 2024 18:09:04 +0530 Subject: [PATCH 08/19] Update pyatlan/model/credential.py Co-authored-by: Aryaman <56113566+Aryamanz29@users.noreply.github.com> --- pyatlan/model/credential.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyatlan/model/credential.py b/pyatlan/model/credential.py index ef1e013fa..f4c599a98 100644 --- a/pyatlan/model/credential.py +++ b/pyatlan/model/credential.py @@ -102,7 +102,7 @@ class CredentialResponseList(AtlanObject): records (List[CredentialResponse]): The list of credential records returned. """ - records: List[CredentialResponse] = Field(..., description="The list of credential records returned.") + records: Optional[List[CredentialResponse]] = Field(default=None, description="list of credential records returned.") class CredentialTestResponse(AtlanObject): code: Optional[int] From bc6e271444189b199dcde088ed902c597f3677f4 Mon Sep 17 00:00:00 2001 From: Vaibhav Chopra Date: Thu, 21 Nov 2024 18:09:13 +0530 Subject: [PATCH 09/19] Update pyatlan/model/credential.py Co-authored-by: Aryaman <56113566+Aryamanz29@users.noreply.github.com> --- pyatlan/model/credential.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pyatlan/model/credential.py b/pyatlan/model/credential.py index f4c599a98..f78bf1677 100644 --- a/pyatlan/model/credential.py +++ b/pyatlan/model/credential.py @@ -97,9 +97,6 @@ def to_credential(self) -> Credential: class CredentialResponseList(AtlanObject): """ Model representing a response containing a list of CredentialResponse objects. - - Attributes: - records (List[CredentialResponse]): The list of credential records returned. 
""" records: Optional[List[CredentialResponse]] = Field(default=None, description="list of credential records returned.") From 97d636a41b1b223f35e964455443262a7cedd178 Mon Sep 17 00:00:00 2001 From: vaibhavatlan Date: Fri, 22 Nov 2024 10:33:42 +0530 Subject: [PATCH 10/19] FT-778: Added typing lib and defined params as dict --- pyatlan/client/credential.py | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index eb01d34b2..3089a0b49 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -1,11 +1,13 @@ +from typing import Any, Dict, Optional + from pydantic.v1 import validate_arguments from pyatlan.client.common import ApiCaller from pyatlan.client.constants import ( + GET_ALL_CREDENTIALS, GET_CREDENTIAL_BY_GUID, TEST_CREDENTIAL, - GET_ALL_CREDENTIALS, - UPDATE_CREDENTIAL_BY_GUID + UPDATE_CREDENTIAL_BY_GUID, ) from pyatlan.errors import ErrorCode from pyatlan.model.credential import ( @@ -48,19 +50,25 @@ def get(self, guid: str) -> CredentialResponse: if not isinstance(raw_json, dict): return raw_json return CredentialResponse(**raw_json) - + @validate_arguments - def get_all(self, filter: Dict[str, Any], limit: int = None, offset: int = None) -> CredentialResponseList: + def get_all( + self, + filter: Optional[Dict[str, Any]] = None, + limit: Optional[int] = None, + offset: Optional[int] = None, + ) -> CredentialResponseList: """ Retrieves all credentials based on the provided filter and optional pagination parameters. - :param filter: dictionary specifying the filter criteria. + :param filter: dictionary specifying the filter criteria. :param limit: (optional) maximum number of credentials to retrieve. :param offset: (optional) number of credentials to skip before starting retrieval. :returns: CredentialResponseList instance. :raises: AtlanError on any error during API invocation. 
- """ - if filter is not None: + """ + params = {} + if filter is not None: params["filter"] = filter if limit is not None: params["limit"] = limit From 15d7d2cbd73dd43601dd0304c62b3b8df5a36160 Mon Sep 17 00:00:00 2001 From: vaibhavatlan Date: Fri, 22 Nov 2024 10:52:30 +0530 Subject: [PATCH 11/19] FT-778: Added typing lib and defined params --- pyatlan/client/constants.py | 5 +---- pyatlan/client/credential.py | 2 +- pyatlan/model/credential.py | 9 ++++++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/pyatlan/client/constants.py b/pyatlan/client/constants.py index b62c61d84..e1e2009b1 100644 --- a/pyatlan/client/constants.py +++ b/pyatlan/client/constants.py @@ -489,10 +489,7 @@ endpoint=EndPoint.HERACLES, ) GET_ALL_CREDENTIALS = API( - CREDENTIALS_API, - HTTPMethod.GET, - HTTPStatus.OK, - endpoint=EndPoint.HERACLES + CREDENTIALS_API, HTTPMethod.GET, HTTPStatus.OK, endpoint=EndPoint.HERACLES ) UPDATE_CREDENTIAL_BY_GUID = API( CREDENTIALS_API + "/{credential_guid}", diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index 3089a0b49..71ca5a437 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -67,7 +67,7 @@ def get_all( :returns: CredentialResponseList instance. :raises: AtlanError on any error during API invocation. """ - params = {} + params: Dict[str, Any] = {} if filter is not None: params["filter"] = filter if limit is not None: diff --git a/pyatlan/model/credential.py b/pyatlan/model/credential.py index f78bf1677..e2d50ecf5 100644 --- a/pyatlan/model/credential.py +++ b/pyatlan/model/credential.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional,List +from typing import Any, Dict, Optional, List from pydantic.v1 import Field @@ -98,8 +98,11 @@ class CredentialResponseList(AtlanObject): """ Model representing a response containing a list of CredentialResponse objects. 
""" - - records: Optional[List[CredentialResponse]] = Field(default=None, description="list of credential records returned.") + + records: Optional[List[CredentialResponse]] = Field( + default=None, description="list of credential records returned." + ) + class CredentialTestResponse(AtlanObject): code: Optional[int] From d489a9ee06da9b193905088e94cffa51ab4064e1 Mon Sep 17 00:00:00 2001 From: vaibhavatlan Date: Fri, 22 Nov 2024 11:37:44 +0530 Subject: [PATCH 12/19] Corrected the error code. --- pyatlan/client/credential.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index 71ca5a437..b307f3ce4 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -78,9 +78,7 @@ def get_all( raw_json = self._client._call_api(GET_ALL_CREDENTIALS, query_params=params) if not isinstance(raw_json, dict) or "records" not in raw_json: - raise ErrorCode.INVALID_RESPONSE.exception_with_parameters( - "Expected a dictionary containing 'records'" - ) + raise ErrorCode.JSON_ERROR.exception_with_parameters("No records found in response", 400, "API response did not contain the expected 'records' key") return CredentialResponseList(**raw_json) From dc05a1f6703aceb4a6ca229aa5d2ec1005c80a59 Mon Sep 17 00:00:00 2001 From: vaibhavatlan Date: Fri, 22 Nov 2024 11:40:11 +0530 Subject: [PATCH 13/19] Corrected the error code and did qa check --- pyatlan/client/credential.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index b307f3ce4..c39eb98bf 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -78,7 +78,11 @@ def get_all( raw_json = self._client._call_api(GET_ALL_CREDENTIALS, query_params=params) if not isinstance(raw_json, dict) or "records" not in raw_json: - raise ErrorCode.JSON_ERROR.exception_with_parameters("No records found in response", 400, "API response did not contain the 
expected 'records' key") + raise ErrorCode.JSON_ERROR.exception_with_parameters( + "No records found in response", + 400, + "API response did not contain the expected 'records' key", + ) return CredentialResponseList(**raw_json) From 847247cc3050de61994836cec45b56e33dd4d5a0 Mon Sep 17 00:00:00 2001 From: vaibhavatlan Date: Fri, 22 Nov 2024 16:40:44 +0530 Subject: [PATCH 14/19] Made all fields in the CredentialResponse model optional to improve flexibility, as some fields were observed to have null values. Updated the filter query parameter to use json.dumps to ensure it is properly stringified as required by the API. Utilized format_path_with_params for consistent query parameter alignment across API request. --- pyatlan/client/credential.py | 9 ++++++--- pyatlan/model/credential.py | 28 ++++++++++++++-------------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index c39eb98bf..b0a520d6b 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -1,3 +1,4 @@ +from json import dumps from typing import Any, Dict, Optional from pydantic.v1 import validate_arguments @@ -61,7 +62,7 @@ def get_all( """ Retrieves all credentials based on the provided filter and optional pagination parameters. - :param filter: dictionary specifying the filter criteria. + :param filter: (optional) dictionary specifying the filter criteria. :param limit: (optional) maximum number of credentials to retrieve. :param offset: (optional) number of credentials to skip before starting retrieval. :returns: CredentialResponseList instance. 
@@ -69,13 +70,15 @@ def get_all( """ params: Dict[str, Any] = {} if filter is not None: - params["filter"] = filter + params["filter"] = dumps(filter) if limit is not None: params["limit"] = limit if offset is not None: params["offset"] = offset - raw_json = self._client._call_api(GET_ALL_CREDENTIALS, query_params=params) + raw_json = self._client._call_api( + GET_ALL_CREDENTIALS.format_path_with_params(), query_params=params + ) if not isinstance(raw_json, dict) or "records" not in raw_json: raise ErrorCode.JSON_ERROR.exception_with_parameters( diff --git a/pyatlan/model/credential.py b/pyatlan/model/credential.py index e2d50ecf5..0e329c136 100644 --- a/pyatlan/model/credential.py +++ b/pyatlan/model/credential.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, List +from typing import Any, Dict, List, Optional from pydantic.v1 import Field @@ -54,20 +54,20 @@ class Credential(AtlanObject): class CredentialResponse(AtlanObject): - id: str - version: str - is_active: bool - created_at: int - updated_at: int - created_by: str - tenant_id: str - name: str + id: Optional[str] + version: Optional[str] + is_active: Optional[bool] + created_at: Optional[int] + updated_at: Optional[int] + created_by: Optional[str] + tenant_id: Optional[str] + name: Optional[str] description: Optional[str] - connector_config_name: str - connector: str - connector_type: str - auth_type: str - host: str + connector_config_name: Optional[str] + connector: Optional[str] + connector_type: Optional[str] + auth_type: Optional[str] + host: Optional[str] port: Optional[int] metadata: Optional[Dict[str, Any]] level: Optional[Dict[str, Any]] From 8624f59bd3480a357e9c8cc6742b95ca5f8668d7 Mon Sep 17 00:00:00 2001 From: vaibhavatlan Date: Fri, 22 Nov 2024 17:48:55 +0530 Subject: [PATCH 15/19] Add Unit Test for get_all() --- tests/unit/test_credential_client.py | 95 ++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) diff --git a/tests/unit/test_credential_client.py 
b/tests/unit/test_credential_client.py index 2cfacf59e..00961cf16 100644 --- a/tests/unit/test_credential_client.py +++ b/tests/unit/test_credential_client.py @@ -11,6 +11,7 @@ from pyatlan.model.credential import ( Credential, CredentialResponse, + CredentialResponseList, CredentialTestResponse, ) @@ -189,3 +190,97 @@ def test_cred_test_update_when_given_cred( assert isinstance(cred_response, CredentialResponse) cred = cred_response.to_credential() _assert_cred_response(cred, credential_response) + + +@pytest.mark.parametrize( + "test_filter, test_limit, test_offset, test_response", + [ + (None, None, None, {"records": [{"id": "cred1"}, {"id": "cred2"}]}), + ({"name": "test"}, 5, 0, {"records": [{"id": "cred3"}]}), + ({"invalid": "field"}, 10, 0, {"records": []}), + ], +) +def test_cred_get_all_success( + test_filter, test_limit, test_offset, test_response, mock_api_caller +): + mock_api_caller._call_api.return_value = test_response + client = CredentialClient(mock_api_caller) + + result = client.get_all(filter=test_filter, limit=test_limit, offset=test_offset) + + assert isinstance(result, CredentialResponseList) + assert len(result.records) == len(test_response["records"]) + for record, expected in zip(result.records, test_response["records"]): + assert record.id == expected["id"] + + +def test_cred_get_all_empty_response(mock_api_caller): + mock_api_caller._call_api.return_value = {"records": []} + client = CredentialClient(mock_api_caller) + + result = client.get_all() + + assert isinstance(result, CredentialResponseList) + assert len(result.records) == 0 + + +def test_cred_get_all_invalid_response(mock_api_caller): + mock_api_caller._call_api.return_value = {} + client = CredentialClient(mock_api_caller) + + with pytest.raises(Exception, match="No records found in response"): + client.get_all() + + +@pytest.mark.parametrize( + "test_filter, test_limit, test_offset", + [ + ("invalid_filter", None, None), + (None, "invalid_limit", None), + (None, None, 
"invalid_offset"), + ], +) +def test_cred_get_all_invalid_params_raises_validation_error( + test_filter, test_limit, test_offset, client: CredentialClient +): + with pytest.raises(ValidationError): + client.get_all(filter=test_filter, limit=test_limit, offset=test_offset) + + +def test_cred_get_all_timeout(mock_api_caller): + mock_api_caller._call_api.side_effect = TimeoutError("Request timed out") + client = CredentialClient(mock_api_caller) + + with pytest.raises(TimeoutError, match="Request timed out"): + client.get_all() + + +def test_cred_get_all_partial_response(mock_api_caller): + mock_api_caller._call_api.return_value = { + "records": [{"id": "cred1", "name": "Test Credential"}] + } + client = CredentialClient(mock_api_caller) + + result = client.get_all() + + assert isinstance(result, CredentialResponseList) + assert result.records[0].id == "cred1" + assert result.records[0].name == "Test Credential" + assert result.records[0].host is None + + +def test_cred_get_all_invalid_filter_type(mock_api_caller): + client = CredentialClient(mock_api_caller) + + with pytest.raises(ValidationError, match="value is not a valid dict"): + client.get_all(filter="invalid_filter") + + +def test_cred_get_all_no_results(mock_api_caller): + mock_api_caller._call_api.return_value = {"records": []} + client = CredentialClient(mock_api_caller) + + result = client.get_all(filter={"name": "nonexistent"}) + + assert isinstance(result, CredentialResponseList) + assert len(result.records) == 0 From 970c4525520bdfe56d485e00425cb9403ec80551 Mon Sep 17 00:00:00 2001 From: vaibhavatlan Date: Fri, 22 Nov 2024 19:33:39 +0530 Subject: [PATCH 16/19] Add all the required Integration Tests --- tests/integration/test_workflow_client.py | 74 +++++++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/tests/integration/test_workflow_client.py b/tests/integration/test_workflow_client.py index bb09e098a..3b35aaa8a 100644 --- a/tests/integration/test_workflow_client.py +++ 
b/tests/integration/test_workflow_client.py @@ -13,6 +13,7 @@ from pyatlan.model.workflow import WorkflowResponse, WorkflowSchedule from tests.integration.client import TestId, delete_asset from tests.integration.connection_test import create_connection +from pyatlan.client.atlan import AtlanClient MODULE_NAME = TestId.make_unique("WorfklowClient") WORKFLOW_TEMPLATE_REF = "workflowTemplateRef" @@ -269,3 +270,76 @@ def test_workflow_add_remove_schedule(client: AtlanClient, workflow: WorkflowRes # Now remove the scheduled run response = client.workflow.remove_schedule(workflow) _assert_remove_schedule(response, workflow) + +def test_get_all_credentials(client: AtlanClient): + + credentials = client.credentials.get_all() + assert credentials, "Expected credentials but found None" + assert credentials.records is not None, "Expected records but found None" + assert len(credentials.records) > 0, "Expected at least one record but found none" + + +def test_get_all_credentials_with_limit_and_offset(client: AtlanClient): + limit = 5 + offset = 2 + credentials = client.credentials.get_all(limit=limit, offset=offset) + assert credentials.records is not None, "Expected records but found None" + assert len(credentials.records) <= limit, ( + f"Expected at most {limit} records, got {len(credentials.records)}" + ) + + +def test_get_all_credentials_with_filter_limit_offset(client: AtlanClient): + filter_criteria = {"connectorType": "rest"} + limit = 1 + offset = 1 + credentials = client.credentials.get_all(filter=filter_criteria, limit=limit, offset=offset) + assert len(credentials.records) <= limit, "Exceeded limit in results" + for cred in credentials.records: + assert cred.connector_type == "rest" + +def test_get_all_credentials_with_multiple_filters(client: AtlanClient): + filter_criteria = { + "connectorType": "jdbc", + "isActive": True + } + + credentials = client.credentials.get_all(filter=filter_criteria) + assert credentials, "Expected credentials but found None" + assert 
credentials.records is not None, "Expected records but found None" + assert len(credentials.records) > 0, "Expected at least one record but found none" + + for record in credentials.records: + assert record.connector_type == "jdbc", f"Expected 'jdbc', got {record.connectorType}" + assert record.is_active, f"Expected active record, but got inactive: {record}" + +def test_get_all_credentials_with_invalid_filter_key(client: AtlanClient): + filter_criteria = { + "invalidKey": "someValue" + } + try: + credentials = client.credentials.get_all(filter=filter_criteria) + pytest.fail("Expected an error due to invalid filter key, but none occurred.") + except Exception as e: + assert "400" in str(e), f"Expected a 400 error, but got: {e}" + +def test_get_all_credentials_with_invalid_filter_value(client: AtlanClient): + + filter_criteria = { + "connectorType": 123 # Invalid type (should be a string) + } + + try: + credentials = client.credentials.get_all(filter=filter_criteria) + pytest.fail("Expected an error due to invalid filter value, but none occurred.") + except Exception as e: + assert "400" in str(e), f"Expected a 400 error, but got: {e}" + +def test_get_all_credentials_with_large_limit(client: AtlanClient): + + limit = 100 # Larger than the total number of records + credentials = client.credentials.get_all(limit=limit) + + assert credentials, "Expected credentials but found None" + assert credentials.records is not None, "Expected records but found None" + assert len(credentials.records) <= limit, f"Expected at most {limit} records, but got {len(credentials.records)}" From 69a9c732dda351c4359a998120172b5e9c7255df Mon Sep 17 00:00:00 2001 From: vaibhavatlan Date: Fri, 22 Nov 2024 19:50:36 +0530 Subject: [PATCH 17/19] Final Integration Tests --- tests/integration/test_workflow_client.py | 65 ++++++++++++----------- 1 file changed, 35 insertions(+), 30 deletions(-) diff --git a/tests/integration/test_workflow_client.py b/tests/integration/test_workflow_client.py index 
3b35aaa8a..8b1a68d7f 100644 --- a/tests/integration/test_workflow_client.py +++ b/tests/integration/test_workflow_client.py @@ -13,7 +13,6 @@ from pyatlan.model.workflow import WorkflowResponse, WorkflowSchedule from tests.integration.client import TestId, delete_asset from tests.integration.connection_test import create_connection -from pyatlan.client.atlan import AtlanClient MODULE_NAME = TestId.make_unique("WorfklowClient") WORKFLOW_TEMPLATE_REF = "workflowTemplateRef" @@ -271,12 +270,14 @@ def test_workflow_add_remove_schedule(client: AtlanClient, workflow: WorkflowRes response = client.workflow.remove_schedule(workflow) _assert_remove_schedule(response, workflow) + def test_get_all_credentials(client: AtlanClient): - credentials = client.credentials.get_all() assert credentials, "Expected credentials but found None" assert credentials.records is not None, "Expected records but found None" - assert len(credentials.records) > 0, "Expected at least one record but found none" + assert ( + len(credentials.records or []) > 0 + ), "Expected at least one record but found none" def test_get_all_credentials_with_limit_and_offset(client: AtlanClient): @@ -284,62 +285,66 @@ def test_get_all_credentials_with_limit_and_offset(client: AtlanClient): offset = 2 credentials = client.credentials.get_all(limit=limit, offset=offset) assert credentials.records is not None, "Expected records but found None" - assert len(credentials.records) <= limit, ( - f"Expected at most {limit} records, got {len(credentials.records)}" - ) + assert ( + len(credentials.records or []) <= limit + ), f"Expected at most {limit} records, got {len(credentials.records or [])}" def test_get_all_credentials_with_filter_limit_offset(client: AtlanClient): filter_criteria = {"connectorType": "rest"} limit = 1 offset = 1 - credentials = client.credentials.get_all(filter=filter_criteria, limit=limit, offset=offset) - assert len(credentials.records) <= limit, "Exceeded limit in results" - for cred in 
credentials.records: - assert cred.connector_type == "rest" + credentials = client.credentials.get_all( + filter=filter_criteria, limit=limit, offset=offset + ) + assert len(credentials.records or []) <= limit, "Exceeded limit in results" + for cred in credentials.records or []: + assert ( + cred.connector_type == "rest" + ), f"Expected 'rest', got {cred.connector_type}" + def test_get_all_credentials_with_multiple_filters(client: AtlanClient): - filter_criteria = { - "connectorType": "jdbc", - "isActive": True - } + filter_criteria = {"connectorType": "jdbc", "isActive": True} credentials = client.credentials.get_all(filter=filter_criteria) assert credentials, "Expected credentials but found None" assert credentials.records is not None, "Expected records but found None" - assert len(credentials.records) > 0, "Expected at least one record but found none" + assert ( + len(credentials.records or []) > 0 + ), "Expected at least one record but found none" - for record in credentials.records: - assert record.connector_type == "jdbc", f"Expected 'jdbc', got {record.connectorType}" + for record in credentials.records or []: + assert ( + record.connector_type == "jdbc" + ), f"Expected 'jdbc', got {record.connector_type}" assert record.is_active, f"Expected active record, but got inactive: {record}" + def test_get_all_credentials_with_invalid_filter_key(client: AtlanClient): - filter_criteria = { - "invalidKey": "someValue" - } + filter_criteria = {"invalidKey": "someValue"} try: - credentials = client.credentials.get_all(filter=filter_criteria) + client.credentials.get_all(filter=filter_criteria) pytest.fail("Expected an error due to invalid filter key, but none occurred.") except Exception as e: assert "400" in str(e), f"Expected a 400 error, but got: {e}" -def test_get_all_credentials_with_invalid_filter_value(client: AtlanClient): - filter_criteria = { - "connectorType": 123 # Invalid type (should be a string) - } +def 
test_get_all_credentials_with_invalid_filter_value(client: AtlanClient): + filter_criteria = {"connector_type": 123} try: - credentials = client.credentials.get_all(filter=filter_criteria) + client.credentials.get_all(filter=filter_criteria) pytest.fail("Expected an error due to invalid filter value, but none occurred.") except Exception as e: assert "400" in str(e), f"Expected a 400 error, but got: {e}" -def test_get_all_credentials_with_large_limit(client: AtlanClient): - limit = 100 # Larger than the total number of records +def test_get_all_credentials_with_large_limit(client: AtlanClient): + limit = 100 credentials = client.credentials.get_all(limit=limit) - assert credentials, "Expected credentials but found None" assert credentials.records is not None, "Expected records but found None" - assert len(credentials.records) <= limit, f"Expected at most {limit} records, but got {len(credentials.records)}" + assert ( + len(credentials.records or []) <= limit + ), f"Expected at most {limit} records, but got {len(credentials.records or [])}" From b95e4f4a18ca08daa023ddacce7c82a0929ccc2f Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 25 Nov 2024 16:15:02 +0530 Subject: [PATCH 18/19] [change] Minor changes in docstrings and tests --- pyatlan/client/credential.py | 4 ++-- tests/integration/test_workflow_client.py | 16 +++------------- 2 files changed, 5 insertions(+), 15 deletions(-) diff --git a/pyatlan/client/credential.py b/pyatlan/client/credential.py index b0a520d6b..ca9e7e464 100644 --- a/pyatlan/client/credential.py +++ b/pyatlan/client/credential.py @@ -60,11 +60,11 @@ def get_all( offset: Optional[int] = None, ) -> CredentialResponseList: """ - Retrieves all credentials based on the provided filter and optional pagination parameters. + Retrieves all credentials. :param filter: (optional) dictionary specifying the filter criteria. :param limit: (optional) maximum number of credentials to retrieve. 
- :param offset: (optional) number of credentials to skip before starting retrieval. + :param offset: (optional) number of credentials to skip before starting retrieval. :returns: CredentialResponseList instance. :raises: AtlanError on any error during API invocation. """ diff --git a/tests/integration/test_workflow_client.py b/tests/integration/test_workflow_client.py index 8b1a68d7f..61f6e5e6b 100644 --- a/tests/integration/test_workflow_client.py +++ b/tests/integration/test_workflow_client.py @@ -291,7 +291,7 @@ def test_get_all_credentials_with_limit_and_offset(client: AtlanClient): def test_get_all_credentials_with_filter_limit_offset(client: AtlanClient): - filter_criteria = {"connectorType": "rest"} + filter_criteria = {"connectorType": "jdbc"} limit = 1 offset = 1 credentials = client.credentials.get_all( @@ -300,8 +300,8 @@ def test_get_all_credentials_with_filter_limit_offset(client: AtlanClient): assert len(credentials.records or []) <= limit, "Exceeded limit in results" for cred in credentials.records or []: assert ( - cred.connector_type == "rest" - ), f"Expected 'rest', got {cred.connector_type}" + cred.connector_type == "jdbc" + ), f"Expected 'jdbc', got {cred.connector_type}" def test_get_all_credentials_with_multiple_filters(client: AtlanClient): @@ -338,13 +338,3 @@ def test_get_all_credentials_with_invalid_filter_value(client: AtlanClient): pytest.fail("Expected an error due to invalid filter value, but none occurred.") except Exception as e: assert "400" in str(e), f"Expected a 400 error, but got: {e}" - - -def test_get_all_credentials_with_large_limit(client: AtlanClient): - limit = 100 - credentials = client.credentials.get_all(limit=limit) - assert credentials, "Expected credentials but found None" - assert credentials.records is not None, "Expected records but found None" - assert ( - len(credentials.records or []) <= limit - ), f"Expected at most {limit} records, but got {len(credentials.records or [])}" From 
61c22db1d59ea6b647e7cbe50c55751a85014599 Mon Sep 17 00:00:00 2001 From: Aryamanz29 Date: Mon, 25 Nov 2024 17:47:16 +0530 Subject: [PATCH 19/19] [generator] Generated latest typedefs models --- docs/asset/businessprocessmodel.rst | 10 + docs/asset/businessprocessmodelentity.rst | 10 + docs/assets.rst | 6 +- pyatlan/model/assets/__init__.py | 10 +- pyatlan/model/assets/__init__.pyi | 18 +- .../model/assets/business_process_model.py | 70 + .../assets/business_process_model_entity.py | 33 + pyatlan/model/assets/core/__init__.py | 6 +- pyatlan/model/assets/core/asset.py | 20 + pyatlan/model/assets/core/catalog.py | 107 +- pyatlan/model/assets/core/custom.py | 107 -- .../databricks_unity_catalog_tag.py | 38 +- pyatlan/model/assets/core/m_c_monitor.py | 18 + pyatlan/model/assets/core/model_attribute.py | 29 + .../core/model_attribute_association.py | 57 - .../assets/core/model_entity_association.py | 182 +- pyatlan/model/assets/custom_dataset.py | 68 - pyatlan/model/assets/custom_field.py | 1541 ----------------- pyatlan/model/assets/custom_table.py | 960 ---------- pyatlan/model/assets/data_studio.py | 107 +- pyatlan/model/assets/g_c_s.py | 107 +- pyatlan/model/enums.py | 5 - pyatlan/model/structs.py | 123 +- 23 files changed, 647 insertions(+), 2985 deletions(-) create mode 100644 docs/asset/businessprocessmodel.rst create mode 100644 docs/asset/businessprocessmodelentity.rst create mode 100644 pyatlan/model/assets/business_process_model.py create mode 100644 pyatlan/model/assets/business_process_model_entity.py delete mode 100644 pyatlan/model/assets/core/custom.py rename pyatlan/model/assets/{ => core}/databricks_unity_catalog_tag.py (94%) delete mode 100644 pyatlan/model/assets/custom_dataset.py delete mode 100644 pyatlan/model/assets/custom_field.py delete mode 100644 pyatlan/model/assets/custom_table.py diff --git a/docs/asset/businessprocessmodel.rst b/docs/asset/businessprocessmodel.rst new file mode 100644 index 000000000..6f408b88c --- /dev/null +++ 
b/docs/asset/businessprocessmodel.rst @@ -0,0 +1,10 @@ +.. _businessprocessmodel: + +BusinessProcessModel +==================== + +.. module:: pyatlan.model.assets + :no-index: + +.. autoclass:: BusinessProcessModel + :members: diff --git a/docs/asset/businessprocessmodelentity.rst b/docs/asset/businessprocessmodelentity.rst new file mode 100644 index 000000000..3b68c5d55 --- /dev/null +++ b/docs/asset/businessprocessmodelentity.rst @@ -0,0 +1,10 @@ +.. _businessprocessmodelentity: + +BusinessProcessModelEntity +========================== + +.. module:: pyatlan.model.assets + :no-index: + +.. autoclass:: BusinessProcessModelEntity + :members: diff --git a/docs/assets.rst b/docs/assets.rst index d832cca0a..61b6b3b5f 100644 --- a/docs/assets.rst +++ b/docs/assets.rst @@ -52,6 +52,8 @@ You can interact with all of the following different kinds of assets: asset/businesspolicyexception asset/businesspolicyincident asset/businesspolicylog + asset/businessprocessmodel + asset/businessprocessmodelentity asset/calculationview asset/catalog asset/cloud @@ -83,10 +85,6 @@ You can interact with all of the following different kinds of assets: asset/cubedimension asset/cubefield asset/cubehierarchy - asset/custom - asset/customdataset - asset/customfield - asset/customtable asset/datacontract asset/datadomain asset/datamesh diff --git a/pyatlan/model/assets/__init__.py b/pyatlan/model/assets/__init__.py index b04732018..628ce28d3 100644 --- a/pyatlan/model/assets/__init__.py +++ b/pyatlan/model/assets/__init__.py @@ -26,7 +26,6 @@ "DataQuality", "BI", "Resource", - "Custom", "DataMesh", "SQL", "NoSQL", @@ -65,9 +64,10 @@ "Function", "TablePartition", "Column", + "DatabricksUnityCatalogTag", "SnowflakeStream", - "CalculationView", "Database", + "CalculationView", "Procedure", "SnowflakeTag", "CosmosMongoDB", @@ -136,6 +136,7 @@ "event_store": ["EventStore"], "insight": ["Insight"], "a_p_i": ["API"], + "business_process_model": ["BusinessProcessModel"], "google": ["Google"], 
"azure": ["Azure"], "a_w_s": ["AWS"], @@ -167,10 +168,6 @@ "cube_hierarchy": ["CubeHierarchy"], "cube_field": ["CubeField"], "cube_dimension": ["CubeDimension"], - "custom_field": ["CustomField"], - "custom_dataset": ["CustomDataset"], - "custom_table": ["CustomTable"], - "databricks_unity_catalog_tag": ["DatabricksUnityCatalogTag"], "kafka": ["Kafka"], "azure_service_bus": ["AzureServiceBus"], "dynamo_d_b": ["DynamoDB"], @@ -181,6 +178,7 @@ "a_p_i_object": ["APIObject"], "a_p_i_path": ["APIPath"], "a_p_i_field": ["APIField"], + "business_process_model_entity": ["BusinessProcessModelEntity"], "data_studio_asset": ["DataStudioAsset"], "s3_bucket": ["S3Bucket"], "s3_object": ["S3Object"], diff --git a/pyatlan/model/assets/__init__.pyi b/pyatlan/model/assets/__init__.pyi index 6b03e8fbe..ef8600ade 100644 --- a/pyatlan/model/assets/__init__.pyi +++ b/pyatlan/model/assets/__init__.pyi @@ -23,7 +23,6 @@ __all__ = [ "DataQuality", "BI", "Resource", - "Custom", "DataMesh", "SQL", "NoSQL", @@ -62,9 +61,10 @@ __all__ = [ "Function", "TablePartition", "Column", + "DatabricksUnityCatalogTag", "SnowflakeStream", - "CalculationView", "Database", + "CalculationView", "Procedure", "SnowflakeTag", "CosmosMongoDB", @@ -133,6 +133,7 @@ __all__ = [ "NoSQL", "Insight", "API", + "BusinessProcessModel", "Google", "Azure", "AWS", @@ -164,10 +165,6 @@ __all__ = [ "CubeHierarchy", "CubeField", "CubeDimension", - "CustomField", - "CustomDataset", - "CustomTable", - "DatabricksUnityCatalogTag", "Kafka", "AzureServiceBus", "DynamoDB", @@ -178,6 +175,7 @@ __all__ = [ "APIObject", "APIPath", "APIField", + "BusinessProcessModelEntity", "DataStudioAsset", "S3Bucket", "S3Object", @@ -321,6 +319,8 @@ from .business_policy import BusinessPolicy from .business_policy_exception import BusinessPolicyException from .business_policy_incident import BusinessPolicyIncident from .business_policy_log import BusinessPolicyLog +from .business_process_model import BusinessProcessModel +from 
.business_process_model_entity import BusinessProcessModelEntity from .cloud import Cloud from .cognite import Cognite from .cognite3_d_model import Cognite3DModel @@ -369,13 +369,13 @@ from .core.cosmos_mongo_d_b import CosmosMongoDB from .core.cosmos_mongo_d_b_account import CosmosMongoDBAccount from .core.cosmos_mongo_d_b_collection import CosmosMongoDBCollection from .core.cosmos_mongo_d_b_database import CosmosMongoDBDatabase -from .core.custom import Custom from .core.data_contract import DataContract from .core.data_domain import DataDomain from .core.data_mesh import DataMesh from .core.data_product import DataProduct from .core.data_quality import DataQuality from .core.database import Database +from .core.databricks_unity_catalog_tag import DatabricksUnityCatalogTag from .core.dbt import Dbt from .core.dbt_metric import DbtMetric from .core.dbt_model import DbtModel @@ -451,13 +451,9 @@ from .cube import Cube from .cube_dimension import CubeDimension from .cube_field import CubeField from .cube_hierarchy import CubeHierarchy -from .custom_dataset import CustomDataset -from .custom_field import CustomField -from .custom_table import CustomTable from .data_set import DataSet from .data_studio import DataStudio from .data_studio_asset import DataStudioAsset -from .databricks_unity_catalog_tag import DatabricksUnityCatalogTag from .dbt_column_process import DbtColumnProcess from .dbt_process import DbtProcess from .dbt_tag import DbtTag diff --git a/pyatlan/model/assets/business_process_model.py b/pyatlan/model/assets/business_process_model.py new file mode 100644 index 000000000..1f8e2db99 --- /dev/null +++ b/pyatlan/model/assets/business_process_model.py @@ -0,0 +1,70 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. 
+ + +from __future__ import annotations + +from typing import ClassVar, List, Optional + +from pydantic.v1 import Field, validator + +from pyatlan.model.fields.atlan_fields import KeywordField + +from .core.catalog import Catalog + + +class BusinessProcessModel(Catalog): + """Description""" + + type_name: str = Field(default="BusinessProcessModel", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "BusinessProcessModel": + raise ValueError("must be BusinessProcessModel") + return v + + def __setattr__(self, name, value): + if name in BusinessProcessModel._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + BUSINESS_PROCESS_MODEL_TYPE: ClassVar[KeywordField] = KeywordField( + "businessProcessModelType", "businessProcessModelType" + ) + """ + Type attribute for the BusinessProcessModel asset to help distinguish the entity type. + """ + + _convenience_properties: ClassVar[List[str]] = [ + "business_process_model_type", + ] + + @property + def business_process_model_type(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.business_process_model_type + ) + + @business_process_model_type.setter + def business_process_model_type(self, business_process_model_type: Optional[str]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.business_process_model_type = business_process_model_type + + class Attributes(Catalog.Attributes): + business_process_model_type: Optional[str] = Field(default=None, description="") + + attributes: BusinessProcessModel.Attributes = Field( + default_factory=lambda: BusinessProcessModel.Attributes(), + description=( + "Map of attributes in the instance and their values. " + "The specific keys of this map will vary by type, " + "so are described in the sub-types of this schema." 
+ ), + ) + + +BusinessProcessModel.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/business_process_model_entity.py b/pyatlan/model/assets/business_process_model_entity.py new file mode 100644 index 000000000..2975764b5 --- /dev/null +++ b/pyatlan/model/assets/business_process_model_entity.py @@ -0,0 +1,33 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2022 Atlan Pte. Ltd. + + +from __future__ import annotations + +from typing import ClassVar, List + +from pydantic.v1 import Field, validator + +from .business_process_model import BusinessProcessModel + + +class BusinessProcessModelEntity(BusinessProcessModel): + """Description""" + + type_name: str = Field(default="BusinessProcessModelEntity", allow_mutation=False) + + @validator("type_name") + def validate_type_name(cls, v): + if v != "BusinessProcessModelEntity": + raise ValueError("must be BusinessProcessModelEntity") + return v + + def __setattr__(self, name, value): + if name in BusinessProcessModelEntity._convenience_properties: + return object.__setattr__(self, name, value) + super().__setattr__(name, value) + + _convenience_properties: ClassVar[List[str]] = [] + + +BusinessProcessModelEntity.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/core/__init__.py b/pyatlan/model/assets/core/__init__.py index e4350b02f..05b2fdd32 100644 --- a/pyatlan/model/assets/core/__init__.py +++ b/pyatlan/model/assets/core/__init__.py @@ -31,13 +31,13 @@ from .cosmos_mongo_d_b_account import CosmosMongoDBAccount from .cosmos_mongo_d_b_collection import CosmosMongoDBCollection from .cosmos_mongo_d_b_database import CosmosMongoDBDatabase -from .custom import Custom from .data_contract import DataContract from .data_domain import DataDomain from .data_mesh import DataMesh from .data_product import DataProduct from .data_quality import DataQuality from .database import Database +from .databricks_unity_catalog_tag import DatabricksUnityCatalogTag from .dbt import Dbt from .dbt_metric 
import DbtMetric from .dbt_model import DbtModel @@ -133,7 +133,6 @@ DataQuality.Attributes.update_forward_refs(**localns) BI.Attributes.update_forward_refs(**localns) Resource.Attributes.update_forward_refs(**localns) -Custom.Attributes.update_forward_refs(**localns) DataMesh.Attributes.update_forward_refs(**localns) SQL.Attributes.update_forward_refs(**localns) NoSQL.Attributes.update_forward_refs(**localns) @@ -172,9 +171,10 @@ Function.Attributes.update_forward_refs(**localns) TablePartition.Attributes.update_forward_refs(**localns) Column.Attributes.update_forward_refs(**localns) +DatabricksUnityCatalogTag.Attributes.update_forward_refs(**localns) SnowflakeStream.Attributes.update_forward_refs(**localns) -CalculationView.Attributes.update_forward_refs(**localns) Database.Attributes.update_forward_refs(**localns) +CalculationView.Attributes.update_forward_refs(**localns) Procedure.Attributes.update_forward_refs(**localns) SnowflakeTag.Attributes.update_forward_refs(**localns) CosmosMongoDB.Attributes.update_forward_refs(**localns) diff --git a/pyatlan/model/assets/core/asset.py b/pyatlan/model/assets/core/asset.py index ad7139292..71f445a73 100644 --- a/pyatlan/model/assets/core/asset.py +++ b/pyatlan/model/assets/core/asset.py @@ -917,6 +917,12 @@ def __setattr__(self, name, value): """ List of Monte Carlo incident states associated with this asset. 
""" + ASSET_MC_IS_MONITORED: ClassVar[BooleanField] = BooleanField( + "assetMcIsMonitored", "assetMcIsMonitored" + ) + """ + Tracks whether this asset is monitored by MC or not + """ ASSET_MC_LAST_SYNC_RUN_AT: ClassVar[NumericField] = NumericField( "assetMcLastSyncRunAt", "assetMcLastSyncRunAt" ) @@ -1275,6 +1281,7 @@ def __setattr__(self, name, value): "asset_mc_incident_severities", "asset_mc_incident_priorities", "asset_mc_incident_states", + "asset_mc_is_monitored", "asset_mc_last_sync_run_at", "starred_by", "starred_details_list", @@ -2839,6 +2846,18 @@ def asset_mc_incident_states(self, asset_mc_incident_states: Optional[Set[str]]) self.attributes = self.Attributes() self.attributes.asset_mc_incident_states = asset_mc_incident_states + @property + def asset_mc_is_monitored(self) -> Optional[bool]: + return ( + None if self.attributes is None else self.attributes.asset_mc_is_monitored + ) + + @asset_mc_is_monitored.setter + def asset_mc_is_monitored(self, asset_mc_is_monitored: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.asset_mc_is_monitored = asset_mc_is_monitored + @property def asset_mc_last_sync_run_at(self) -> Optional[datetime]: return ( @@ -3647,6 +3666,7 @@ class Attributes(Referenceable.Attributes): asset_mc_incident_states: Optional[Set[str]] = Field( default=None, description="" ) + asset_mc_is_monitored: Optional[bool] = Field(default=None, description="") asset_mc_last_sync_run_at: Optional[datetime] = Field( default=None, description="" ) diff --git a/pyatlan/model/assets/core/catalog.py b/pyatlan/model/assets/core/catalog.py index 8ccf9c952..e15dfed84 100644 --- a/pyatlan/model/assets/core/catalog.py +++ b/pyatlan/model/assets/core/catalog.py @@ -29,17 +29,29 @@ def __setattr__(self, name, value): return object.__setattr__(self, name, value) super().__setattr__(name, value) + INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs") + """ + TBC + """ + 
INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "inputToAirflowTasks" + ) + """ + TBC + """ INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") """ TBC """ - OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "outputFromAirflowTasks" + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "modelImplementedAttributes" ) """ TBC """ - INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs") + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "outputFromAirflowTasks" + ) """ TBC """ @@ -55,12 +67,6 @@ def __setattr__(self, name, value): """ TBC """ - INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "inputToAirflowTasks" - ) - """ - TBC - """ OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField( "outputFromProcesses" ) @@ -69,15 +75,40 @@ def __setattr__(self, name, value): """ _convenience_properties: ClassVar[List[str]] = [ + "input_to_spark_jobs", + "input_to_airflow_tasks", "input_to_processes", + "model_implemented_attributes", "output_from_airflow_tasks", - "input_to_spark_jobs", "output_from_spark_jobs", "model_implemented_entities", - "input_to_airflow_tasks", "output_from_processes", ] + @property + def input_to_spark_jobs(self) -> Optional[List[SparkJob]]: + return None if self.attributes is None else self.attributes.input_to_spark_jobs + + @input_to_spark_jobs.setter + def input_to_spark_jobs(self, input_to_spark_jobs: Optional[List[SparkJob]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_spark_jobs = input_to_spark_jobs + + @property + def input_to_airflow_tasks(self) -> Optional[List[AirflowTask]]: + return ( + None if self.attributes is None else self.attributes.input_to_airflow_tasks + ) + + @input_to_airflow_tasks.setter + def input_to_airflow_tasks( + self, input_to_airflow_tasks: Optional[List[AirflowTask]] + ): + if self.attributes is None: + 
self.attributes = self.Attributes() + self.attributes.input_to_airflow_tasks = input_to_airflow_tasks + @property def input_to_processes(self) -> Optional[List[Process]]: return None if self.attributes is None else self.attributes.input_to_processes @@ -88,6 +119,22 @@ def input_to_processes(self, input_to_processes: Optional[List[Process]]): self.attributes = self.Attributes() self.attributes.input_to_processes = input_to_processes + @property + def model_implemented_attributes(self) -> Optional[List[ModelAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.model_implemented_attributes + ) + + @model_implemented_attributes.setter + def model_implemented_attributes( + self, model_implemented_attributes: Optional[List[ModelAttribute]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_implemented_attributes = model_implemented_attributes + @property def output_from_airflow_tasks(self) -> Optional[List[AirflowTask]]: return ( @@ -104,16 +151,6 @@ def output_from_airflow_tasks( self.attributes = self.Attributes() self.attributes.output_from_airflow_tasks = output_from_airflow_tasks - @property - def input_to_spark_jobs(self) -> Optional[List[SparkJob]]: - return None if self.attributes is None else self.attributes.input_to_spark_jobs - - @input_to_spark_jobs.setter - def input_to_spark_jobs(self, input_to_spark_jobs: Optional[List[SparkJob]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_spark_jobs = input_to_spark_jobs - @property def output_from_spark_jobs(self) -> Optional[List[SparkJob]]: return ( @@ -142,20 +179,6 @@ def model_implemented_entities( self.attributes = self.Attributes() self.attributes.model_implemented_entities = model_implemented_entities - @property - def input_to_airflow_tasks(self) -> Optional[List[AirflowTask]]: - return ( - None if self.attributes is None else self.attributes.input_to_airflow_tasks - ) - - 
@input_to_airflow_tasks.setter - def input_to_airflow_tasks( - self, input_to_airflow_tasks: Optional[List[AirflowTask]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_airflow_tasks = input_to_airflow_tasks - @property def output_from_processes(self) -> Optional[List[Process]]: return ( @@ -169,13 +192,19 @@ def output_from_processes(self, output_from_processes: Optional[List[Process]]): self.attributes.output_from_processes = output_from_processes class Attributes(Asset.Attributes): + input_to_spark_jobs: Optional[List[SparkJob]] = Field( + default=None, description="" + ) # relationship + input_to_airflow_tasks: Optional[List[AirflowTask]] = Field( + default=None, description="" + ) # relationship input_to_processes: Optional[List[Process]] = Field( default=None, description="" ) # relationship - output_from_airflow_tasks: Optional[List[AirflowTask]] = Field( + model_implemented_attributes: Optional[List[ModelAttribute]] = Field( default=None, description="" ) # relationship - input_to_spark_jobs: Optional[List[SparkJob]] = Field( + output_from_airflow_tasks: Optional[List[AirflowTask]] = Field( default=None, description="" ) # relationship output_from_spark_jobs: Optional[List[SparkJob]] = Field( @@ -184,9 +213,6 @@ class Attributes(Asset.Attributes): model_implemented_entities: Optional[List[ModelEntity]] = Field( default=None, description="" ) # relationship - input_to_airflow_tasks: Optional[List[AirflowTask]] = Field( - default=None, description="" - ) # relationship output_from_processes: Optional[List[Process]] = Field( default=None, description="" ) # relationship @@ -202,6 +228,7 @@ class Attributes(Asset.Attributes): from .airflow_task import AirflowTask # noqa +from .model_attribute import ModelAttribute # noqa from .model_entity import ModelEntity # noqa from .process import Process # noqa from .spark_job import SparkJob # noqa diff --git a/pyatlan/model/assets/core/custom.py 
b/pyatlan/model/assets/core/custom.py deleted file mode 100644 index b972182f5..000000000 --- a/pyatlan/model/assets/core/custom.py +++ /dev/null @@ -1,107 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -from typing import ClassVar, List, Optional - -from pydantic.v1 import Field, validator - -from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField - -from .catalog import Catalog - - -class Custom(Catalog): - """Description""" - - type_name: str = Field(default="Custom", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "Custom": - raise ValueError("must be Custom") - return v - - def __setattr__(self, name, value): - if name in Custom._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - CUSTOM_SOURCE_ID: ClassVar[KeywordField] = KeywordField( - "customSourceId", "customSourceId" - ) - """ - Unique identifier for the Custom asset from the source system. - """ - CUSTOM_DATASET_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "customDatasetName", "customDatasetName.keyword", "customDatasetName" - ) - """ - Simple name of the dataset in which this asset exists, or empty if it is itself a dataset. - """ - CUSTOM_DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "customDatasetQualifiedName", "customDatasetQualifiedName" - ) - """ - Unique name of the dataset in which this asset exists, or empty if it is itself a dataset. 
- """ - - _convenience_properties: ClassVar[List[str]] = [ - "custom_source_id", - "custom_dataset_name", - "custom_dataset_qualified_name", - ] - - @property - def custom_source_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.custom_source_id - - @custom_source_id.setter - def custom_source_id(self, custom_source_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_source_id = custom_source_id - - @property - def custom_dataset_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.custom_dataset_name - - @custom_dataset_name.setter - def custom_dataset_name(self, custom_dataset_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_dataset_name = custom_dataset_name - - @property - def custom_dataset_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.custom_dataset_qualified_name - ) - - @custom_dataset_qualified_name.setter - def custom_dataset_qualified_name( - self, custom_dataset_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_dataset_qualified_name = custom_dataset_qualified_name - - class Attributes(Catalog.Attributes): - custom_source_id: Optional[str] = Field(default=None, description="") - custom_dataset_name: Optional[str] = Field(default=None, description="") - custom_dataset_qualified_name: Optional[str] = Field( - default=None, description="" - ) - - attributes: Custom.Attributes = Field( - default_factory=lambda: Custom.Attributes(), - description=( - "Map of attributes in the instance and their values. " - "The specific keys of this map will vary by type, " - "so are described in the sub-types of this schema." 
- ), - ) diff --git a/pyatlan/model/assets/databricks_unity_catalog_tag.py b/pyatlan/model/assets/core/databricks_unity_catalog_tag.py similarity index 94% rename from pyatlan/model/assets/databricks_unity_catalog_tag.py rename to pyatlan/model/assets/core/databricks_unity_catalog_tag.py index 199a3c9f9..bd9428f81 100644 --- a/pyatlan/model/assets/databricks_unity_catalog_tag.py +++ b/pyatlan/model/assets/core/databricks_unity_catalog_tag.py @@ -18,7 +18,7 @@ ) from pyatlan.model.structs import SourceTagAttribute -from .core.tag import Tag +from .tag import Tag class DatabricksUnityCatalogTag(Tag): @@ -59,12 +59,6 @@ def __setattr__(self, name, value): """ Name of the classification in Atlan that is mapped to this tag. """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") """ Number of times this asset has been queried. 
@@ -184,7 +178,6 @@ def __setattr__(self, name, value): "tag_attributes", "tag_allowed_values", "mapped_atlan_tag_name", - "asset_application_qualified_name", "query_count", "query_user_count", "query_user_map", @@ -250,24 +243,6 @@ def mapped_atlan_tag_name(self, mapped_atlan_tag_name: Optional[str]): self.attributes = self.Attributes() self.attributes.mapped_atlan_tag_name = mapped_atlan_tag_name - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - @property def query_count(self) -> Optional[int]: return None if self.attributes is None else self.attributes.query_count @@ -501,9 +476,6 @@ class Attributes(Tag.Attributes): ) tag_allowed_values: Optional[Set[str]] = Field(default=None, description="") mapped_atlan_tag_name: Optional[str] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) query_count: Optional[int] = Field(default=None, description="") query_user_count: Optional[int] = Field(default=None, description="") query_user_map: Optional[Dict[str, int]] = Field(default=None, description="") @@ -548,8 +520,6 @@ class Attributes(Tag.Attributes): ) -from .core.dbt_model import DbtModel # noqa -from .core.dbt_source import DbtSource # noqa -from .core.dbt_test import DbtTest # noqa - -DatabricksUnityCatalogTag.Attributes.update_forward_refs() +from .dbt_model import DbtModel # noqa +from .dbt_source import DbtSource # noqa +from .dbt_test import DbtTest # noqa diff --git a/pyatlan/model/assets/core/m_c_monitor.py 
b/pyatlan/model/assets/core/m_c_monitor.py index c3f37021b..d12bfe509 100644 --- a/pyatlan/model/assets/core/m_c_monitor.py +++ b/pyatlan/model/assets/core/m_c_monitor.py @@ -150,6 +150,12 @@ def __setattr__(self, name, value): """ Priority of this monitor. """ + MC_MONITOR_IS_OOTB: ClassVar[BooleanField] = BooleanField( + "mcMonitorIsOotb", "mcMonitorIsOotb" + ) + """ + Whether the monitor is OOTB or not + """ MC_MONITOR_ASSETS: ClassVar[RelationField] = RelationField("mcMonitorAssets") """ @@ -176,6 +182,7 @@ def __setattr__(self, name, value): "mc_monitor_incident_count", "mc_monitor_alert_count", "mc_monitor_priority", + "mc_monitor_is_ootb", "mc_monitor_assets", ] @@ -431,6 +438,16 @@ def mc_monitor_priority(self, mc_monitor_priority: Optional[str]): self.attributes = self.Attributes() self.attributes.mc_monitor_priority = mc_monitor_priority + @property + def mc_monitor_is_ootb(self) -> Optional[bool]: + return None if self.attributes is None else self.attributes.mc_monitor_is_ootb + + @mc_monitor_is_ootb.setter + def mc_monitor_is_ootb(self, mc_monitor_is_ootb: Optional[bool]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.mc_monitor_is_ootb = mc_monitor_is_ootb + @property def mc_monitor_assets(self) -> Optional[List[Asset]]: return None if self.attributes is None else self.attributes.mc_monitor_assets @@ -471,6 +488,7 @@ class Attributes(MonteCarlo.Attributes): mc_monitor_incident_count: Optional[int] = Field(default=None, description="") mc_monitor_alert_count: Optional[int] = Field(default=None, description="") mc_monitor_priority: Optional[str] = Field(default=None, description="") + mc_monitor_is_ootb: Optional[bool] = Field(default=None, description="") mc_monitor_assets: Optional[List[Asset]] = Field( default=None, description="" ) # relationship diff --git a/pyatlan/model/assets/core/model_attribute.py b/pyatlan/model/assets/core/model_attribute.py index fce0a30e6..e0dbc5f28 100644 --- 
a/pyatlan/model/assets/core/model_attribute.py +++ b/pyatlan/model/assets/core/model_attribute.py @@ -83,6 +83,12 @@ def __setattr__(self, name, value): When true, this attribute has relationships with other attributes. """ + MODEL_ATTRIBUTE_IMPLEMENTED_BY_ASSETS: ClassVar[RelationField] = RelationField( + "modelAttributeImplementedByAssets" + ) + """ + TBC + """ MODEL_ATTRIBUTE_RELATED_TO_ATTRIBUTES: ClassVar[RelationField] = RelationField( "modelAttributeRelatedToAttributes" ) @@ -123,6 +129,7 @@ def __setattr__(self, name, value): "model_attribute_scale", "model_attribute_data_type", "model_attribute_has_relationships", + "model_attribute_implemented_by_assets", "model_attribute_related_to_attributes", "model_attribute_entities", "model_attribute_related_from_attributes", @@ -244,6 +251,24 @@ def model_attribute_has_relationships( model_attribute_has_relationships ) + @property + def model_attribute_implemented_by_assets(self) -> Optional[List[Catalog]]: + return ( + None + if self.attributes is None + else self.attributes.model_attribute_implemented_by_assets + ) + + @model_attribute_implemented_by_assets.setter + def model_attribute_implemented_by_assets( + self, model_attribute_implemented_by_assets: Optional[List[Catalog]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_attribute_implemented_by_assets = ( + model_attribute_implemented_by_assets + ) + @property def model_attribute_related_to_attributes( self, @@ -355,6 +380,9 @@ class Attributes(Model.Attributes): model_attribute_has_relationships: Optional[bool] = Field( default=None, description="" ) + model_attribute_implemented_by_assets: Optional[List[Catalog]] = Field( + default=None, description="" + ) # relationship model_attribute_related_to_attributes: Optional[ List[ModelAttributeAssociation] ] = Field( @@ -385,5 +413,6 @@ class Attributes(Model.Attributes): ) +from .catalog import Catalog # noqa from .model_attribute_association import 
ModelAttributeAssociation # noqa from .model_entity import ModelEntity # noqa diff --git a/pyatlan/model/assets/core/model_attribute_association.py b/pyatlan/model/assets/core/model_attribute_association.py index 3d051015c..d2865c604 100644 --- a/pyatlan/model/assets/core/model_attribute_association.py +++ b/pyatlan/model/assets/core/model_attribute_association.py @@ -8,7 +8,6 @@ from pydantic.v1 import Field, validator -from pyatlan.model.enums import ModelCardinalityType from pyatlan.model.fields.atlan_fields import KeywordField, RelationField from .model import Model @@ -30,18 +29,6 @@ def __setattr__(self, name, value): return object.__setattr__(self, name, value) super().__setattr__(name, value) - MODEL_ATTRIBUTE_ASSOCIATION_CARDINALITY: ClassVar[KeywordField] = KeywordField( - "modelAttributeAssociationCardinality", "modelAttributeAssociationCardinality" - ) - """ - Cardinality of the data attribute association. - """ - MODEL_ATTRIBUTE_ASSOCIATION_LABEL: ClassVar[KeywordField] = KeywordField( - "modelAttributeAssociationLabel", "modelAttributeAssociationLabel" - ) - """ - Label of the data attribute association. 
- """ MODEL_ATTRIBUTE_ASSOCIATION_TO_QUALIFIED_NAME: ClassVar[KeywordField] = ( KeywordField( "modelAttributeAssociationToQualifiedName", @@ -81,8 +68,6 @@ def __setattr__(self, name, value): """ _convenience_properties: ClassVar[List[str]] = [ - "model_attribute_association_cardinality", - "model_attribute_association_label", "model_attribute_association_to_qualified_name", "model_attribute_association_from_qualified_name", "model_entity_association_qualified_name", @@ -90,42 +75,6 @@ def __setattr__(self, name, value): "model_attribute_association_to", ] - @property - def model_attribute_association_cardinality(self) -> Optional[ModelCardinalityType]: - return ( - None - if self.attributes is None - else self.attributes.model_attribute_association_cardinality - ) - - @model_attribute_association_cardinality.setter - def model_attribute_association_cardinality( - self, model_attribute_association_cardinality: Optional[ModelCardinalityType] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model_attribute_association_cardinality = ( - model_attribute_association_cardinality - ) - - @property - def model_attribute_association_label(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.model_attribute_association_label - ) - - @model_attribute_association_label.setter - def model_attribute_association_label( - self, model_attribute_association_label: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.model_attribute_association_label = ( - model_attribute_association_label - ) - @property def model_attribute_association_to_qualified_name(self) -> Optional[str]: return ( @@ -215,12 +164,6 @@ def model_attribute_association_to( self.attributes.model_attribute_association_to = model_attribute_association_to class Attributes(Model.Attributes): - model_attribute_association_cardinality: Optional[ModelCardinalityType] = Field( - 
default=None, description="" - ) - model_attribute_association_label: Optional[str] = Field( - default=None, description="" - ) model_attribute_association_to_qualified_name: Optional[str] = Field( default=None, description="" ) diff --git a/pyatlan/model/assets/core/model_entity_association.py b/pyatlan/model/assets/core/model_entity_association.py index 7b906261c..1789255d9 100644 --- a/pyatlan/model/assets/core/model_entity_association.py +++ b/pyatlan/model/assets/core/model_entity_association.py @@ -9,7 +9,7 @@ from pydantic.v1 import Field, validator from pyatlan.model.enums import ModelCardinalityType -from pyatlan.model.fields.atlan_fields import KeywordField, RelationField +from pyatlan.model.fields.atlan_fields import KeywordField, NumericField, RelationField from .model import Model @@ -34,13 +34,13 @@ def __setattr__(self, name, value): "modelEntityAssociationCardinality", "modelEntityAssociationCardinality" ) """ - Cardinality of the data entity association. + (Deprecated) Cardinality of the data entity association. """ MODEL_ENTITY_ASSOCIATION_LABEL: ClassVar[KeywordField] = KeywordField( "modelEntityAssociationLabel", "modelEntityAssociationLabel" ) """ - Label of the data entity association. + (Deprecated) Label of the data entity association. """ MODEL_ENTITY_ASSOCIATION_TO_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "modelEntityAssociationToQualifiedName", "modelEntityAssociationToQualifiedName" @@ -48,6 +48,26 @@ def __setattr__(self, name, value): """ Unique name of the association to which this entity is related. """ + MODEL_ENTITY_ASSOCIATION_TO_LABEL: ClassVar[KeywordField] = KeywordField( + "modelEntityAssociationToLabel", "modelEntityAssociationToLabel" + ) + """ + Label when read from the association to which this entity is related. 
+ """ + MODEL_ENTITY_ASSOCIATION_TO_MIN_CARDINALITY: ClassVar[NumericField] = NumericField( + "modelEntityAssociationToMinCardinality", + "modelEntityAssociationToMinCardinality", + ) + """ + Minimum cardinality of the data entity to which the association exists. + """ + MODEL_ENTITY_ASSOCIATION_TO_MAX_CARDINALITY: ClassVar[NumericField] = NumericField( + "modelEntityAssociationToMaxCardinality", + "modelEntityAssociationToMaxCardinality", + ) + """ + Maximum cardinality of the data entity to which the association exists. + """ MODEL_ENTITY_ASSOCIATION_FROM_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( "modelEntityAssociationFromQualifiedName", "modelEntityAssociationFromQualifiedName", @@ -55,6 +75,30 @@ def __setattr__(self, name, value): """ Unique name of the association from which this entity is related. """ + MODEL_ENTITY_ASSOCIATION_FROM_LABEL: ClassVar[KeywordField] = KeywordField( + "modelEntityAssociationFromLabel", "modelEntityAssociationFromLabel" + ) + """ + Label when read from the association from which this entity is related. + """ + MODEL_ENTITY_ASSOCIATION_FROM_MIN_CARDINALITY: ClassVar[NumericField] = ( + NumericField( + "modelEntityAssociationFromMinCardinality", + "modelEntityAssociationFromMinCardinality", + ) + ) + """ + Minimum cardinality of the data entity from which the association exists. + """ + MODEL_ENTITY_ASSOCIATION_FROM_MAX_CARDINALITY: ClassVar[NumericField] = ( + NumericField( + "modelEntityAssociationFromMaxCardinality", + "modelEntityAssociationFromMaxCardinality", + ) + ) + """ + Maximum cardinality of the data entity from which the association exists. 
+ """ MODEL_ENTITY_ASSOCIATION_TO: ClassVar[RelationField] = RelationField( "modelEntityAssociationTo" @@ -73,7 +117,13 @@ def __setattr__(self, name, value): "model_entity_association_cardinality", "model_entity_association_label", "model_entity_association_to_qualified_name", + "model_entity_association_to_label", + "model_entity_association_to_min_cardinality", + "model_entity_association_to_max_cardinality", "model_entity_association_from_qualified_name", + "model_entity_association_from_label", + "model_entity_association_from_min_cardinality", + "model_entity_association_from_max_cardinality", "model_entity_association_to", "model_entity_association_from", ] @@ -130,6 +180,60 @@ def model_entity_association_to_qualified_name( model_entity_association_to_qualified_name ) + @property + def model_entity_association_to_label(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.model_entity_association_to_label + ) + + @model_entity_association_to_label.setter + def model_entity_association_to_label( + self, model_entity_association_to_label: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_entity_association_to_label = ( + model_entity_association_to_label + ) + + @property + def model_entity_association_to_min_cardinality(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.model_entity_association_to_min_cardinality + ) + + @model_entity_association_to_min_cardinality.setter + def model_entity_association_to_min_cardinality( + self, model_entity_association_to_min_cardinality: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_entity_association_to_min_cardinality = ( + model_entity_association_to_min_cardinality + ) + + @property + def model_entity_association_to_max_cardinality(self) -> Optional[int]: + return ( + None + if self.attributes is None + else 
self.attributes.model_entity_association_to_max_cardinality + ) + + @model_entity_association_to_max_cardinality.setter + def model_entity_association_to_max_cardinality( + self, model_entity_association_to_max_cardinality: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_entity_association_to_max_cardinality = ( + model_entity_association_to_max_cardinality + ) + @property def model_entity_association_from_qualified_name(self) -> Optional[str]: return ( @@ -148,6 +252,60 @@ def model_entity_association_from_qualified_name( model_entity_association_from_qualified_name ) + @property + def model_entity_association_from_label(self) -> Optional[str]: + return ( + None + if self.attributes is None + else self.attributes.model_entity_association_from_label + ) + + @model_entity_association_from_label.setter + def model_entity_association_from_label( + self, model_entity_association_from_label: Optional[str] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_entity_association_from_label = ( + model_entity_association_from_label + ) + + @property + def model_entity_association_from_min_cardinality(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.model_entity_association_from_min_cardinality + ) + + @model_entity_association_from_min_cardinality.setter + def model_entity_association_from_min_cardinality( + self, model_entity_association_from_min_cardinality: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_entity_association_from_min_cardinality = ( + model_entity_association_from_min_cardinality + ) + + @property + def model_entity_association_from_max_cardinality(self) -> Optional[int]: + return ( + None + if self.attributes is None + else self.attributes.model_entity_association_from_max_cardinality + ) + + 
@model_entity_association_from_max_cardinality.setter + def model_entity_association_from_max_cardinality( + self, model_entity_association_from_max_cardinality: Optional[int] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_entity_association_from_max_cardinality = ( + model_entity_association_from_max_cardinality + ) + @property def model_entity_association_to(self) -> Optional[ModelEntity]: return ( @@ -190,9 +348,27 @@ class Attributes(Model.Attributes): model_entity_association_to_qualified_name: Optional[str] = Field( default=None, description="" ) + model_entity_association_to_label: Optional[str] = Field( + default=None, description="" + ) + model_entity_association_to_min_cardinality: Optional[int] = Field( + default=None, description="" + ) + model_entity_association_to_max_cardinality: Optional[int] = Field( + default=None, description="" + ) model_entity_association_from_qualified_name: Optional[str] = Field( default=None, description="" ) + model_entity_association_from_label: Optional[str] = Field( + default=None, description="" + ) + model_entity_association_from_min_cardinality: Optional[int] = Field( + default=None, description="" + ) + model_entity_association_from_max_cardinality: Optional[int] = Field( + default=None, description="" + ) model_entity_association_to: Optional[ModelEntity] = Field( default=None, description="" ) # relationship diff --git a/pyatlan/model/assets/custom_dataset.py b/pyatlan/model/assets/custom_dataset.py deleted file mode 100644 index 6293fa84f..000000000 --- a/pyatlan/model/assets/custom_dataset.py +++ /dev/null @@ -1,68 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
- - -from __future__ import annotations - -from typing import ClassVar, List, Optional - -from pydantic.v1 import Field, validator - -from pyatlan.model.fields.atlan_fields import RelationField - -from .core.custom import Custom - - -class CustomDataset(Custom): - """Description""" - - type_name: str = Field(default="CustomDataset", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "CustomDataset": - raise ValueError("must be CustomDataset") - return v - - def __setattr__(self, name, value): - if name in CustomDataset._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - CUSTOM_TABLES: ClassVar[RelationField] = RelationField("customTables") - """ - TBC - """ - - _convenience_properties: ClassVar[List[str]] = [ - "custom_tables", - ] - - @property - def custom_tables(self) -> Optional[List[CustomTable]]: - return None if self.attributes is None else self.attributes.custom_tables - - @custom_tables.setter - def custom_tables(self, custom_tables: Optional[List[CustomTable]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_tables = custom_tables - - class Attributes(Custom.Attributes): - custom_tables: Optional[List[CustomTable]] = Field( - default=None, description="" - ) # relationship - - attributes: CustomDataset.Attributes = Field( - default_factory=lambda: CustomDataset.Attributes(), - description=( - "Map of attributes in the instance and their values. " - "The specific keys of this map will vary by type, " - "so are described in the sub-types of this schema." 
- ), - ) - - -from .custom_table import CustomTable # noqa - -CustomDataset.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/custom_field.py b/pyatlan/model/assets/custom_field.py deleted file mode 100644 index 1e49c5849..000000000 --- a/pyatlan/model/assets/custom_field.py +++ /dev/null @@ -1,1541 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. - - -from __future__ import annotations - -from datetime import datetime -from typing import ClassVar, Dict, List, Optional, Set - -from pydantic.v1 import Field, validator - -from pyatlan.model.enums import CustomTemperatureType -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - NumericField, - RelationField, - TextField, -) -from pyatlan.model.structs import ColumnValueFrequencyMap, Histogram - -from .core.column import Column - - -class CustomField(Column): - """Description""" - - type_name: str = Field(default="CustomField", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "CustomField": - raise ValueError("must be CustomField") - return v - - def __setattr__(self, name, value): - if name in CustomField._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - CUSTOM_TEMPERATURE: ClassVar[KeywordField] = KeywordField( - "customTemperature", "customTemperature" - ) - """ - Temperature of the CustomTable asset. - """ - DATA_TYPE: ClassVar[KeywordTextField] = KeywordTextField( - "dataType", "dataType", "dataType.text" - ) - """ - Data type of values in this column. - """ - SUB_DATA_TYPE: ClassVar[KeywordField] = KeywordField("subDataType", "subDataType") - """ - Sub-data type of this column. 
- """ - RAW_DATA_TYPE_DEFINITION: ClassVar[TextField] = TextField( - "rawDataTypeDefinition", "rawDataTypeDefinition" - ) - """ - - """ - ORDER: ClassVar[NumericField] = NumericField("order", "order") - """ - Order (position) in which this column appears in the table (starting at 1). - """ - NESTED_COLUMN_ORDER: ClassVar[KeywordField] = KeywordField( - "nestedColumnOrder", "nestedColumnOrder" - ) - """ - Order (position) in which this column appears in the nested Column (nest level starts at 1). - """ - NESTED_COLUMN_COUNT: ClassVar[NumericField] = NumericField( - "nestedColumnCount", "nestedColumnCount" - ) - """ - Number of columns nested within this (STRUCT or NESTED) column. - """ - COLUMN_HIERARCHY: ClassVar[KeywordField] = KeywordField( - "columnHierarchy", "columnHierarchy" - ) - """ - List of top-level upstream nested columns. - """ - IS_PARTITION: ClassVar[BooleanField] = BooleanField("isPartition", "isPartition") - """ - Whether this column is a partition column (true) or not (false). - """ - PARTITION_ORDER: ClassVar[NumericField] = NumericField( - "partitionOrder", "partitionOrder" - ) - """ - Order (position) of this partition column in the table. - """ - IS_CLUSTERED: ClassVar[BooleanField] = BooleanField("isClustered", "isClustered") - """ - Whether this column is a clustered column (true) or not (false). - """ - IS_PRIMARY: ClassVar[BooleanField] = BooleanField("isPrimary", "isPrimary") - """ - When true, this column is the primary key for the table. - """ - IS_FOREIGN: ClassVar[BooleanField] = BooleanField("isForeign", "isForeign") - """ - When true, this column is a foreign key to another table. NOTE: this must be true when using the foreignKeyTo relationship to specify columns that refer to this column as a foreign key. - """ # noqa: E501 - IS_INDEXED: ClassVar[BooleanField] = BooleanField("isIndexed", "isIndexed") - """ - When true, this column is indexed in the database. 
- """ - IS_SORT: ClassVar[BooleanField] = BooleanField("isSort", "isSort") - """ - Whether this column is a sort column (true) or not (false). - """ - IS_DIST: ClassVar[BooleanField] = BooleanField("isDist", "isDist") - """ - Whether this column is a distribution column (true) or not (false). - """ - IS_PINNED: ClassVar[BooleanField] = BooleanField("isPinned", "isPinned") - """ - Whether this column is pinned (true) or not (false). - """ - PINNED_BY: ClassVar[KeywordField] = KeywordField("pinnedBy", "pinnedBy") - """ - User who pinned this column. - """ - PINNED_AT: ClassVar[NumericField] = NumericField("pinnedAt", "pinnedAt") - """ - Time (epoch) at which this column was pinned, in milliseconds. - """ - PRECISION: ClassVar[NumericField] = NumericField("precision", "precision") - """ - Total number of digits allowed, when the dataType is numeric. - """ - DEFAULT_VALUE: ClassVar[TextField] = TextField("defaultValue", "defaultValue") - """ - Default value for this column. - """ - IS_NULLABLE: ClassVar[BooleanField] = BooleanField("isNullable", "isNullable") - """ - When true, the values in this column can be null. - """ - NUMERIC_SCALE: ClassVar[NumericField] = NumericField("numericScale", "numericScale") - """ - Number of digits allowed to the right of the decimal point. - """ - MAX_LENGTH: ClassVar[NumericField] = NumericField("maxLength", "maxLength") - """ - Maximum length of a value in this column. - """ - VALIDATIONS: ClassVar[KeywordField] = KeywordField("validations", "validations") - """ - Validations for this column. - """ - PARENT_COLUMN_QUALIFIED_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "parentColumnQualifiedName", - "parentColumnQualifiedName", - "parentColumnQualifiedName.text", - ) - """ - Unique name of the column this column is nested within, for STRUCT and NESTED columns. 
- """ - PARENT_COLUMN_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "parentColumnName", "parentColumnName.keyword", "parentColumnName" - ) - """ - Simple name of the column this column is nested within, for STRUCT and NESTED columns. - """ - COLUMN_DISTINCT_VALUES_COUNT: ClassVar[NumericField] = NumericField( - "columnDistinctValuesCount", "columnDistinctValuesCount" - ) - """ - Number of rows that contain distinct values. - """ - COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( - "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" - ) - """ - Number of rows that contain distinct values. - """ - COLUMN_HISTOGRAM: ClassVar[KeywordField] = KeywordField( - "columnHistogram", "columnHistogram" - ) - """ - List of values in a histogram that represents the contents of this column. - """ - COLUMN_MAX: ClassVar[NumericField] = NumericField("columnMax", "columnMax") - """ - Greatest value in a numeric column. - """ - COLUMN_MIN: ClassVar[NumericField] = NumericField("columnMin", "columnMin") - """ - Least value in a numeric column. - """ - COLUMN_MEAN: ClassVar[NumericField] = NumericField("columnMean", "columnMean") - """ - Arithmetic mean of the values in a numeric column. - """ - COLUMN_SUM: ClassVar[NumericField] = NumericField("columnSum", "columnSum") - """ - Calculated sum of the values in a numeric column. - """ - COLUMN_MEDIAN: ClassVar[NumericField] = NumericField("columnMedian", "columnMedian") - """ - Calculated median of the values in a numeric column. - """ - COLUMN_STANDARD_DEVIATION: ClassVar[NumericField] = NumericField( - "columnStandardDeviation", "columnStandardDeviation" - ) - """ - Calculated standard deviation of the values in a numeric column. - """ - COLUMN_UNIQUE_VALUES_COUNT: ClassVar[NumericField] = NumericField( - "columnUniqueValuesCount", "columnUniqueValuesCount" - ) - """ - Number of rows in which a value in this column appears only once. 
- """ - COLUMN_UNIQUE_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( - "columnUniqueValuesCountLong", "columnUniqueValuesCountLong" - ) - """ - Number of rows in which a value in this column appears only once. - """ - COLUMN_AVERAGE: ClassVar[NumericField] = NumericField( - "columnAverage", "columnAverage" - ) - """ - Average value in this column. - """ - COLUMN_AVERAGE_LENGTH: ClassVar[NumericField] = NumericField( - "columnAverageLength", "columnAverageLength" - ) - """ - Average length of values in a string column. - """ - COLUMN_DUPLICATE_VALUES_COUNT: ClassVar[NumericField] = NumericField( - "columnDuplicateValuesCount", "columnDuplicateValuesCount" - ) - """ - Number of rows that contain duplicate values. - """ - COLUMN_DUPLICATE_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( - "columnDuplicateValuesCountLong", "columnDuplicateValuesCountLong" - ) - """ - Number of rows that contain duplicate values. - """ - COLUMN_MAXIMUM_STRING_LENGTH: ClassVar[NumericField] = NumericField( - "columnMaximumStringLength", "columnMaximumStringLength" - ) - """ - Length of the longest value in a string column. - """ - COLUMN_MAXS: ClassVar[TextField] = TextField("columnMaxs", "columnMaxs") - """ - List of the greatest values in a column. - """ - COLUMN_MINIMUM_STRING_LENGTH: ClassVar[NumericField] = NumericField( - "columnMinimumStringLength", "columnMinimumStringLength" - ) - """ - Length of the shortest value in a string column. - """ - COLUMN_MINS: ClassVar[TextField] = TextField("columnMins", "columnMins") - """ - List of the least values in a column. - """ - COLUMN_MISSING_VALUES_COUNT: ClassVar[NumericField] = NumericField( - "columnMissingValuesCount", "columnMissingValuesCount" - ) - """ - Number of rows in a column that do not contain content. 
- """ - COLUMN_MISSING_VALUES_COUNT_LONG: ClassVar[NumericField] = NumericField( - "columnMissingValuesCountLong", "columnMissingValuesCountLong" - ) - """ - Number of rows in a column that do not contain content. - """ - COLUMN_MISSING_VALUES_PERCENTAGE: ClassVar[NumericField] = NumericField( - "columnMissingValuesPercentage", "columnMissingValuesPercentage" - ) - """ - Percentage of rows in a column that do not contain content. - """ - COLUMN_UNIQUENESS_PERCENTAGE: ClassVar[NumericField] = NumericField( - "columnUniquenessPercentage", "columnUniquenessPercentage" - ) - """ - Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique. - """ # noqa: E501 - COLUMN_VARIANCE: ClassVar[NumericField] = NumericField( - "columnVariance", "columnVariance" - ) - """ - Calculated variance of the values in a numeric column. - """ - COLUMN_TOP_VALUES: ClassVar[KeywordField] = KeywordField( - "columnTopValues", "columnTopValues" - ) - """ - List of top values in this column. - """ - COLUMN_DEPTH_LEVEL: ClassVar[NumericField] = NumericField( - "columnDepthLevel", "columnDepthLevel" - ) - """ - Level of nesting of this column, used for STRUCT and NESTED columns. - """ - NOSQL_COLLECTION_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "nosqlCollectionName", "nosqlCollectionName.keyword", "nosqlCollectionName" - ) - """ - Simple name of the cosmos/mongo collection in which this SQL asset (column) exists, or empty if it does not exist within a cosmos/mongo collection. - """ # noqa: E501 - NOSQL_COLLECTION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "nosqlCollectionQualifiedName", "nosqlCollectionQualifiedName" - ) - """ - Unique name of the cosmos/mongo collection in which this SQL asset (column) exists, or empty if it does not exist within a cosmos/mongo collection. 
- """ # noqa: E501 - QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") - """ - Number of times this asset has been queried. - """ - QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( - "queryUserCount", "queryUserCount" - ) - """ - Number of unique users who have queried this asset. - """ - QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( - "queryUserMap", "queryUserMap" - ) - """ - Map of unique users who have queried this asset to the number of times they have queried it. - """ - QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( - "queryCountUpdatedAt", "queryCountUpdatedAt" - ) - """ - Time (epoch) at which the query count was last updated, in milliseconds. - """ - DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "databaseName", "databaseName.keyword", "databaseName" - ) - """ - Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. - """ - DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "databaseQualifiedName", "databaseQualifiedName" - ) - """ - Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. - """ - SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "schemaName", "schemaName.keyword", "schemaName" - ) - """ - Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "schemaQualifiedName", "schemaQualifiedName" - ) - """ - Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "tableName", "tableName.keyword", "tableName" - ) - """ - Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. 
- """ - TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "tableQualifiedName", "tableQualifiedName" - ) - """ - Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. - """ - VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "viewName", "viewName.keyword", "viewName" - ) - """ - Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. - """ - VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "viewQualifiedName", "viewQualifiedName" - ) - """ - Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. - """ - CALCULATION_VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "calculationViewName", "calculationViewName.keyword", "calculationViewName" - ) - """ - Simple name of the calculation view in which this SQL asset exists, or empty if it does not exist within a calculation view. - """ # noqa: E501 - CALCULATION_VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "calculationViewQualifiedName", "calculationViewQualifiedName" - ) - """ - Unique name of the calculation view in which this SQL asset exists, or empty if it does not exist within a calculation view. - """ # noqa: E501 - IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") - """ - Whether this asset has been profiled (true) or not (false). - """ - LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( - "lastProfiledAt", "lastProfiledAt" - ) - """ - Time (epoch) at which this asset was last profiled, in milliseconds. - """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. 
- """ - CUSTOM_SOURCE_ID: ClassVar[KeywordField] = KeywordField( - "customSourceId", "customSourceId" - ) - """ - Unique identifier for the Custom asset from the source system. - """ - CUSTOM_DATASET_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "customDatasetName", "customDatasetName.keyword", "customDatasetName" - ) - """ - Simple name of the dataset in which this asset exists, or empty if it is itself a dataset. - """ - CUSTOM_DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "customDatasetQualifiedName", "customDatasetQualifiedName" - ) - """ - Unique name of the dataset in which this asset exists, or empty if it is itself a dataset. - """ - - CUSTOM_TABLE: ClassVar[RelationField] = RelationField("customTable") - """ - TBC - """ - - _convenience_properties: ClassVar[List[str]] = [ - "custom_temperature", - "data_type", - "sub_data_type", - "raw_data_type_definition", - "order", - "nested_column_order", - "nested_column_count", - "column_hierarchy", - "is_partition", - "partition_order", - "is_clustered", - "is_primary", - "is_foreign", - "is_indexed", - "is_sort", - "is_dist", - "is_pinned", - "pinned_by", - "pinned_at", - "precision", - "default_value", - "is_nullable", - "numeric_scale", - "max_length", - "validations", - "parent_column_qualified_name", - "parent_column_name", - "column_distinct_values_count", - "column_distinct_values_count_long", - "column_histogram", - "column_max", - "column_min", - "column_mean", - "column_sum", - "column_median", - "column_standard_deviation", - "column_unique_values_count", - "column_unique_values_count_long", - "column_average", - "column_average_length", - "column_duplicate_values_count", - "column_duplicate_values_count_long", - "column_maximum_string_length", - "column_maxs", - "column_minimum_string_length", - "column_mins", - "column_missing_values_count", - "column_missing_values_count_long", - "column_missing_values_percentage", - "column_uniqueness_percentage", - "column_variance", - 
"column_top_values", - "column_depth_level", - "nosql_collection_name", - "nosql_collection_qualified_name", - "query_count", - "query_user_count", - "query_user_map", - "query_count_updated_at", - "database_name", - "database_qualified_name", - "schema_name", - "schema_qualified_name", - "table_name", - "table_qualified_name", - "view_name", - "view_qualified_name", - "calculation_view_name", - "calculation_view_qualified_name", - "is_profiled", - "last_profiled_at", - "asset_application_qualified_name", - "custom_source_id", - "custom_dataset_name", - "custom_dataset_qualified_name", - "custom_table", - ] - - @property - def custom_temperature(self) -> Optional[CustomTemperatureType]: - return None if self.attributes is None else self.attributes.custom_temperature - - @custom_temperature.setter - def custom_temperature(self, custom_temperature: Optional[CustomTemperatureType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_temperature = custom_temperature - - @property - def data_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.data_type - - @data_type.setter - def data_type(self, data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.data_type = data_type - - @property - def sub_data_type(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.sub_data_type - - @sub_data_type.setter - def sub_data_type(self, sub_data_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.sub_data_type = sub_data_type - - @property - def raw_data_type_definition(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.raw_data_type_definition - ) - - @raw_data_type_definition.setter - def raw_data_type_definition(self, raw_data_type_definition: Optional[str]): - if self.attributes is None: - self.attributes = 
self.Attributes() - self.attributes.raw_data_type_definition = raw_data_type_definition - - @property - def order(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.order - - @order.setter - def order(self, order: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.order = order - - @property - def nested_column_order(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.nested_column_order - - @nested_column_order.setter - def nested_column_order(self, nested_column_order: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.nested_column_order = nested_column_order - - @property - def nested_column_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.nested_column_count - - @nested_column_count.setter - def nested_column_count(self, nested_column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.nested_column_count = nested_column_count - - @property - def column_hierarchy(self) -> Optional[List[Dict[str, str]]]: - return None if self.attributes is None else self.attributes.column_hierarchy - - @column_hierarchy.setter - def column_hierarchy(self, column_hierarchy: Optional[List[Dict[str, str]]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_hierarchy = column_hierarchy - - @property - def is_partition(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_partition - - @is_partition.setter - def is_partition(self, is_partition: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_partition = is_partition - - @property - def partition_order(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.partition_order - - @partition_order.setter - 
def partition_order(self, partition_order: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_order = partition_order - - @property - def is_clustered(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_clustered - - @is_clustered.setter - def is_clustered(self, is_clustered: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_clustered = is_clustered - - @property - def is_primary(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_primary - - @is_primary.setter - def is_primary(self, is_primary: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_primary = is_primary - - @property - def is_foreign(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_foreign - - @is_foreign.setter - def is_foreign(self, is_foreign: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_foreign = is_foreign - - @property - def is_indexed(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_indexed - - @is_indexed.setter - def is_indexed(self, is_indexed: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_indexed = is_indexed - - @property - def is_sort(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_sort - - @is_sort.setter - def is_sort(self, is_sort: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_sort = is_sort - - @property - def is_dist(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_dist - - @is_dist.setter - def is_dist(self, is_dist: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() 
- self.attributes.is_dist = is_dist - - @property - def is_pinned(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_pinned - - @is_pinned.setter - def is_pinned(self, is_pinned: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_pinned = is_pinned - - @property - def pinned_by(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.pinned_by - - @pinned_by.setter - def pinned_by(self, pinned_by: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.pinned_by = pinned_by - - @property - def pinned_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.pinned_at - - @pinned_at.setter - def pinned_at(self, pinned_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.pinned_at = pinned_at - - @property - def precision(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.precision - - @precision.setter - def precision(self, precision: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.precision = precision - - @property - def default_value(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.default_value - - @default_value.setter - def default_value(self, default_value: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.default_value = default_value - - @property - def is_nullable(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_nullable - - @is_nullable.setter - def is_nullable(self, is_nullable: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_nullable = is_nullable - - @property - def numeric_scale(self) -> Optional[float]: - return None if 
self.attributes is None else self.attributes.numeric_scale - - @numeric_scale.setter - def numeric_scale(self, numeric_scale: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.numeric_scale = numeric_scale - - @property - def max_length(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.max_length - - @max_length.setter - def max_length(self, max_length: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.max_length = max_length - - @property - def validations(self) -> Optional[Dict[str, str]]: - return None if self.attributes is None else self.attributes.validations - - @validations.setter - def validations(self, validations: Optional[Dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.validations = validations - - @property - def parent_column_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.parent_column_qualified_name - ) - - @parent_column_qualified_name.setter - def parent_column_qualified_name(self, parent_column_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_column_qualified_name = parent_column_qualified_name - - @property - def parent_column_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.parent_column_name - - @parent_column_name.setter - def parent_column_name(self, parent_column_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.parent_column_name = parent_column_name - - @property - def column_distinct_values_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_distinct_values_count - ) - - @column_distinct_values_count.setter - def column_distinct_values_count(self, 
column_distinct_values_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_distinct_values_count = column_distinct_values_count - - @property - def column_distinct_values_count_long(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_distinct_values_count_long - ) - - @column_distinct_values_count_long.setter - def column_distinct_values_count_long( - self, column_distinct_values_count_long: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_distinct_values_count_long = ( - column_distinct_values_count_long - ) - - @property - def column_histogram(self) -> Optional[Histogram]: - return None if self.attributes is None else self.attributes.column_histogram - - @column_histogram.setter - def column_histogram(self, column_histogram: Optional[Histogram]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_histogram = column_histogram - - @property - def column_max(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_max - - @column_max.setter - def column_max(self, column_max: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_max = column_max - - @property - def column_min(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_min - - @column_min.setter - def column_min(self, column_min: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_min = column_min - - @property - def column_mean(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_mean - - @column_mean.setter - def column_mean(self, column_mean: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_mean = 
column_mean - - @property - def column_sum(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_sum - - @column_sum.setter - def column_sum(self, column_sum: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_sum = column_sum - - @property - def column_median(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_median - - @column_median.setter - def column_median(self, column_median: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_median = column_median - - @property - def column_standard_deviation(self) -> Optional[float]: - return ( - None - if self.attributes is None - else self.attributes.column_standard_deviation - ) - - @column_standard_deviation.setter - def column_standard_deviation(self, column_standard_deviation: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_standard_deviation = column_standard_deviation - - @property - def column_unique_values_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_unique_values_count - ) - - @column_unique_values_count.setter - def column_unique_values_count(self, column_unique_values_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_unique_values_count = column_unique_values_count - - @property - def column_unique_values_count_long(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_unique_values_count_long - ) - - @column_unique_values_count_long.setter - def column_unique_values_count_long( - self, column_unique_values_count_long: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_unique_values_count_long = ( - 
column_unique_values_count_long - ) - - @property - def column_average(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_average - - @column_average.setter - def column_average(self, column_average: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_average = column_average - - @property - def column_average_length(self) -> Optional[float]: - return ( - None if self.attributes is None else self.attributes.column_average_length - ) - - @column_average_length.setter - def column_average_length(self, column_average_length: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_average_length = column_average_length - - @property - def column_duplicate_values_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_duplicate_values_count - ) - - @column_duplicate_values_count.setter - def column_duplicate_values_count( - self, column_duplicate_values_count: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_duplicate_values_count = column_duplicate_values_count - - @property - def column_duplicate_values_count_long(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_duplicate_values_count_long - ) - - @column_duplicate_values_count_long.setter - def column_duplicate_values_count_long( - self, column_duplicate_values_count_long: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_duplicate_values_count_long = ( - column_duplicate_values_count_long - ) - - @property - def column_maximum_string_length(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_maximum_string_length - ) - - @column_maximum_string_length.setter - def 
column_maximum_string_length(self, column_maximum_string_length: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_maximum_string_length = column_maximum_string_length - - @property - def column_maxs(self) -> Optional[Set[str]]: - return None if self.attributes is None else self.attributes.column_maxs - - @column_maxs.setter - def column_maxs(self, column_maxs: Optional[Set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_maxs = column_maxs - - @property - def column_minimum_string_length(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_minimum_string_length - ) - - @column_minimum_string_length.setter - def column_minimum_string_length(self, column_minimum_string_length: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_minimum_string_length = column_minimum_string_length - - @property - def column_mins(self) -> Optional[Set[str]]: - return None if self.attributes is None else self.attributes.column_mins - - @column_mins.setter - def column_mins(self, column_mins: Optional[Set[str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_mins = column_mins - - @property - def column_missing_values_count(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_missing_values_count - ) - - @column_missing_values_count.setter - def column_missing_values_count(self, column_missing_values_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_missing_values_count = column_missing_values_count - - @property - def column_missing_values_count_long(self) -> Optional[int]: - return ( - None - if self.attributes is None - else self.attributes.column_missing_values_count_long - ) - - 
@column_missing_values_count_long.setter - def column_missing_values_count_long( - self, column_missing_values_count_long: Optional[int] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_missing_values_count_long = ( - column_missing_values_count_long - ) - - @property - def column_missing_values_percentage(self) -> Optional[float]: - return ( - None - if self.attributes is None - else self.attributes.column_missing_values_percentage - ) - - @column_missing_values_percentage.setter - def column_missing_values_percentage( - self, column_missing_values_percentage: Optional[float] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_missing_values_percentage = ( - column_missing_values_percentage - ) - - @property - def column_uniqueness_percentage(self) -> Optional[float]: - return ( - None - if self.attributes is None - else self.attributes.column_uniqueness_percentage - ) - - @column_uniqueness_percentage.setter - def column_uniqueness_percentage( - self, column_uniqueness_percentage: Optional[float] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_uniqueness_percentage = column_uniqueness_percentage - - @property - def column_variance(self) -> Optional[float]: - return None if self.attributes is None else self.attributes.column_variance - - @column_variance.setter - def column_variance(self, column_variance: Optional[float]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_variance = column_variance - - @property - def column_top_values(self) -> Optional[List[ColumnValueFrequencyMap]]: - return None if self.attributes is None else self.attributes.column_top_values - - @column_top_values.setter - def column_top_values( - self, column_top_values: Optional[List[ColumnValueFrequencyMap]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.column_top_values = column_top_values - - @property - def column_depth_level(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.column_depth_level - - @column_depth_level.setter - def column_depth_level(self, column_depth_level: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_depth_level = column_depth_level - - @property - def nosql_collection_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.nosql_collection_name - ) - - @nosql_collection_name.setter - def nosql_collection_name(self, nosql_collection_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.nosql_collection_name = nosql_collection_name - - @property - def nosql_collection_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.nosql_collection_qualified_name - ) - - @nosql_collection_qualified_name.setter - def nosql_collection_qualified_name( - self, nosql_collection_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.nosql_collection_qualified_name = ( - nosql_collection_qualified_name - ) - - @property - def query_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_count - - @query_count.setter - def query_count(self, query_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count = query_count - - @property - def query_user_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_user_count - - @query_user_count.setter - def query_user_count(self, query_user_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_count = query_user_count - - @property - def 
query_user_map(self) -> Optional[Dict[str, int]]: - return None if self.attributes is None else self.attributes.query_user_map - - @query_user_map.setter - def query_user_map(self, query_user_map: Optional[Dict[str, int]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_map = query_user_map - - @property - def query_count_updated_at(self) -> Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.query_count_updated_at - ) - - @query_count_updated_at.setter - def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count_updated_at = query_count_updated_at - - @property - def database_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.database_name - - @database_name.setter - def database_name(self, database_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_name = database_name - - @property - def database_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.database_qualified_name - ) - - @database_qualified_name.setter - def database_qualified_name(self, database_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_qualified_name = database_qualified_name - - @property - def schema_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.schema_name - - @schema_name.setter - def schema_name(self, schema_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_name = schema_name - - @property - def schema_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.schema_qualified_name - ) - - 
@schema_qualified_name.setter - def schema_qualified_name(self, schema_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_qualified_name = schema_qualified_name - - @property - def table_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_name - - @table_name.setter - def table_name(self, table_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_name = table_name - - @property - def table_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_qualified_name - - @table_qualified_name.setter - def table_qualified_name(self, table_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_qualified_name = table_qualified_name - - @property - def view_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_name - - @view_name.setter - def view_name(self, view_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_name = view_name - - @property - def view_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_qualified_name - - @view_qualified_name.setter - def view_qualified_name(self, view_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_qualified_name = view_qualified_name - - @property - def calculation_view_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.calculation_view_name - ) - - @calculation_view_name.setter - def calculation_view_name(self, calculation_view_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.calculation_view_name = 
calculation_view_name - - @property - def calculation_view_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.calculation_view_qualified_name - ) - - @calculation_view_qualified_name.setter - def calculation_view_qualified_name( - self, calculation_view_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.calculation_view_qualified_name = ( - calculation_view_qualified_name - ) - - @property - def is_profiled(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_profiled - - @is_profiled.setter - def is_profiled(self, is_profiled: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_profiled = is_profiled - - @property - def last_profiled_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.last_profiled_at - - @last_profiled_at.setter - def last_profiled_at(self, last_profiled_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.last_profiled_at = last_profiled_at - - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - - @property - def custom_source_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.custom_source_id - - @custom_source_id.setter - def custom_source_id(self, custom_source_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.custom_source_id = custom_source_id - - @property - def custom_dataset_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.custom_dataset_name - - @custom_dataset_name.setter - def custom_dataset_name(self, custom_dataset_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_dataset_name = custom_dataset_name - - @property - def custom_dataset_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.custom_dataset_qualified_name - ) - - @custom_dataset_qualified_name.setter - def custom_dataset_qualified_name( - self, custom_dataset_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_dataset_qualified_name = custom_dataset_qualified_name - - @property - def custom_table(self) -> Optional[CustomTable]: - return None if self.attributes is None else self.attributes.custom_table - - @custom_table.setter - def custom_table(self, custom_table: Optional[CustomTable]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_table = custom_table - - class Attributes(Column.Attributes): - custom_temperature: Optional[CustomTemperatureType] = Field( - default=None, description="" - ) - data_type: Optional[str] = Field(default=None, description="") - sub_data_type: Optional[str] = Field(default=None, description="") - raw_data_type_definition: Optional[str] = Field(default=None, description="") - order: Optional[int] = Field(default=None, description="") - nested_column_order: Optional[str] = Field(default=None, description="") - nested_column_count: Optional[int] = Field(default=None, description="") - column_hierarchy: Optional[List[Dict[str, str]]] = Field( - default=None, description="" - ) - is_partition: Optional[bool] = Field(default=None, description="") - partition_order: Optional[int] = 
Field(default=None, description="") - is_clustered: Optional[bool] = Field(default=None, description="") - is_primary: Optional[bool] = Field(default=None, description="") - is_foreign: Optional[bool] = Field(default=None, description="") - is_indexed: Optional[bool] = Field(default=None, description="") - is_sort: Optional[bool] = Field(default=None, description="") - is_dist: Optional[bool] = Field(default=None, description="") - is_pinned: Optional[bool] = Field(default=None, description="") - pinned_by: Optional[str] = Field(default=None, description="") - pinned_at: Optional[datetime] = Field(default=None, description="") - precision: Optional[int] = Field(default=None, description="") - default_value: Optional[str] = Field(default=None, description="") - is_nullable: Optional[bool] = Field(default=None, description="") - numeric_scale: Optional[float] = Field(default=None, description="") - max_length: Optional[int] = Field(default=None, description="") - validations: Optional[Dict[str, str]] = Field(default=None, description="") - parent_column_qualified_name: Optional[str] = Field( - default=None, description="" - ) - parent_column_name: Optional[str] = Field(default=None, description="") - column_distinct_values_count: Optional[int] = Field( - default=None, description="" - ) - column_distinct_values_count_long: Optional[int] = Field( - default=None, description="" - ) - column_histogram: Optional[Histogram] = Field(default=None, description="") - column_max: Optional[float] = Field(default=None, description="") - column_min: Optional[float] = Field(default=None, description="") - column_mean: Optional[float] = Field(default=None, description="") - column_sum: Optional[float] = Field(default=None, description="") - column_median: Optional[float] = Field(default=None, description="") - column_standard_deviation: Optional[float] = Field(default=None, description="") - column_unique_values_count: Optional[int] = Field(default=None, description="") - 
column_unique_values_count_long: Optional[int] = Field( - default=None, description="" - ) - column_average: Optional[float] = Field(default=None, description="") - column_average_length: Optional[float] = Field(default=None, description="") - column_duplicate_values_count: Optional[int] = Field( - default=None, description="" - ) - column_duplicate_values_count_long: Optional[int] = Field( - default=None, description="" - ) - column_maximum_string_length: Optional[int] = Field( - default=None, description="" - ) - column_maxs: Optional[Set[str]] = Field(default=None, description="") - column_minimum_string_length: Optional[int] = Field( - default=None, description="" - ) - column_mins: Optional[Set[str]] = Field(default=None, description="") - column_missing_values_count: Optional[int] = Field(default=None, description="") - column_missing_values_count_long: Optional[int] = Field( - default=None, description="" - ) - column_missing_values_percentage: Optional[float] = Field( - default=None, description="" - ) - column_uniqueness_percentage: Optional[float] = Field( - default=None, description="" - ) - column_variance: Optional[float] = Field(default=None, description="") - column_top_values: Optional[List[ColumnValueFrequencyMap]] = Field( - default=None, description="" - ) - column_depth_level: Optional[int] = Field(default=None, description="") - nosql_collection_name: Optional[str] = Field(default=None, description="") - nosql_collection_qualified_name: Optional[str] = Field( - default=None, description="" - ) - query_count: Optional[int] = Field(default=None, description="") - query_user_count: Optional[int] = Field(default=None, description="") - query_user_map: Optional[Dict[str, int]] = Field(default=None, description="") - query_count_updated_at: Optional[datetime] = Field(default=None, description="") - database_name: Optional[str] = Field(default=None, description="") - database_qualified_name: Optional[str] = Field(default=None, description="") - 
schema_name: Optional[str] = Field(default=None, description="") - schema_qualified_name: Optional[str] = Field(default=None, description="") - table_name: Optional[str] = Field(default=None, description="") - table_qualified_name: Optional[str] = Field(default=None, description="") - view_name: Optional[str] = Field(default=None, description="") - view_qualified_name: Optional[str] = Field(default=None, description="") - calculation_view_name: Optional[str] = Field(default=None, description="") - calculation_view_qualified_name: Optional[str] = Field( - default=None, description="" - ) - is_profiled: Optional[bool] = Field(default=None, description="") - last_profiled_at: Optional[datetime] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) - custom_source_id: Optional[str] = Field(default=None, description="") - custom_dataset_name: Optional[str] = Field(default=None, description="") - custom_dataset_qualified_name: Optional[str] = Field( - default=None, description="" - ) - custom_table: Optional[CustomTable] = Field( - default=None, description="" - ) # relationship - - attributes: CustomField.Attributes = Field( - default_factory=lambda: CustomField.Attributes(), - description=( - "Map of attributes in the instance and their values. " - "The specific keys of this map will vary by type, " - "so are described in the sub-types of this schema." - ), - ) - - -from .custom_table import CustomTable # noqa - -CustomField.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/custom_table.py b/pyatlan/model/assets/custom_table.py deleted file mode 100644 index 3c1a6e7b2..000000000 --- a/pyatlan/model/assets/custom_table.py +++ /dev/null @@ -1,960 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2022 Atlan Pte. Ltd. 
- - -from __future__ import annotations - -from datetime import datetime -from typing import ClassVar, Dict, List, Optional - -from pydantic.v1 import Field, validator - -from pyatlan.model.enums import TableType -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - KeywordTextField, - NumericField, - RelationField, - TextField, -) -from pyatlan.model.structs import CustomRatings - -from .core.table import Table - - -class CustomTable(Table): - """Description""" - - type_name: str = Field(default="CustomTable", allow_mutation=False) - - @validator("type_name") - def validate_type_name(cls, v): - if v != "CustomTable": - raise ValueError("must be CustomTable") - return v - - def __setattr__(self, name, value): - if name in CustomTable._convenience_properties: - return object.__setattr__(self, name, value) - super().__setattr__(name, value) - - CUSTOM_RATINGS: ClassVar[KeywordField] = KeywordField( - "customRatings", "customRatings" - ) - """ - Ratings for the CustomTable asset from the source system. - """ - COLUMN_COUNT: ClassVar[NumericField] = NumericField("columnCount", "columnCount") - """ - Number of columns in this table. - """ - ROW_COUNT: ClassVar[NumericField] = NumericField("rowCount", "rowCount") - """ - Number of rows in this table. - """ - SIZE_BYTES: ClassVar[NumericField] = NumericField("sizeBytes", "sizeBytes") - """ - Size of this table, in bytes. - """ - ALIAS: ClassVar[KeywordField] = KeywordField("alias", "alias") - """ - Alias for this table. - """ - IS_TEMPORARY: ClassVar[BooleanField] = BooleanField("isTemporary", "isTemporary") - """ - Whether this table is temporary (true) or not (false). - """ - IS_QUERY_PREVIEW: ClassVar[BooleanField] = BooleanField( - "isQueryPreview", "isQueryPreview" - ) - """ - Whether preview queries are allowed for this table (true) or not (false). 
- """ - QUERY_PREVIEW_CONFIG: ClassVar[KeywordField] = KeywordField( - "queryPreviewConfig", "queryPreviewConfig" - ) - """ - Configuration for preview queries. - """ - EXTERNAL_LOCATION: ClassVar[TextField] = TextField( - "externalLocation", "externalLocation" - ) - """ - External location of this table, for example: an S3 object location. - """ - EXTERNAL_LOCATION_REGION: ClassVar[TextField] = TextField( - "externalLocationRegion", "externalLocationRegion" - ) - """ - Region of the external location of this table, for example: S3 region. - """ - EXTERNAL_LOCATION_FORMAT: ClassVar[KeywordField] = KeywordField( - "externalLocationFormat", "externalLocationFormat" - ) - """ - Format of the external location of this table, for example: JSON, CSV, PARQUET, etc. - """ - IS_PARTITIONED: ClassVar[BooleanField] = BooleanField( - "isPartitioned", "isPartitioned" - ) - """ - Whether this table is partitioned (true) or not (false). - """ - PARTITION_STRATEGY: ClassVar[KeywordField] = KeywordField( - "partitionStrategy", "partitionStrategy" - ) - """ - Partition strategy for this table. - """ - PARTITION_COUNT: ClassVar[NumericField] = NumericField( - "partitionCount", "partitionCount" - ) - """ - Number of partitions in this table. - """ - PARTITION_LIST: ClassVar[TextField] = TextField("partitionList", "partitionList") - """ - List of partitions in this table. - """ - IS_SHARDED: ClassVar[BooleanField] = BooleanField("isSharded", "isSharded") - """ - Whether this table is a sharded table (true) or not (false). - """ - TABLE_TYPE: ClassVar[KeywordField] = KeywordField("tableType", "tableType") - """ - Type of the table. 
- """ - ICEBERG_CATALOG_NAME: ClassVar[KeywordField] = KeywordField( - "icebergCatalogName", "icebergCatalogName" - ) - """ - iceberg table catalog name (can be any user defined name) - """ - ICEBERG_TABLE_TYPE: ClassVar[KeywordField] = KeywordField( - "icebergTableType", "icebergTableType" - ) - """ - iceberg table type (managed vs unmanaged) - """ - ICEBERG_CATALOG_SOURCE: ClassVar[KeywordField] = KeywordField( - "icebergCatalogSource", "icebergCatalogSource" - ) - """ - iceberg table catalog type (glue, polaris, snowflake) - """ - ICEBERG_CATALOG_TABLE_NAME: ClassVar[KeywordField] = KeywordField( - "icebergCatalogTableName", "icebergCatalogTableName" - ) - """ - catalog table name (actual table name on the catalog side). - """ - ICEBERG_CATALOG_TABLE_NAMESPACE: ClassVar[KeywordField] = KeywordField( - "icebergCatalogTableNamespace", "icebergCatalogTableNamespace" - ) - """ - catalog table namespace (actual database name on the catalog side). - """ - TABLE_EXTERNAL_VOLUME_NAME: ClassVar[KeywordField] = KeywordField( - "tableExternalVolumeName", "tableExternalVolumeName" - ) - """ - external volume name for the table. - """ - ICEBERG_TABLE_BASE_LOCATION: ClassVar[KeywordField] = KeywordField( - "icebergTableBaseLocation", "icebergTableBaseLocation" - ) - """ - iceberg table base location inside the external volume. - """ - TABLE_RETENTION_TIME: ClassVar[NumericField] = NumericField( - "tableRetentionTime", "tableRetentionTime" - ) - """ - Data retention time in days. - """ - QUERY_COUNT: ClassVar[NumericField] = NumericField("queryCount", "queryCount") - """ - Number of times this asset has been queried. - """ - QUERY_USER_COUNT: ClassVar[NumericField] = NumericField( - "queryUserCount", "queryUserCount" - ) - """ - Number of unique users who have queried this asset. - """ - QUERY_USER_MAP: ClassVar[KeywordField] = KeywordField( - "queryUserMap", "queryUserMap" - ) - """ - Map of unique users who have queried this asset to the number of times they have queried it. 
- """ - QUERY_COUNT_UPDATED_AT: ClassVar[NumericField] = NumericField( - "queryCountUpdatedAt", "queryCountUpdatedAt" - ) - """ - Time (epoch) at which the query count was last updated, in milliseconds. - """ - DATABASE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "databaseName", "databaseName.keyword", "databaseName" - ) - """ - Simple name of the database in which this SQL asset exists, or empty if it does not exist within a database. - """ - DATABASE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "databaseQualifiedName", "databaseQualifiedName" - ) - """ - Unique name of the database in which this SQL asset exists, or empty if it does not exist within a database. - """ - SCHEMA_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "schemaName", "schemaName.keyword", "schemaName" - ) - """ - Simple name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - SCHEMA_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "schemaQualifiedName", "schemaQualifiedName" - ) - """ - Unique name of the schema in which this SQL asset exists, or empty if it does not exist within a schema. - """ - TABLE_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "tableName", "tableName.keyword", "tableName" - ) - """ - Simple name of the table in which this SQL asset exists, or empty if it does not exist within a table. - """ - TABLE_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "tableQualifiedName", "tableQualifiedName" - ) - """ - Unique name of the table in which this SQL asset exists, or empty if it does not exist within a table. - """ - VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "viewName", "viewName.keyword", "viewName" - ) - """ - Simple name of the view in which this SQL asset exists, or empty if it does not exist within a view. 
- """ - VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "viewQualifiedName", "viewQualifiedName" - ) - """ - Unique name of the view in which this SQL asset exists, or empty if it does not exist within a view. - """ - CALCULATION_VIEW_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "calculationViewName", "calculationViewName.keyword", "calculationViewName" - ) - """ - Simple name of the calculation view in which this SQL asset exists, or empty if it does not exist within a calculation view. - """ # noqa: E501 - CALCULATION_VIEW_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "calculationViewQualifiedName", "calculationViewQualifiedName" - ) - """ - Unique name of the calculation view in which this SQL asset exists, or empty if it does not exist within a calculation view. - """ # noqa: E501 - IS_PROFILED: ClassVar[BooleanField] = BooleanField("isProfiled", "isProfiled") - """ - Whether this asset has been profiled (true) or not (false). - """ - LAST_PROFILED_AT: ClassVar[NumericField] = NumericField( - "lastProfiledAt", "lastProfiledAt" - ) - """ - Time (epoch) at which this asset was last profiled, in milliseconds. - """ - ASSET_APPLICATION_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "assetApplicationQualifiedName", "assetApplicationQualifiedName" - ) - """ - Qualified name of the Application Container that contains this asset. - """ - CUSTOM_SOURCE_ID: ClassVar[KeywordField] = KeywordField( - "customSourceId", "customSourceId" - ) - """ - Unique identifier for the Custom asset from the source system. - """ - CUSTOM_DATASET_NAME: ClassVar[KeywordTextField] = KeywordTextField( - "customDatasetName", "customDatasetName.keyword", "customDatasetName" - ) - """ - Simple name of the dataset in which this asset exists, or empty if it is itself a dataset. 
- """ - CUSTOM_DATASET_QUALIFIED_NAME: ClassVar[KeywordField] = KeywordField( - "customDatasetQualifiedName", "customDatasetQualifiedName" - ) - """ - Unique name of the dataset in which this asset exists, or empty if it is itself a dataset. - """ - - CUSTOM_FIELDS: ClassVar[RelationField] = RelationField("customFields") - """ - TBC - """ - CUSTOM_DATASET: ClassVar[RelationField] = RelationField("customDataset") - """ - TBC - """ - - _convenience_properties: ClassVar[List[str]] = [ - "custom_ratings", - "column_count", - "row_count", - "size_bytes", - "alias", - "is_temporary", - "is_query_preview", - "query_preview_config", - "external_location", - "external_location_region", - "external_location_format", - "is_partitioned", - "partition_strategy", - "partition_count", - "partition_list", - "is_sharded", - "table_type", - "iceberg_catalog_name", - "iceberg_table_type", - "iceberg_catalog_source", - "iceberg_catalog_table_name", - "iceberg_catalog_table_namespace", - "table_external_volume_name", - "iceberg_table_base_location", - "table_retention_time", - "query_count", - "query_user_count", - "query_user_map", - "query_count_updated_at", - "database_name", - "database_qualified_name", - "schema_name", - "schema_qualified_name", - "table_name", - "table_qualified_name", - "view_name", - "view_qualified_name", - "calculation_view_name", - "calculation_view_qualified_name", - "is_profiled", - "last_profiled_at", - "asset_application_qualified_name", - "custom_source_id", - "custom_dataset_name", - "custom_dataset_qualified_name", - "custom_fields", - "custom_dataset", - ] - - @property - def custom_ratings(self) -> Optional[List[CustomRatings]]: - return None if self.attributes is None else self.attributes.custom_ratings - - @custom_ratings.setter - def custom_ratings(self, custom_ratings: Optional[List[CustomRatings]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_ratings = custom_ratings - - @property - def 
column_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.column_count - - @column_count.setter - def column_count(self, column_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.column_count = column_count - - @property - def row_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.row_count - - @row_count.setter - def row_count(self, row_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.row_count = row_count - - @property - def size_bytes(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.size_bytes - - @size_bytes.setter - def size_bytes(self, size_bytes: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.size_bytes = size_bytes - - @property - def alias(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.alias - - @alias.setter - def alias(self, alias: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.alias = alias - - @property - def is_temporary(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_temporary - - @is_temporary.setter - def is_temporary(self, is_temporary: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_temporary = is_temporary - - @property - def is_query_preview(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_query_preview - - @is_query_preview.setter - def is_query_preview(self, is_query_preview: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_query_preview = is_query_preview - - @property - def query_preview_config(self) -> Optional[Dict[str, str]]: - return None if self.attributes is 
None else self.attributes.query_preview_config - - @query_preview_config.setter - def query_preview_config(self, query_preview_config: Optional[Dict[str, str]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_preview_config = query_preview_config - - @property - def external_location(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.external_location - - @external_location.setter - def external_location(self, external_location: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location = external_location - - @property - def external_location_region(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.external_location_region - ) - - @external_location_region.setter - def external_location_region(self, external_location_region: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location_region = external_location_region - - @property - def external_location_format(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.external_location_format - ) - - @external_location_format.setter - def external_location_format(self, external_location_format: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.external_location_format = external_location_format - - @property - def is_partitioned(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_partitioned - - @is_partitioned.setter - def is_partitioned(self, is_partitioned: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_partitioned = is_partitioned - - @property - def partition_strategy(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.partition_strategy - - 
@partition_strategy.setter - def partition_strategy(self, partition_strategy: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_strategy = partition_strategy - - @property - def partition_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.partition_count - - @partition_count.setter - def partition_count(self, partition_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_count = partition_count - - @property - def partition_list(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.partition_list - - @partition_list.setter - def partition_list(self, partition_list: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.partition_list = partition_list - - @property - def is_sharded(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_sharded - - @is_sharded.setter - def is_sharded(self, is_sharded: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_sharded = is_sharded - - @property - def table_type(self) -> Optional[TableType]: - return None if self.attributes is None else self.attributes.table_type - - @table_type.setter - def table_type(self, table_type: Optional[TableType]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_type = table_type - - @property - def iceberg_catalog_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.iceberg_catalog_name - - @iceberg_catalog_name.setter - def iceberg_catalog_name(self, iceberg_catalog_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.iceberg_catalog_name = iceberg_catalog_name - - @property - def iceberg_table_type(self) -> Optional[str]: - return 
None if self.attributes is None else self.attributes.iceberg_table_type - - @iceberg_table_type.setter - def iceberg_table_type(self, iceberg_table_type: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.iceberg_table_type = iceberg_table_type - - @property - def iceberg_catalog_source(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.iceberg_catalog_source - ) - - @iceberg_catalog_source.setter - def iceberg_catalog_source(self, iceberg_catalog_source: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.iceberg_catalog_source = iceberg_catalog_source - - @property - def iceberg_catalog_table_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.iceberg_catalog_table_name - ) - - @iceberg_catalog_table_name.setter - def iceberg_catalog_table_name(self, iceberg_catalog_table_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.iceberg_catalog_table_name = iceberg_catalog_table_name - - @property - def iceberg_catalog_table_namespace(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.iceberg_catalog_table_namespace - ) - - @iceberg_catalog_table_namespace.setter - def iceberg_catalog_table_namespace( - self, iceberg_catalog_table_namespace: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.iceberg_catalog_table_namespace = ( - iceberg_catalog_table_namespace - ) - - @property - def table_external_volume_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.table_external_volume_name - ) - - @table_external_volume_name.setter - def table_external_volume_name(self, table_external_volume_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.table_external_volume_name = table_external_volume_name - - @property - def iceberg_table_base_location(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.iceberg_table_base_location - ) - - @iceberg_table_base_location.setter - def iceberg_table_base_location(self, iceberg_table_base_location: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.iceberg_table_base_location = iceberg_table_base_location - - @property - def table_retention_time(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.table_retention_time - - @table_retention_time.setter - def table_retention_time(self, table_retention_time: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_retention_time = table_retention_time - - @property - def query_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_count - - @query_count.setter - def query_count(self, query_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count = query_count - - @property - def query_user_count(self) -> Optional[int]: - return None if self.attributes is None else self.attributes.query_user_count - - @query_user_count.setter - def query_user_count(self, query_user_count: Optional[int]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_count = query_user_count - - @property - def query_user_map(self) -> Optional[Dict[str, int]]: - return None if self.attributes is None else self.attributes.query_user_map - - @query_user_map.setter - def query_user_map(self, query_user_map: Optional[Dict[str, int]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_user_map = query_user_map - - @property - def query_count_updated_at(self) -> 
Optional[datetime]: - return ( - None if self.attributes is None else self.attributes.query_count_updated_at - ) - - @query_count_updated_at.setter - def query_count_updated_at(self, query_count_updated_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.query_count_updated_at = query_count_updated_at - - @property - def database_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.database_name - - @database_name.setter - def database_name(self, database_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_name = database_name - - @property - def database_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.database_qualified_name - ) - - @database_qualified_name.setter - def database_qualified_name(self, database_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.database_qualified_name = database_qualified_name - - @property - def schema_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.schema_name - - @schema_name.setter - def schema_name(self, schema_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_name = schema_name - - @property - def schema_qualified_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.schema_qualified_name - ) - - @schema_qualified_name.setter - def schema_qualified_name(self, schema_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.schema_qualified_name = schema_qualified_name - - @property - def table_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_name - - @table_name.setter - def table_name(self, 
table_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_name = table_name - - @property - def table_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.table_qualified_name - - @table_qualified_name.setter - def table_qualified_name(self, table_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.table_qualified_name = table_qualified_name - - @property - def view_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_name - - @view_name.setter - def view_name(self, view_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_name = view_name - - @property - def view_qualified_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.view_qualified_name - - @view_qualified_name.setter - def view_qualified_name(self, view_qualified_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.view_qualified_name = view_qualified_name - - @property - def calculation_view_name(self) -> Optional[str]: - return ( - None if self.attributes is None else self.attributes.calculation_view_name - ) - - @calculation_view_name.setter - def calculation_view_name(self, calculation_view_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.calculation_view_name = calculation_view_name - - @property - def calculation_view_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.calculation_view_qualified_name - ) - - @calculation_view_qualified_name.setter - def calculation_view_qualified_name( - self, calculation_view_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - 
self.attributes.calculation_view_qualified_name = ( - calculation_view_qualified_name - ) - - @property - def is_profiled(self) -> Optional[bool]: - return None if self.attributes is None else self.attributes.is_profiled - - @is_profiled.setter - def is_profiled(self, is_profiled: Optional[bool]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.is_profiled = is_profiled - - @property - def last_profiled_at(self) -> Optional[datetime]: - return None if self.attributes is None else self.attributes.last_profiled_at - - @last_profiled_at.setter - def last_profiled_at(self, last_profiled_at: Optional[datetime]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.last_profiled_at = last_profiled_at - - @property - def asset_application_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.asset_application_qualified_name - ) - - @asset_application_qualified_name.setter - def asset_application_qualified_name( - self, asset_application_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.asset_application_qualified_name = ( - asset_application_qualified_name - ) - - @property - def custom_source_id(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.custom_source_id - - @custom_source_id.setter - def custom_source_id(self, custom_source_id: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_source_id = custom_source_id - - @property - def custom_dataset_name(self) -> Optional[str]: - return None if self.attributes is None else self.attributes.custom_dataset_name - - @custom_dataset_name.setter - def custom_dataset_name(self, custom_dataset_name: Optional[str]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_dataset_name = custom_dataset_name - - 
@property - def custom_dataset_qualified_name(self) -> Optional[str]: - return ( - None - if self.attributes is None - else self.attributes.custom_dataset_qualified_name - ) - - @custom_dataset_qualified_name.setter - def custom_dataset_qualified_name( - self, custom_dataset_qualified_name: Optional[str] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_dataset_qualified_name = custom_dataset_qualified_name - - @property - def custom_fields(self) -> Optional[List[CustomField]]: - return None if self.attributes is None else self.attributes.custom_fields - - @custom_fields.setter - def custom_fields(self, custom_fields: Optional[List[CustomField]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_fields = custom_fields - - @property - def custom_dataset(self) -> Optional[CustomDataset]: - return None if self.attributes is None else self.attributes.custom_dataset - - @custom_dataset.setter - def custom_dataset(self, custom_dataset: Optional[CustomDataset]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.custom_dataset = custom_dataset - - class Attributes(Table.Attributes): - custom_ratings: Optional[List[CustomRatings]] = Field( - default=None, description="" - ) - column_count: Optional[int] = Field(default=None, description="") - row_count: Optional[int] = Field(default=None, description="") - size_bytes: Optional[int] = Field(default=None, description="") - alias: Optional[str] = Field(default=None, description="") - is_temporary: Optional[bool] = Field(default=None, description="") - is_query_preview: Optional[bool] = Field(default=None, description="") - query_preview_config: Optional[Dict[str, str]] = Field( - default=None, description="" - ) - external_location: Optional[str] = Field(default=None, description="") - external_location_region: Optional[str] = Field(default=None, description="") - external_location_format: 
Optional[str] = Field(default=None, description="") - is_partitioned: Optional[bool] = Field(default=None, description="") - partition_strategy: Optional[str] = Field(default=None, description="") - partition_count: Optional[int] = Field(default=None, description="") - partition_list: Optional[str] = Field(default=None, description="") - is_sharded: Optional[bool] = Field(default=None, description="") - table_type: Optional[TableType] = Field(default=None, description="") - iceberg_catalog_name: Optional[str] = Field(default=None, description="") - iceberg_table_type: Optional[str] = Field(default=None, description="") - iceberg_catalog_source: Optional[str] = Field(default=None, description="") - iceberg_catalog_table_name: Optional[str] = Field(default=None, description="") - iceberg_catalog_table_namespace: Optional[str] = Field( - default=None, description="" - ) - table_external_volume_name: Optional[str] = Field(default=None, description="") - iceberg_table_base_location: Optional[str] = Field(default=None, description="") - table_retention_time: Optional[int] = Field(default=None, description="") - query_count: Optional[int] = Field(default=None, description="") - query_user_count: Optional[int] = Field(default=None, description="") - query_user_map: Optional[Dict[str, int]] = Field(default=None, description="") - query_count_updated_at: Optional[datetime] = Field(default=None, description="") - database_name: Optional[str] = Field(default=None, description="") - database_qualified_name: Optional[str] = Field(default=None, description="") - schema_name: Optional[str] = Field(default=None, description="") - schema_qualified_name: Optional[str] = Field(default=None, description="") - table_name: Optional[str] = Field(default=None, description="") - table_qualified_name: Optional[str] = Field(default=None, description="") - view_name: Optional[str] = Field(default=None, description="") - view_qualified_name: Optional[str] = Field(default=None, description="") - 
calculation_view_name: Optional[str] = Field(default=None, description="") - calculation_view_qualified_name: Optional[str] = Field( - default=None, description="" - ) - is_profiled: Optional[bool] = Field(default=None, description="") - last_profiled_at: Optional[datetime] = Field(default=None, description="") - asset_application_qualified_name: Optional[str] = Field( - default=None, description="" - ) - custom_source_id: Optional[str] = Field(default=None, description="") - custom_dataset_name: Optional[str] = Field(default=None, description="") - custom_dataset_qualified_name: Optional[str] = Field( - default=None, description="" - ) - custom_fields: Optional[List[CustomField]] = Field( - default=None, description="" - ) # relationship - custom_dataset: Optional[CustomDataset] = Field( - default=None, description="" - ) # relationship - - attributes: CustomTable.Attributes = Field( - default_factory=lambda: CustomTable.Attributes(), - description=( - "Map of attributes in the instance and their values. " - "The specific keys of this map will vary by type, " - "so are described in the sub-types of this schema." - ), - ) - - -from .custom_dataset import CustomDataset # noqa -from .custom_field import CustomField # noqa - -CustomTable.Attributes.update_forward_refs() diff --git a/pyatlan/model/assets/data_studio.py b/pyatlan/model/assets/data_studio.py index 23101fd77..225217b17 100644 --- a/pyatlan/model/assets/data_studio.py +++ b/pyatlan/model/assets/data_studio.py @@ -80,17 +80,29 @@ def __setattr__(self, name, value): List of tags that have been applied to the asset in Google. 
""" + INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs") + """ + TBC + """ + INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "inputToAirflowTasks" + ) + """ + TBC + """ INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") """ TBC """ - OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "outputFromAirflowTasks" + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "modelImplementedAttributes" ) """ TBC """ - INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs") + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "outputFromAirflowTasks" + ) """ TBC """ @@ -106,12 +118,6 @@ def __setattr__(self, name, value): """ TBC """ - INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "inputToAirflowTasks" - ) - """ - TBC - """ OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField( "outputFromProcesses" ) @@ -128,12 +134,13 @@ def __setattr__(self, name, value): "google_location_type", "google_labels", "google_tags", + "input_to_spark_jobs", + "input_to_airflow_tasks", "input_to_processes", + "model_implemented_attributes", "output_from_airflow_tasks", - "input_to_spark_jobs", "output_from_spark_jobs", "model_implemented_entities", - "input_to_airflow_tasks", "output_from_processes", ] @@ -219,6 +226,30 @@ def google_tags(self, google_tags: Optional[List[GoogleTag]]): self.attributes = self.Attributes() self.attributes.google_tags = google_tags + @property + def input_to_spark_jobs(self) -> Optional[List[SparkJob]]: + return None if self.attributes is None else self.attributes.input_to_spark_jobs + + @input_to_spark_jobs.setter + def input_to_spark_jobs(self, input_to_spark_jobs: Optional[List[SparkJob]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_spark_jobs = input_to_spark_jobs + + @property + def input_to_airflow_tasks(self) -> 
Optional[List[AirflowTask]]: + return ( + None if self.attributes is None else self.attributes.input_to_airflow_tasks + ) + + @input_to_airflow_tasks.setter + def input_to_airflow_tasks( + self, input_to_airflow_tasks: Optional[List[AirflowTask]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_airflow_tasks = input_to_airflow_tasks + @property def input_to_processes(self) -> Optional[List[Process]]: return None if self.attributes is None else self.attributes.input_to_processes @@ -229,6 +260,22 @@ def input_to_processes(self, input_to_processes: Optional[List[Process]]): self.attributes = self.Attributes() self.attributes.input_to_processes = input_to_processes + @property + def model_implemented_attributes(self) -> Optional[List[ModelAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.model_implemented_attributes + ) + + @model_implemented_attributes.setter + def model_implemented_attributes( + self, model_implemented_attributes: Optional[List[ModelAttribute]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_implemented_attributes = model_implemented_attributes + @property def output_from_airflow_tasks(self) -> Optional[List[AirflowTask]]: return ( @@ -245,16 +292,6 @@ def output_from_airflow_tasks( self.attributes = self.Attributes() self.attributes.output_from_airflow_tasks = output_from_airflow_tasks - @property - def input_to_spark_jobs(self) -> Optional[List[SparkJob]]: - return None if self.attributes is None else self.attributes.input_to_spark_jobs - - @input_to_spark_jobs.setter - def input_to_spark_jobs(self, input_to_spark_jobs: Optional[List[SparkJob]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_spark_jobs = input_to_spark_jobs - @property def output_from_spark_jobs(self) -> Optional[List[SparkJob]]: return ( @@ -283,20 +320,6 @@ def model_implemented_entities( 
self.attributes = self.Attributes() self.attributes.model_implemented_entities = model_implemented_entities - @property - def input_to_airflow_tasks(self) -> Optional[List[AirflowTask]]: - return ( - None if self.attributes is None else self.attributes.input_to_airflow_tasks - ) - - @input_to_airflow_tasks.setter - def input_to_airflow_tasks( - self, input_to_airflow_tasks: Optional[List[AirflowTask]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_airflow_tasks = input_to_airflow_tasks - @property def output_from_processes(self) -> Optional[List[Process]]: return ( @@ -318,13 +341,19 @@ class Attributes(Google.Attributes): google_location_type: Optional[str] = Field(default=None, description="") google_labels: Optional[List[GoogleLabel]] = Field(default=None, description="") google_tags: Optional[List[GoogleTag]] = Field(default=None, description="") + input_to_spark_jobs: Optional[List[SparkJob]] = Field( + default=None, description="" + ) # relationship + input_to_airflow_tasks: Optional[List[AirflowTask]] = Field( + default=None, description="" + ) # relationship input_to_processes: Optional[List[Process]] = Field( default=None, description="" ) # relationship - output_from_airflow_tasks: Optional[List[AirflowTask]] = Field( + model_implemented_attributes: Optional[List[ModelAttribute]] = Field( default=None, description="" ) # relationship - input_to_spark_jobs: Optional[List[SparkJob]] = Field( + output_from_airflow_tasks: Optional[List[AirflowTask]] = Field( default=None, description="" ) # relationship output_from_spark_jobs: Optional[List[SparkJob]] = Field( @@ -333,9 +362,6 @@ class Attributes(Google.Attributes): model_implemented_entities: Optional[List[ModelEntity]] = Field( default=None, description="" ) # relationship - input_to_airflow_tasks: Optional[List[AirflowTask]] = Field( - default=None, description="" - ) # relationship output_from_processes: Optional[List[Process]] = Field( default=None, 
description="" ) # relationship @@ -351,6 +377,7 @@ class Attributes(Google.Attributes): from .core.airflow_task import AirflowTask # noqa +from .core.model_attribute import ModelAttribute # noqa from .core.model_entity import ModelEntity # noqa from .core.process import Process # noqa from .core.spark_job import SparkJob # noqa diff --git a/pyatlan/model/assets/g_c_s.py b/pyatlan/model/assets/g_c_s.py index 6d96962c7..f5c4775a6 100644 --- a/pyatlan/model/assets/g_c_s.py +++ b/pyatlan/model/assets/g_c_s.py @@ -115,17 +115,29 @@ def __setattr__(self, name, value): List of tags that have been applied to the asset in Google. """ + INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs") + """ + TBC + """ + INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "inputToAirflowTasks" + ) + """ + TBC + """ INPUT_TO_PROCESSES: ClassVar[RelationField] = RelationField("inputToProcesses") """ TBC """ - OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "outputFromAirflowTasks" + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[RelationField] = RelationField( + "modelImplementedAttributes" ) """ TBC """ - INPUT_TO_SPARK_JOBS: ClassVar[RelationField] = RelationField("inputToSparkJobs") + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( + "outputFromAirflowTasks" + ) """ TBC """ @@ -141,12 +153,6 @@ def __setattr__(self, name, value): """ TBC """ - INPUT_TO_AIRFLOW_TASKS: ClassVar[RelationField] = RelationField( - "inputToAirflowTasks" - ) - """ - TBC - """ OUTPUT_FROM_PROCESSES: ClassVar[RelationField] = RelationField( "outputFromProcesses" ) @@ -169,12 +175,13 @@ def __setattr__(self, name, value): "google_location_type", "google_labels", "google_tags", + "input_to_spark_jobs", + "input_to_airflow_tasks", "input_to_processes", + "model_implemented_attributes", "output_from_airflow_tasks", - "input_to_spark_jobs", "output_from_spark_jobs", "model_implemented_entities", - "input_to_airflow_tasks", 
"output_from_processes", ] @@ -322,6 +329,30 @@ def google_tags(self, google_tags: Optional[List[GoogleTag]]): self.attributes = self.Attributes() self.attributes.google_tags = google_tags + @property + def input_to_spark_jobs(self) -> Optional[List[SparkJob]]: + return None if self.attributes is None else self.attributes.input_to_spark_jobs + + @input_to_spark_jobs.setter + def input_to_spark_jobs(self, input_to_spark_jobs: Optional[List[SparkJob]]): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_spark_jobs = input_to_spark_jobs + + @property + def input_to_airflow_tasks(self) -> Optional[List[AirflowTask]]: + return ( + None if self.attributes is None else self.attributes.input_to_airflow_tasks + ) + + @input_to_airflow_tasks.setter + def input_to_airflow_tasks( + self, input_to_airflow_tasks: Optional[List[AirflowTask]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.input_to_airflow_tasks = input_to_airflow_tasks + @property def input_to_processes(self) -> Optional[List[Process]]: return None if self.attributes is None else self.attributes.input_to_processes @@ -332,6 +363,22 @@ def input_to_processes(self, input_to_processes: Optional[List[Process]]): self.attributes = self.Attributes() self.attributes.input_to_processes = input_to_processes + @property + def model_implemented_attributes(self) -> Optional[List[ModelAttribute]]: + return ( + None + if self.attributes is None + else self.attributes.model_implemented_attributes + ) + + @model_implemented_attributes.setter + def model_implemented_attributes( + self, model_implemented_attributes: Optional[List[ModelAttribute]] + ): + if self.attributes is None: + self.attributes = self.Attributes() + self.attributes.model_implemented_attributes = model_implemented_attributes + @property def output_from_airflow_tasks(self) -> Optional[List[AirflowTask]]: return ( @@ -348,16 +395,6 @@ def output_from_airflow_tasks( 
self.attributes = self.Attributes() self.attributes.output_from_airflow_tasks = output_from_airflow_tasks - @property - def input_to_spark_jobs(self) -> Optional[List[SparkJob]]: - return None if self.attributes is None else self.attributes.input_to_spark_jobs - - @input_to_spark_jobs.setter - def input_to_spark_jobs(self, input_to_spark_jobs: Optional[List[SparkJob]]): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_spark_jobs = input_to_spark_jobs - @property def output_from_spark_jobs(self) -> Optional[List[SparkJob]]: return ( @@ -386,20 +423,6 @@ def model_implemented_entities( self.attributes = self.Attributes() self.attributes.model_implemented_entities = model_implemented_entities - @property - def input_to_airflow_tasks(self) -> Optional[List[AirflowTask]]: - return ( - None if self.attributes is None else self.attributes.input_to_airflow_tasks - ) - - @input_to_airflow_tasks.setter - def input_to_airflow_tasks( - self, input_to_airflow_tasks: Optional[List[AirflowTask]] - ): - if self.attributes is None: - self.attributes = self.Attributes() - self.attributes.input_to_airflow_tasks = input_to_airflow_tasks - @property def output_from_processes(self) -> Optional[List[Process]]: return ( @@ -427,13 +450,19 @@ class Attributes(Google.Attributes): google_location_type: Optional[str] = Field(default=None, description="") google_labels: Optional[List[GoogleLabel]] = Field(default=None, description="") google_tags: Optional[List[GoogleTag]] = Field(default=None, description="") + input_to_spark_jobs: Optional[List[SparkJob]] = Field( + default=None, description="" + ) # relationship + input_to_airflow_tasks: Optional[List[AirflowTask]] = Field( + default=None, description="" + ) # relationship input_to_processes: Optional[List[Process]] = Field( default=None, description="" ) # relationship - output_from_airflow_tasks: Optional[List[AirflowTask]] = Field( + model_implemented_attributes: 
Optional[List[ModelAttribute]] = Field( default=None, description="" ) # relationship - input_to_spark_jobs: Optional[List[SparkJob]] = Field( + output_from_airflow_tasks: Optional[List[AirflowTask]] = Field( default=None, description="" ) # relationship output_from_spark_jobs: Optional[List[SparkJob]] = Field( @@ -442,9 +471,6 @@ class Attributes(Google.Attributes): model_implemented_entities: Optional[List[ModelEntity]] = Field( default=None, description="" ) # relationship - input_to_airflow_tasks: Optional[List[AirflowTask]] = Field( - default=None, description="" - ) # relationship output_from_processes: Optional[List[Process]] = Field( default=None, description="" ) # relationship @@ -460,6 +486,7 @@ class Attributes(Google.Attributes): from .core.airflow_task import AirflowTask # noqa +from .core.model_attribute import ModelAttribute # noqa from .core.model_entity import ModelEntity # noqa from .core.process import Process # noqa from .core.spark_job import SparkJob # noqa diff --git a/pyatlan/model/enums.py b/pyatlan/model/enums.py index b6ccb835d..debff5d7a 100644 --- a/pyatlan/model/enums.py +++ b/pyatlan/model/enums.py @@ -2529,11 +2529,6 @@ class CertificateStatus(str, Enum): VERIFIED = "VERIFIED" -class CustomTemperatureType(str, Enum): - COLD = "COLD" - HOT = "HOT" - - class DataProductCriticality(str, Enum): LOW = "Low" MEDIUM = "Medium" diff --git a/pyatlan/model/structs.py b/pyatlan/model/structs.py index f560231db..5ad1bd19d 100644 --- a/pyatlan/model/structs.py +++ b/pyatlan/model/structs.py @@ -42,19 +42,6 @@ def flatten_structs_attributes(cls, values: Dict[str, Any]) -> Dict[str, Any]: return values -class MCRuleSchedule(AtlanObject): - """Description""" - - mc_rule_schedule_type: Optional[str] = Field(default=None, description="") - mc_rule_schedule_interval_in_minutes: Optional[int] = Field( - default=None, description="" - ) - mc_rule_schedule_start_time: Optional[datetime] = Field( - default=None, description="" - ) - 
mc_rule_schedule_crontab: Optional[str] = Field(default=None, description="") - - class DbtJobRun(AtlanObject): """Description""" @@ -71,18 +58,24 @@ class DbtJobRun(AtlanObject): dbt_compiled_code: Optional[str] = Field(default=None, description="") -class AwsCloudWatchMetric(AtlanObject): +class MCRuleSchedule(AtlanObject): """Description""" - aws_cloud_watch_metric_name: str = Field(description="") - aws_cloud_watch_metric_scope: str = Field(description="") + mc_rule_schedule_type: Optional[str] = Field(default=None, description="") + mc_rule_schedule_interval_in_minutes: Optional[int] = Field( + default=None, description="" + ) + mc_rule_schedule_start_time: Optional[datetime] = Field( + default=None, description="" + ) + mc_rule_schedule_crontab: Optional[str] = Field(default=None, description="") -class CustomRatings(AtlanObject): +class AwsCloudWatchMetric(AtlanObject): """Description""" - custom_rating_from: Optional[str] = Field(default=None, description="") - custom_rating_of: Optional[int] = Field(default=None, description="") + aws_cloud_watch_metric_name: str = Field(description="") + aws_cloud_watch_metric_scope: str = Field(description="") class Action(AtlanObject): @@ -117,11 +110,38 @@ class ColumnValueFrequencyMap(AtlanObject): column_value_frequency: Optional[int] = Field(default=None, description="") -class SourceTagAttachmentValue(AtlanObject): +class BadgeCondition(AtlanObject): """Description""" - tag_attachment_key: Optional[str] = Field(default=None, description="") - tag_attachment_value: Optional[str] = Field(default=None, description="") + @classmethod + def create( + cls, + *, + badge_condition_operator: BadgeComparisonOperator, + badge_condition_value: str, + badge_condition_colorhex: Union[BadgeConditionColor, str], + ) -> "BadgeCondition": + validate_required_fields( + [ + "badge_condition_operator", + "badge_condition_value", + "badge_condition_colorhex", + ], + [badge_condition_operator, badge_condition_value, 
badge_condition_colorhex], + ) + return cls( + badge_condition_operator=badge_condition_operator.value, + badge_condition_value=badge_condition_value, + badge_condition_colorhex=( + badge_condition_colorhex.value + if isinstance(badge_condition_colorhex, BadgeConditionColor) + else badge_condition_colorhex + ), + ) + + badge_condition_operator: Optional[str] = Field(default=None, description="") + badge_condition_value: Optional[str] = Field(default=None, description="") + badge_condition_colorhex: Optional[str] = Field(default=None, description="") class SourceTagAttachment(AtlanObject): @@ -267,52 +287,25 @@ def of( ) -class BadgeCondition(AtlanObject): +class SourceTagAttachmentValue(AtlanObject): """Description""" - @classmethod - def create( - cls, - *, - badge_condition_operator: BadgeComparisonOperator, - badge_condition_value: str, - badge_condition_colorhex: Union[BadgeConditionColor, str], - ) -> "BadgeCondition": - validate_required_fields( - [ - "badge_condition_operator", - "badge_condition_value", - "badge_condition_colorhex", - ], - [badge_condition_operator, badge_condition_value, badge_condition_colorhex], - ) - return cls( - badge_condition_operator=badge_condition_operator.value, - badge_condition_value=badge_condition_value, - badge_condition_colorhex=( - badge_condition_colorhex.value - if isinstance(badge_condition_colorhex, BadgeConditionColor) - else badge_condition_colorhex - ), - ) - - badge_condition_operator: Optional[str] = Field(default=None, description="") - badge_condition_value: Optional[str] = Field(default=None, description="") - badge_condition_colorhex: Optional[str] = Field(default=None, description="") + tag_attachment_key: Optional[str] = Field(default=None, description="") + tag_attachment_value: Optional[str] = Field(default=None, description="") -class AzureTag(AtlanObject): +class StarredDetails(AtlanObject): """Description""" - azure_tag_key: str = Field(description="") - azure_tag_value: str = Field(description="") + 
asset_starred_by: Optional[str] = Field(default=None, description="") + asset_starred_at: Optional[datetime] = Field(default=None, description="") -class StarredDetails(AtlanObject): +class AzureTag(AtlanObject): """Description""" - asset_starred_by: Optional[str] = Field(default=None, description="") - asset_starred_at: Optional[datetime] = Field(default=None, description="") + azure_tag_key: str = Field(description="") + azure_tag_value: str = Field(description="") class AuthPolicyCondition(AtlanObject): @@ -402,13 +395,11 @@ class SourceTagAttribute(AtlanObject): ) -MCRuleSchedule.update_forward_refs() - DbtJobRun.update_forward_refs() -AwsCloudWatchMetric.update_forward_refs() +MCRuleSchedule.update_forward_refs() -CustomRatings.update_forward_refs() +AwsCloudWatchMetric.update_forward_refs() Action.update_forward_refs() @@ -418,16 +409,16 @@ class SourceTagAttribute(AtlanObject): ColumnValueFrequencyMap.update_forward_refs() -SourceTagAttachmentValue.update_forward_refs() +BadgeCondition.update_forward_refs() SourceTagAttachment.update_forward_refs() -BadgeCondition.update_forward_refs() - -AzureTag.update_forward_refs() +SourceTagAttachmentValue.update_forward_refs() StarredDetails.update_forward_refs() +AzureTag.update_forward_refs() + AuthPolicyCondition.update_forward_refs() AwsTag.update_forward_refs()