diff --git a/pyatlan_v9/model/assets/__init__.py b/pyatlan_v9/model/assets/__init__.py index 675acfd66..ed6cb1d02 100644 --- a/pyatlan_v9/model/assets/__init__.py +++ b/pyatlan_v9/model/assets/__init__.py @@ -1,1063 +1,106 @@ -# Auto-generated by PythonMsgspecRenderer.pkl — DO NOT EDIT +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT # SPDX-License-Identifier: Apache-2.0 # Copyright 2024 Atlan Pte. Ltd. -# isort: skip_file +# +# NOTE: The lazy-loading logic below (_build_lazy_index / __getattr__) is +# intentionally NOT auto-generated — preserve it when regenerating this file. +# It replaces the previous eager _discover_and_export() call which imported +# all ~80 _init_*.py modules at package import time regardless of which types +# were actually needed, causing unnecessary memory usage in apps that only use +# a handful of models. -import lazy_loader as lazy +""" +PyAtlan Models - Auto-generated asset model classes. -from .entity import AtlasClassification, Entity, TermAssignment -from .related_entity import RelatedEntity, SaveSemantic +This module lazily re-exports all types from _init_*.py modules. +New modules are automatically picked up without needing to regenerate this file. 
-__PYATLAN_V9_ASSETS__ = { - "_init_access_control": [ - "AccessControl", - "AuthPolicy", - "AuthService", - "Persona", - "Purpose", - "RelatedAccessControl", - "RelatedAuthPolicy", - "RelatedAuthService", - "RelatedPersona", - "RelatedPurpose", - ], - "_init_adf": [ - "ADF", - "AdfActivity", - "AdfDataflow", - "AdfDataset", - "AdfLinkedservice", - "AdfPipeline", - "RelatedADF", - "RelatedAdfActivity", - "RelatedAdfDataflow", - "RelatedAdfDataset", - "RelatedAdfLinkedservice", - "RelatedAdfPipeline", - ], - "_init_adls": [ - "ADLS", - "ADLSAccount", - "ADLSContainer", - "ADLSObject", - "RelatedADLS", - "RelatedADLSAccount", - "RelatedADLSContainer", - "RelatedADLSObject", - ], - "_init_ai": [ - "AI", - "AIApplication", - "AIModel", - "AIModelVersion", - "RelatedAI", - "RelatedAIApplication", - "RelatedAIModel", - "RelatedAIModelVersion", - ], - "_init_airflow": [ - "Airflow", - "AirflowDag", - "AirflowTask", - "RelatedAirflow", - "RelatedAirflowDag", - "RelatedAirflowTask", - ], - "_init_anaplan": [ - "Anaplan", - "AnaplanApp", - "AnaplanDimension", - "AnaplanLineItem", - "AnaplanList", - "AnaplanModel", - "AnaplanModule", - "AnaplanPage", - "AnaplanSystemDimension", - "AnaplanView", - "AnaplanWorkspace", - "RelatedAnaplan", - "RelatedAnaplanApp", - "RelatedAnaplanDimension", - "RelatedAnaplanLineItem", - "RelatedAnaplanList", - "RelatedAnaplanModel", - "RelatedAnaplanModule", - "RelatedAnaplanPage", - "RelatedAnaplanSystemDimension", - "RelatedAnaplanView", - "RelatedAnaplanWorkspace", - ], - "_init_anomalo": [ - "Anomalo", - "AnomaloCheck", - "RelatedAnomalo", - "RelatedAnomaloCheck", - ], - "_init_api": [ - "API", - "APIField", - "APIObject", - "APIPath", - "APIQuery", - "APISpec", - "RelatedAPI", - "RelatedAPIField", - "RelatedAPIObject", - "RelatedAPIPath", - "RelatedAPIQuery", - "RelatedAPISpec", - ], - "_init_app": [ - "App", - "Application", - "ApplicationField", - "RelatedApp", - "RelatedApplication", - "RelatedApplicationField", - ], - 
"_init_app_workflow_run": ["AppWorkflowRun", "RelatedAppWorkflowRun"], - "_init_asset": [ - "Asset", - "DataSet", - "Incident", - "Infrastructure", - "ProcessExecution", - "RelatedAsset", - "RelatedDataSet", - "RelatedIncident", - "RelatedInfrastructure", - "RelatedProcessExecution", - ], - "_init_asset_grouping": [ - "AssetGrouping", - "AssetGroupingCollection", - "AssetGroupingStrategy", - "RelatedAssetGrouping", - "RelatedAssetGroupingCollection", - "RelatedAssetGroupingStrategy", - ], - "_init_atlan_app": [ - "AtlanApp", - "AtlanAppDeployment", - "AtlanAppInstalled", - "AtlanAppTool", - "AtlanAppWorkflow", - "RelatedAtlanApp", - "RelatedAtlanAppDeployment", - "RelatedAtlanAppInstalled", - "RelatedAtlanAppTool", - "RelatedAtlanAppWorkflow", - ], - "_init_azure_service_bus": [ - "AzureServiceBus", - "AzureServiceBusNamespace", - "AzureServiceBusSchema", - "AzureServiceBusTopic", - "RelatedAzureServiceBus", - "RelatedAzureServiceBusNamespace", - "RelatedAzureServiceBusSchema", - "RelatedAzureServiceBusTopic", - ], - "_init_bigquery": [ - "BigqueryRoutine", - "RelatedBigqueryRoutine", - "RelatedBigqueryTag", - ], - "_init_business_policy": [ - "BusinessPolicy", - "BusinessPolicyException", - "BusinessPolicyIncident", - "BusinessPolicyLog", - "RelatedBusinessPolicy", - "RelatedBusinessPolicyException", - "RelatedBusinessPolicyIncident", - "RelatedBusinessPolicyLog", - ], - "_init_cassandra": [ - "Cassandra", - "CassandraColumn", - "CassandraIndex", - "CassandraKeyspace", - "CassandraTable", - "CassandraView", - "RelatedCassandra", - "RelatedCassandraColumn", - "RelatedCassandraIndex", - "RelatedCassandraKeyspace", - "RelatedCassandraTable", - "RelatedCassandraView", - ], - "_init_catalog": [ - "BI", - "Catalog", - "EventStore", - "Insight", - "NoSQL", - "ObjectStore", - "RelatedBI", - "RelatedCatalog", - "RelatedEventStore", - "RelatedInsight", - "RelatedNoSQL", - "RelatedObjectStore", - "RelatedSaaS", - "SaaS", - ], - "_init_cloud": [ - "AWS", - "Azure", - "Cloud", 
- "Google", - "RelatedAWS", - "RelatedAzure", - "RelatedCloud", - "RelatedGoogle", - ], - "_init_cognite": [ - "Cognite", - "Cognite3DModel", - "CogniteAsset", - "CogniteEvent", - "CogniteFile", - "CogniteSequence", - "CogniteTimeSeries", - "RelatedCognite", - "RelatedCognite3DModel", - "RelatedCogniteAsset", - "RelatedCogniteEvent", - "RelatedCogniteFile", - "RelatedCogniteSequence", - "RelatedCogniteTimeSeries", - ], - "_init_cognos": [ - "Cognos", - "CognosColumn", - "CognosDashboard", - "CognosDataset", - "CognosDatasource", - "CognosExploration", - "CognosFile", - "CognosFolder", - "CognosModule", - "CognosPackage", - "CognosReport", - "RelatedCognos", - "RelatedCognosColumn", - "RelatedCognosDashboard", - "RelatedCognosDataset", - "RelatedCognosDatasource", - "RelatedCognosExploration", - "RelatedCognosFile", - "RelatedCognosFolder", - "RelatedCognosModule", - "RelatedCognosPackage", - "RelatedCognosReport", - ], - "_init_connection": ["Connection", "RelatedConnection"], - "_init_cosmos_mongo_db": [ - "CosmosMongoDB", - "CosmosMongoDBAccount", - "CosmosMongoDBCollection", - "CosmosMongoDBDatabase", - "RelatedCosmosMongoDB", - "RelatedCosmosMongoDBAccount", - "RelatedCosmosMongoDBCollection", - "RelatedCosmosMongoDBDatabase", - ], - "_init_cube": [ - "Cube", - "CubeDimension", - "CubeField", - "CubeHierarchy", - "MultiDimensionalDataset", - "RelatedCube", - "RelatedCubeDimension", - "RelatedCubeField", - "RelatedCubeHierarchy", - "RelatedMultiDimensionalDataset", - ], - "_init_custom": [ - "Custom", - "CustomEntity", - "RelatedCustom", - "RelatedCustomEntity", - ], - "_init_data_contract": ["DataContract", "RelatedDataContract"], - "_init_data_mesh": [ - "DataDomain", - "DataMesh", - "DataMeshDataset", - "DataProduct", - "RelatedDataDomain", - "RelatedDataMesh", - "RelatedDataMeshDataset", - "RelatedDataProduct", - "RelatedStakeholder", - "RelatedStakeholderTitle", - ], - "_init_data_quality": [ - "DataQuality", - "DataQualityRule", - 
"DataQualityRuleTemplate", - "Metric", - "RelatedDataQuality", - "RelatedDataQualityRule", - "RelatedDataQualityRuleTemplate", - "RelatedMetric", - ], - "_init_data_studio": [ - "DataStudio", - "DataStudioAsset", - "RelatedDataStudio", - "RelatedDataStudioAsset", - ], - "_init_databricks": [ - "Databricks", - "DatabricksAIModelContext", - "DatabricksAIModelVersion", - "DatabricksExternalLocation", - "DatabricksExternalLocationPath", - "DatabricksMetricView", - "DatabricksNotebook", - "DatabricksVolume", - "DatabricksVolumePath", - "RelatedDatabricks", - "RelatedDatabricksAIModelContext", - "RelatedDatabricksAIModelVersion", - "RelatedDatabricksExternalLocation", - "RelatedDatabricksExternalLocationPath", - "RelatedDatabricksMetricView", - "RelatedDatabricksNotebook", - "RelatedDatabricksUnityCatalogTag", - "RelatedDatabricksVolume", - "RelatedDatabricksVolumePath", - ], - "_init_dataverse": [ - "Dataverse", - "DataverseAttribute", - "DataverseEntity", - "RelatedDataverse", - "RelatedDataverseAttribute", - "RelatedDataverseEntity", - ], - "_init_dbt": [ - "Dbt", - "DbtColumnProcess", - "DbtDimension", - "DbtEntity", - "DbtMeasure", - "DbtMetric", - "DbtModel", - "DbtModelColumn", - "DbtProcess", - "DbtSeed", - "DbtSemanticModel", - "DbtSource", - "DbtTag", - "DbtTest", - "RelatedDbt", - "RelatedDbtColumnProcess", - "RelatedDbtDimension", - "RelatedDbtEntity", - "RelatedDbtMeasure", - "RelatedDbtMetric", - "RelatedDbtModel", - "RelatedDbtModelColumn", - "RelatedDbtProcess", - "RelatedDbtSeed", - "RelatedDbtSemanticModel", - "RelatedDbtSource", - "RelatedDbtTag", - "RelatedDbtTest", - ], - "_init_document_db": [ - "DocumentDB", - "DocumentDBCollection", - "DocumentDBDatabase", - "RelatedDocumentDB", - "RelatedDocumentDBCollection", - "RelatedDocumentDBDatabase", - ], - "_init_domo": [ - "Domo", - "DomoCard", - "DomoDashboard", - "DomoDataset", - "DomoDatasetColumn", - "RelatedDomo", - "RelatedDomoCard", - "RelatedDomoDashboard", - "RelatedDomoDataset", - 
"RelatedDomoDatasetColumn", - ], - "_init_dremio": [ - "Dremio", - "DremioColumn", - "DremioFolder", - "DremioPhysicalDataset", - "DremioSource", - "DremioSpace", - "DremioVirtualDataset", - "RelatedDremio", - "RelatedDremioColumn", - "RelatedDremioFolder", - "RelatedDremioPhysicalDataset", - "RelatedDremioSource", - "RelatedDremioSpace", - "RelatedDremioVirtualDataset", - ], - "_init_dynamo_db": [ - "DynamoDB", - "DynamoDBAttribute", - "DynamoDBSecondaryIndex", - "DynamoDBTable", - "RelatedDynamoDB", - "RelatedDynamoDBAttribute", - "RelatedDynamoDBGlobalSecondaryIndex", - "RelatedDynamoDBLocalSecondaryIndex", - "RelatedDynamoDBSecondaryIndex", - "RelatedDynamoDBTable", - ], - "_init_fabric": [ - "Fabric", - "FabricActivity", - "FabricDashboard", - "FabricDataPipeline", - "FabricDataflow", - "FabricDataflowEntityColumn", - "FabricPage", - "FabricReport", - "FabricSemanticModel", - "FabricSemanticModelTable", - "FabricSemanticModelTableColumn", - "FabricVisual", - "FabricWorkspace", - "RelatedFabric", - "RelatedFabricActivity", - "RelatedFabricDashboard", - "RelatedFabricDataPipeline", - "RelatedFabricDataflow", - "RelatedFabricDataflowEntityColumn", - "RelatedFabricPage", - "RelatedFabricReport", - "RelatedFabricSemanticModel", - "RelatedFabricSemanticModelTable", - "RelatedFabricSemanticModelTableColumn", - "RelatedFabricVisual", - "RelatedFabricWorkspace", - ], - "_init_fivetran": [ - "Fivetran", - "FivetranConnector", - "RelatedFivetran", - "RelatedFivetranConnector", - ], - "_init_flow": [ - "Flow", - "FlowControlOperation", - "FlowDataset", - "FlowDatasetOperation", - "FlowField", - "FlowFieldOperation", - "FlowFolder", - "FlowProject", - "FlowReusableUnit", - "RelatedFlow", - "RelatedFlowControlOperation", - "RelatedFlowDataset", - "RelatedFlowDatasetOperation", - "RelatedFlowField", - "RelatedFlowFieldOperation", - "RelatedFlowFolder", - "RelatedFlowProject", - "RelatedFlowReusableUnit", - ], - "_init_form": ["Form", "RelatedForm", "RelatedResponse"], - 
"_init_gcs": [ - "GCS", - "GCSBucket", - "GCSObject", - "RelatedGCS", - "RelatedGCSBucket", - "RelatedGCSObject", - ], - "_init_gtc": [ - "AtlasGlossary", - "AtlasGlossaryCategory", - "AtlasGlossaryTerm", - "RelatedAtlasGlossary", - "RelatedAtlasGlossaryCategory", - "RelatedAtlasGlossaryTerm", - ], - "_init_iceberg": [ - "Iceberg", - "IcebergCatalog", - "IcebergColumn", - "IcebergNamespace", - "IcebergTable", - "RelatedIceberg", - "RelatedIcebergCatalog", - "RelatedIcebergColumn", - "RelatedIcebergNamespace", - "RelatedIcebergTable", - ], - "_init_kafka": [ - "Kafka", - "KafkaCluster", - "KafkaConsumerGroup", - "KafkaField", - "KafkaTopic", - "RelatedAzureEventHub", - "RelatedAzureEventHubConsumerGroup", - "RelatedKafka", - "RelatedKafkaCluster", - "RelatedKafkaConsumerGroup", - "RelatedKafkaField", - "RelatedKafkaTopic", - ], - "_init_looker": [ - "Looker", - "LookerDashboard", - "LookerExplore", - "LookerField", - "LookerFolder", - "LookerLook", - "LookerModel", - "LookerProject", - "LookerQuery", - "LookerTile", - "LookerView", - "RelatedLooker", - "RelatedLookerDashboard", - "RelatedLookerExplore", - "RelatedLookerField", - "RelatedLookerFolder", - "RelatedLookerLook", - "RelatedLookerModel", - "RelatedLookerProject", - "RelatedLookerQuery", - "RelatedLookerTile", - "RelatedLookerView", - ], - "_init_manual": [ - "AccessControl", - "AuthPolicy", - "AzureEventHub", - "AzureEventHubConsumerGroup", - "Badge", - "BadgeCondition", - "Cognite3DModel", - "DataContract", - "Persona", - "Purpose", - "RelatedSuperset", - "RelatedSupersetChart", - "RelatedSupersetDashboard", - "RelatedSupersetDataset", - "SnowflakeDynamicTable", - "Superset", - "SupersetChart", - "SupersetDashboard", - "SupersetDataset", - ], - "_init_matillion": [ - "Matillion", - "MatillionComponent", - "MatillionGroup", - "MatillionJob", - "MatillionProject", - "RelatedMatillion", - "RelatedMatillionComponent", - "RelatedMatillionGroup", - "RelatedMatillionJob", - "RelatedMatillionProject", - ], - 
"_init_metabase": [ - "Metabase", - "MetabaseCollection", - "MetabaseDashboard", - "MetabaseQuestion", - "RelatedMetabase", - "RelatedMetabaseCollection", - "RelatedMetabaseDashboard", - "RelatedMetabaseQuestion", - ], - "_init_micro_strategy": [ - "MicroStrategy", - "MicroStrategyAttribute", - "MicroStrategyColumn", - "MicroStrategyCube", - "MicroStrategyDocument", - "MicroStrategyDossier", - "MicroStrategyFact", - "MicroStrategyMetric", - "MicroStrategyProject", - "MicroStrategyReport", - "MicroStrategyVisualization", - "RelatedMicroStrategy", - "RelatedMicroStrategyAttribute", - "RelatedMicroStrategyColumn", - "RelatedMicroStrategyCube", - "RelatedMicroStrategyDocument", - "RelatedMicroStrategyDossier", - "RelatedMicroStrategyFact", - "RelatedMicroStrategyMetric", - "RelatedMicroStrategyProject", - "RelatedMicroStrategyReport", - "RelatedMicroStrategyVisualization", - ], - "_init_mode": [ - "Mode", - "ModeChart", - "ModeCollection", - "ModeQuery", - "ModeReport", - "ModeWorkspace", - "RelatedMode", - "RelatedModeChart", - "RelatedModeCollection", - "RelatedModeQuery", - "RelatedModeReport", - "RelatedModeWorkspace", - ], - "_init_model": [ - "Model", - "ModelAttribute", - "ModelAttributeAssociation", - "ModelDataModel", - "ModelEntity", - "ModelEntityAssociation", - "ModelVersion", - "RelatedModel", - "RelatedModelAttribute", - "RelatedModelAttributeAssociation", - "RelatedModelDataModel", - "RelatedModelEntity", - "RelatedModelEntityAssociation", - "RelatedModelVersion", - ], - "_init_mongo_db": [ - "MongoDB", - "MongoDBCollection", - "MongoDBDatabase", - "RelatedMongoDB", - "RelatedMongoDBCollection", - "RelatedMongoDBDatabase", - ], - "_init_monte_carlo": [ - "MCIncident", - "MCMonitor", - "MonteCarlo", - "RelatedMCIncident", - "RelatedMCMonitor", - "RelatedMonteCarlo", - ], - "_init_namespace": [ - "Collection", - "Folder", - "Namespace", - "RelatedCollection", - "RelatedFolder", - "RelatedNamespace", - ], - "_init_notebook": ["Notebook", "RelatedNotebook"], 
- "_init_partial": [ - "Partial", - "PartialField", - "PartialObject", - "RelatedPartial", - "RelatedPartialField", - "RelatedPartialObject", - ], - "_init_power_bi": [ - "PowerBI", - "PowerBIApp", - "PowerBIColumn", - "PowerBIDashboard", - "PowerBIDataflow", - "PowerBIDataflowEntityColumn", - "PowerBIDataset", - "PowerBIDatasource", - "PowerBIMeasure", - "PowerBIPage", - "PowerBIReport", - "PowerBITable", - "PowerBITile", - "PowerBIWorkspace", - "RelatedPowerBI", - "RelatedPowerBIApp", - "RelatedPowerBIColumn", - "RelatedPowerBIDashboard", - "RelatedPowerBIDataflow", - "RelatedPowerBIDataflowEntityColumn", - "RelatedPowerBIDataset", - "RelatedPowerBIDatasource", - "RelatedPowerBIMeasure", - "RelatedPowerBIPage", - "RelatedPowerBIReport", - "RelatedPowerBITable", - "RelatedPowerBITile", - "RelatedPowerBIWorkspace", - ], - "_init_preset": [ - "Preset", - "PresetChart", - "PresetDashboard", - "PresetDataset", - "PresetWorkspace", - "RelatedPreset", - "RelatedPresetChart", - "RelatedPresetDashboard", - "RelatedPresetDataset", - "RelatedPresetWorkspace", - ], - "_init_process": [ - "BIProcess", - "ColumnProcess", - "Process", - "RelatedBIProcess", - "RelatedColumnProcess", - "RelatedConnectionProcess", - "RelatedProcess", - ], - "_init_qlik": [ - "Qlik", - "QlikApp", - "QlikChart", - "QlikColumn", - "QlikDataset", - "QlikSheet", - "QlikSpace", - "RelatedQlik", - "RelatedQlikApp", - "RelatedQlikChart", - "RelatedQlikColumn", - "RelatedQlikDataset", - "RelatedQlikSheet", - "RelatedQlikSpace", - "RelatedQlikStream", - ], - "_init_quick_sight": [ - "QuickSight", - "QuickSightAnalysis", - "QuickSightAnalysisVisual", - "QuickSightDashboard", - "QuickSightDashboardVisual", - "QuickSightDataset", - "QuickSightDatasetField", - "QuickSightFolder", - "RelatedQuickSight", - "RelatedQuickSightAnalysis", - "RelatedQuickSightAnalysisVisual", - "RelatedQuickSightDashboard", - "RelatedQuickSightDashboardVisual", - "RelatedQuickSightDataset", - "RelatedQuickSightDatasetField", - 
"RelatedQuickSightFolder", - ], - "_init_redash": [ - "Redash", - "RedashDashboard", - "RedashQuery", - "RedashVisualization", - "RelatedRedash", - "RelatedRedashDashboard", - "RelatedRedashQuery", - "RelatedRedashVisualization", - ], - "_init_referenceable": ["Referenceable", "RelatedReferenceable"], - "_init_resource": [ - "File", - "Link", - "Readme", - "ReadmeTemplate", - "RelatedBadge", - "RelatedFile", - "RelatedLink", - "RelatedReadme", - "RelatedReadmeTemplate", - "RelatedResource", - "Related__internal", - "Resource", - ], - "_init_s3": [ - "RelatedS3", - "RelatedS3Bucket", - "RelatedS3Object", - "RelatedS3Prefix", - "S3", - "S3Bucket", - "S3Object", - "S3Prefix", - ], - "_init_sage_maker": [ - "RelatedSageMaker", - "RelatedSageMakerFeature", - "RelatedSageMakerFeatureGroup", - "RelatedSageMakerModel", - "RelatedSageMakerModelDeployment", - "RelatedSageMakerModelGroup", - "SageMaker", - "SageMakerFeature", - "SageMakerFeatureGroup", - "SageMakerModel", - "SageMakerModelDeployment", - "SageMakerModelGroup", - ], - "_init_sage_maker_unified_studio": [ - "RelatedSageMakerUnifiedStudio", - "RelatedSageMakerUnifiedStudioAsset", - "RelatedSageMakerUnifiedStudioAssetSchema", - "RelatedSageMakerUnifiedStudioProject", - "RelatedSageMakerUnifiedStudioPublishedAsset", - "RelatedSageMakerUnifiedStudioSubscribedAsset", - "SageMakerUnifiedStudio", - "SageMakerUnifiedStudioAsset", - "SageMakerUnifiedStudioAssetSchema", - "SageMakerUnifiedStudioProject", - "SageMakerUnifiedStudioPublishedAsset", - "SageMakerUnifiedStudioSubscribedAsset", - ], - "_init_salesforce": [ - "RelatedSalesforce", - "RelatedSalesforceDashboard", - "RelatedSalesforceField", - "RelatedSalesforceObject", - "RelatedSalesforceOrganization", - "RelatedSalesforceReport", - "Salesforce", - "SalesforceDashboard", - "SalesforceField", - "SalesforceObject", - "SalesforceOrganization", - "SalesforceReport", - ], - "_init_sap": [ - "RelatedSAP", - "RelatedSapErpAbapProgram", - "RelatedSapErpCdsView", - 
"RelatedSapErpColumn", - "RelatedSapErpComponent", - "RelatedSapErpFunctionModule", - "RelatedSapErpTable", - "RelatedSapErpTransactionCode", - "RelatedSapErpView", - "SAP", - "SapErpAbapProgram", - "SapErpCdsView", - "SapErpColumn", - "SapErpComponent", - "SapErpFunctionModule", - "SapErpTable", - "SapErpTransactionCode", - "SapErpView", - ], - "_init_schema_registry": [ - "RelatedSchemaRegistry", - "RelatedSchemaRegistrySubject", - "RelatedSchemaRegistryVersion", - "SchemaRegistry", - "SchemaRegistrySubject", - "SchemaRegistryVersion", - ], - "_init_semantic": [ - "RelatedSemantic", - "RelatedSemanticDimension", - "RelatedSemanticEntity", - "RelatedSemanticField", - "RelatedSemanticMeasure", - "RelatedSemanticModel", - "Semantic", - "SemanticDimension", - "SemanticEntity", - "SemanticField", - "SemanticMeasure", - "SemanticModel", - ], - "_init_sigma": [ - "RelatedSigma", - "RelatedSigmaDataElement", - "RelatedSigmaDataElementField", - "RelatedSigmaDataset", - "RelatedSigmaDatasetColumn", - "RelatedSigmaPage", - "RelatedSigmaWorkbook", - "Sigma", - "SigmaDataElement", - "SigmaDataElementField", - "SigmaDataset", - "SigmaDatasetColumn", - "SigmaPage", - "SigmaWorkbook", - ], - "_init_sisense": [ - "RelatedSisense", - "RelatedSisenseDashboard", - "RelatedSisenseDatamodel", - "RelatedSisenseDatamodelTable", - "RelatedSisenseFolder", - "RelatedSisenseWidget", - "Sisense", - "SisenseDashboard", - "SisenseDatamodel", - "SisenseDatamodelTable", - "SisenseFolder", - "SisenseWidget", - ], - "_init_snowflake": [ - "RelatedSnowflake", - "RelatedSnowflakeAIModelContext", - "RelatedSnowflakeAIModelVersion", - "RelatedSnowflakeDynamicTable", - "RelatedSnowflakePipe", - "RelatedSnowflakeSemanticDimension", - "RelatedSnowflakeSemanticFact", - "RelatedSnowflakeSemanticLogicalTable", - "RelatedSnowflakeSemanticMetric", - "RelatedSnowflakeSemanticView", - "RelatedSnowflakeStage", - "RelatedSnowflakeStream", - "RelatedSnowflakeTag", - "Snowflake", - "SnowflakeAIModelContext", - 
"SnowflakeAIModelVersion", - "SnowflakeSemanticDimension", - "SnowflakeSemanticFact", - "SnowflakeSemanticLogicalTable", - "SnowflakeSemanticMetric", - "SnowflakeSemanticView", - ], - "_init_soda": [ - "RelatedSoda", - "RelatedSodaCheck", - "Soda", - "SodaCheck", - ], - "_init_spark": [ - "RelatedSpark", - "RelatedSparkJob", - "Spark", - "SparkJob", - ], - "_init_sql": [ - "CalculationView", - "Column", - "Database", - "Function", - "MaterialisedView", - "Procedure", - "Query", - "RelatedCalculationView", - "RelatedColumn", - "RelatedDatabase", - "RelatedFunction", - "RelatedMaterialisedView", - "RelatedProcedure", - "RelatedQuery", - "RelatedSQL", - "RelatedSchema", - "RelatedTable", - "RelatedTablePartition", - "RelatedView", - "SQL", - "Schema", - "Table", - "TablePartition", - "View", - ], - "_init_sql_insight": [ - "RelatedSqlInsight", - "RelatedSqlInsightBusinessQuestion", - "RelatedSqlInsightFilter", - "RelatedSqlInsightJoin", - "SqlInsight", - "SqlInsightBusinessQuestion", - "SqlInsightFilter", - "SqlInsightJoin", - ], - "_init_starburst": [ - "RelatedStarburst", - "RelatedStarburstDataset", - "RelatedStarburstDatasetColumn", - "Starburst", - "StarburstDataset", - "StarburstDatasetColumn", - ], - "_init_superset": [ - "RelatedSuperset", - "RelatedSupersetChart", - "RelatedSupersetDashboard", - "RelatedSupersetDataset", - "Superset", - "SupersetChart", - "SupersetDashboard", - "SupersetDataset", - ], - "_init_tableau": [ - "RelatedTableau", - "RelatedTableauCalculatedField", - "RelatedTableauDashboard", - "RelatedTableauDashboardField", - "RelatedTableauDatasource", - "RelatedTableauDatasourceField", - "RelatedTableauFlow", - "RelatedTableauMetric", - "RelatedTableauProject", - "RelatedTableauSite", - "RelatedTableauWorkbook", - "RelatedTableauWorksheet", - "RelatedTableauWorksheetField", - "Tableau", - "TableauCalculatedField", - "TableauDashboard", - "TableauDashboardField", - "TableauDatasource", - "TableauDatasourceField", - "TableauFlow", - 
"TableauMetric", - "TableauProject", - "TableauSite", - "TableauWorkbook", - "TableauWorksheet", - "TableauWorksheetField", - ], - "_init_tag": [ - "RelatedSourceTag", - "RelatedTag", - "RelatedTagAttachment", - "SourceTag", - "Tag", - ], - "_init_task": ["RelatedTask", "Task"], - "_init_thoughtspot": [ - "RelatedThoughtspot", - "RelatedThoughtspotAnswer", - "RelatedThoughtspotColumn", - "RelatedThoughtspotDashlet", - "RelatedThoughtspotLiveboard", - "RelatedThoughtspotTable", - "RelatedThoughtspotView", - "RelatedThoughtspotWorksheet", - "Thoughtspot", - "ThoughtspotAnswer", - "ThoughtspotColumn", - "ThoughtspotDashlet", - "ThoughtspotLiveboard", - "ThoughtspotTable", - "ThoughtspotView", - "ThoughtspotWorksheet", - ], - "_init_workflow": ["RelatedWorkflow", "RelatedWorkflowRun", "Workflow"], -} +Direct submodule imports are preferred and have zero overhead at import time:: -__getattr__, __dir__, __all__ = lazy.attach( - __name__, submod_attrs=__PYATLAN_V9_ASSETS__ -) + from pyatlan.models.column import Column # preferred — zero overhead -__all__ += [ - "AtlasClassification", +Package-level imports also work and only load the specific module needed:: + + from pyatlan.models import Column # lazy — only _init_sql.py loaded +""" + +from __future__ import annotations + +import ast +import importlib +import pkgutil +from pathlib import Path +from typing import Any + +# Base classes are always exported eagerly (they have no large transitive deps). +from .entity import AtlasClassification, Entity, TermAssignment +from .related_entity import RelatedEntity, SaveSemantic + +__all__ = [ + # Base classes "Entity", + "AtlasClassification", "TermAssignment", "RelatedEntity", "SaveSemantic", ] + +# Lazy index: class name → _init_module_name. +# Built once on first __getattr__ call via AST scanning (no module execution). +_lazy_index: dict[str, str] | None = None + + +def _build_lazy_index() -> dict[str, str]: + """Scan _init_*.py __all__ lists via AST to build a name→module map. 
+ + Uses ast.parse() so no model modules are executed or imported — only their + source text is read to extract the __all__ list. + """ + index: dict[str, str] = {} + current_dir = Path(__file__).parent + for module_info in pkgutil.iter_modules([str(current_dir)]): + if not module_info.name.startswith("_init_"): + continue + module_file = current_dir / f"{module_info.name}.py" + try: + tree = ast.parse(module_file.read_bytes()) + except SyntaxError: + continue + for node in ast.walk(tree): + if isinstance(node, ast.Assign): + for target in node.targets: + if isinstance(target, ast.Name) and target.id == "__all__": + if isinstance(node.value, (ast.List, ast.Tuple)): + for elt in node.value.elts: + if isinstance(elt, ast.Constant) and isinstance( + elt.value, str + ): + index[elt.value] = module_info.name + return index + + +def __getattr__(name: str) -> Any: + """Lazy import of model classes on first access. + + Called by Python when an attribute is not found in this module's globals. + Enables ``from pyatlan.models import Column`` without eagerly importing all + model modules at package import time. + + The result is cached in globals() so subsequent accesses are O(1) dict lookups + and never go through __getattr__ again. + """ + global _lazy_index + if _lazy_index is None: + _lazy_index = _build_lazy_index() + + module_name = _lazy_index.get(name) + if module_name is not None: + module = importlib.import_module(f".{module_name}", __package__) + val = getattr(module, name) + # Cache in globals so future accesses bypass __getattr__ entirely. 
+ globals()[name] = val + if name not in __all__: + __all__.append(name) + return val + + raise AttributeError(f"module 'pyatlan.models' has no attribute {name!r}") diff --git a/pyatlan_v9/model/assets/_init_agentic.py b/pyatlan_v9/model/assets/_init_agentic.py new file mode 100644 index 000000000..51d75c1d1 --- /dev/null +++ b/pyatlan_v9/model/assets/_init_agentic.py @@ -0,0 +1,17 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Agentic module exports. + +This module provides convenient imports for all Agentic types and their Related variants. +""" + +from .agentic_related import RelatedAgentic +from .agentic import Agentic + +__all__ = [ + "Agentic", + "RelatedAgentic", +] diff --git a/pyatlan_v9/model/assets/_init_artifact.py b/pyatlan_v9/model/assets/_init_artifact.py new file mode 100644 index 000000000..20528c615 --- /dev/null +++ b/pyatlan_v9/model/assets/_init_artifact.py @@ -0,0 +1,17 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Artifact module exports. + +This module provides convenient imports for all Artifact types and their Related variants. 
+""" + +from .artifact_related import RelatedArtifact +from .artifact import Artifact + +__all__ = [ + "Artifact", + "RelatedArtifact", +] diff --git a/pyatlan_v9/model/assets/_init_business_policy.py b/pyatlan_v9/model/assets/_init_business_policy.py index f2d6f2dfc..c96d6d1ad 100644 --- a/pyatlan_v9/model/assets/_init_business_policy.py +++ b/pyatlan_v9/model/assets/_init_business_policy.py @@ -9,9 +9,6 @@ """ from .business_policy import BusinessPolicy -from .business_policy_exception import BusinessPolicyException -from .business_policy_incident import BusinessPolicyIncident -from .business_policy_log import BusinessPolicyLog from .business_policy_related import ( RelatedBusinessPolicy, RelatedBusinessPolicyException, @@ -21,9 +18,6 @@ __all__ = [ "BusinessPolicy", - "BusinessPolicyException", - "BusinessPolicyIncident", - "BusinessPolicyLog", "RelatedBusinessPolicy", "RelatedBusinessPolicyException", "RelatedBusinessPolicyIncident", diff --git a/pyatlan_v9/model/assets/_init_context.py b/pyatlan_v9/model/assets/_init_context.py new file mode 100644 index 000000000..7751b1b1c --- /dev/null +++ b/pyatlan_v9/model/assets/_init_context.py @@ -0,0 +1,27 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Context module exports. + +This module provides convenient imports for all Context types and their Related variants. 
+""" + +from .context_related import ( + RelatedContext, + RelatedContextArtifact, + RelatedContextRepository, +) +from .context import Context +from .context_artifact import ContextArtifact +from .context_repository import ContextRepository + +__all__ = [ + "Context", + "ContextArtifact", + "ContextRepository", + "RelatedContext", + "RelatedContextArtifact", + "RelatedContextRepository", +] diff --git a/pyatlan_v9/model/assets/_init_gcp_dataplex.py b/pyatlan_v9/model/assets/_init_gcp_dataplex.py new file mode 100644 index 000000000..e58ade95d --- /dev/null +++ b/pyatlan_v9/model/assets/_init_gcp_dataplex.py @@ -0,0 +1,23 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +GCPDataplex module exports. + +This module provides convenient imports for all GCPDataplex types and their Related variants. +""" + +from .gcp_dataplex_related import ( + RelatedGCPDataplex, + RelatedGCPDataplexAspectType, +) +from .gcp_dataplex import GCPDataplex +from .gcp_dataplex_aspect_type import GCPDataplexAspectType + +__all__ = [ + "GCPDataplex", + "GCPDataplexAspectType", + "RelatedGCPDataplex", + "RelatedGCPDataplexAspectType", +] diff --git a/pyatlan_v9/model/assets/_init_skill.py b/pyatlan_v9/model/assets/_init_skill.py new file mode 100644 index 000000000..05c1956a0 --- /dev/null +++ b/pyatlan_v9/model/assets/_init_skill.py @@ -0,0 +1,17 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Skill module exports. + +This module provides convenient imports for all Skill types and their Related variants. 
+""" + +from .skill_related import RelatedSkill +from .skill import Skill + +__all__ = [ + "RelatedSkill", + "Skill", +] diff --git a/pyatlan_v9/model/assets/_init_skill_artifact.py b/pyatlan_v9/model/assets/_init_skill_artifact.py new file mode 100644 index 000000000..7d9b6b6a7 --- /dev/null +++ b/pyatlan_v9/model/assets/_init_skill_artifact.py @@ -0,0 +1,17 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +SkillArtifact module exports. + +This module provides convenient imports for all SkillArtifact types and their Related variants. +""" + +from .skill_artifact_related import RelatedSkillArtifact +from .skill_artifact import SkillArtifact + +__all__ = [ + "RelatedSkillArtifact", + "SkillArtifact", +] diff --git a/pyatlan_v9/model/assets/access_control.py b/pyatlan_v9/model/assets/access_control.py index f0eb6a2a5..291c7f048 100644 --- a/pyatlan_v9/model/assets/access_control.py +++ b/pyatlan_v9/model/assets/access_control.py @@ -1,68 +1,250 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 # SPDX-License-Identifier: Apache-2.0 -# Copyright 2026 Atlan Pte. Ltd. +# Copyright 2024 Atlan Pte. Ltd. -"""AccessControl asset model for pyatlan_v9.""" +""" +AccessControl asset model with flattened inheritance. 
+ +This module provides: +- AccessControl: Flat asset class (easy to use) +- AccessControlAttributes: Nested attributes struct (extends AssetAttributes) +- AccessControlNested: Nested API format struct +""" from __future__ import annotations -from typing import Any, ClassVar, Set, Union +from typing import Any, ClassVar, List, Union from msgspec import UNSET, UnsetType from pyatlan_v9.model.conversion_utils import ( - build_attributes_kwargs, - build_flat_kwargs, + categorize_relationships, merge_relationships, ) from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .asset import Asset, AssetAttributes, AssetNested -from .auth_policy import AuthPolicy +from .access_control_related import RelatedAuthPolicy +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= @register_asset class AccessControl(Asset): - """AccessControl asset — base type for Persona and Purpose access policies.""" + """ + Atlan Type 
representing parent model for Persona, Purpose + """ - IS_ACCESS_CONTROL_ENABLED: ClassVar[Any] = None - DENY_SIDEBAR_TABS: ClassVar[Any] = None - DENY_CUSTOM_METADATA_GUIDS: ClassVar[Any] = None + CHANNEL_LINK: ClassVar[Any] = None + DEFAULT_NAVIGATION: ClassVar[Any] = None + DENY_ASSET_FILTERS: ClassVar[Any] = None DENY_ASSET_METADATA_TYPES: ClassVar[Any] = None DENY_ASSET_TABS: ClassVar[Any] = None - DENY_ASSET_FILTERS: ClassVar[Any] = None - CHANNEL_LINK: ClassVar[Any] = None DENY_ASSET_TYPES: ClassVar[Any] = None + DENY_CUSTOM_METADATA_GUIDS: ClassVar[Any] = None DENY_NAVIGATION_PAGES: ClassVar[Any] = None - DEFAULT_NAVIGATION: ClassVar[Any] = None + DENY_SIDEBAR_TABS: ClassVar[Any] = None DISPLAY_PREFERENCES: ClassVar[Any] = None + IS_ACCESS_CONTROL_ENABLED: ClassVar[Any] = None POLICIES: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None type_name: Union[str, UnsetType] = "AccessControl" - is_access_control_enabled: Union[bool, None, UnsetType] = UNSET - deny_sidebar_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_custom_metadata_guids: Union[Set[str], None, UnsetType] = UNSET - deny_asset_metadata_types: 
Union[Set[str], None, UnsetType] = UNSET - deny_asset_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_asset_filters: Union[Set[str], None, UnsetType] = UNSET + channel_link: Union[str, None, UnsetType] = UNSET - deny_asset_types: Union[Set[str], None, UnsetType] = UNSET - deny_navigation_pages: Union[Set[str], None, UnsetType] = UNSET + """TBC""" + default_navigation: Union[str, None, UnsetType] = UNSET - display_preferences: Union[Set[str], None, UnsetType] = UNSET - policies: Union[list[AuthPolicy], None, UnsetType] = UNSET + """TBC""" + + deny_asset_filters: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_metadata_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_custom_metadata_guids: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_navigation_pages: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_sidebar_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + display_preferences: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + is_access_control_enabled: Union[bool, None, UnsetType] = UNSET + """TBC""" + + policies: Union[List[RelatedAuthPolicy], None, UnsetType] = UNSET + """Access control entity to which this policy applies.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest 
certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + def 
__post_init__(self) -> None: + self.type_name = "AccessControl" + + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= def to_json(self, nested: bool = True, serde: Serde | None = None) -> str: + """ + Convert to JSON string using optimized nested struct serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. + + Returns: + JSON string representation + """ if serde is None: serde = get_serde() if nested: - return _access_control_to_nested_bytes(self, serde).decode("utf-8") - return serde.encode(self).decode("utf-8") + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _access_control_to_nested_bytes(self, serde) @staticmethod - def from_json( - json_data: Union[str, bytes], serde: Serde | None = None - ) -> "AccessControl": + def from_json(json_data: str | bytes, serde: Serde | None = None) -> AccessControl: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. 
+ + Returns: + AccessControl instance + """ if isinstance(json_data, str): json_data = json_data.encode("utf-8") if serde is None: @@ -70,113 +252,356 @@ def from_json( return _access_control_from_nested_bytes(json_data, serde) -# --------------------------------------------------------------------------- -# Deferred field descriptor initialization -# --------------------------------------------------------------------------- -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - RelationField, - TextField, -) - -AccessControl.IS_ACCESS_CONTROL_ENABLED = BooleanField( - "isAccessControlEnabled", "isAccessControlEnabled" -) -AccessControl.DENY_SIDEBAR_TABS = KeywordField("denySidebarTabs", "denySidebarTabs") -AccessControl.DENY_CUSTOM_METADATA_GUIDS = KeywordField( - "denyCustomMetadataGuids", "denyCustomMetadataGuids" -) -AccessControl.DENY_ASSET_METADATA_TYPES = KeywordField( - "denyAssetMetadataTypes", "denyAssetMetadataTypes" -) -AccessControl.DENY_ASSET_TABS = KeywordField("denyAssetTabs", "denyAssetTabs") -AccessControl.DENY_ASSET_FILTERS = TextField("denyAssetFilters", "denyAssetFilters") -AccessControl.CHANNEL_LINK = TextField("channelLink", "channelLink") -AccessControl.DENY_ASSET_TYPES = TextField("denyAssetTypes", "denyAssetTypes") -AccessControl.DENY_NAVIGATION_PAGES = TextField( - "denyNavigationPages", "denyNavigationPages" -) -AccessControl.DEFAULT_NAVIGATION = TextField("defaultNavigation", "defaultNavigation") -AccessControl.DISPLAY_PREFERENCES = KeywordField( - "displayPreferences", "displayPreferences" -) -AccessControl.POLICIES = RelationField("policies") - - # ============================================================================= # NESTED FORMAT CLASSES # ============================================================================= class AccessControlAttributes(AssetAttributes): - is_access_control_enabled: Union[bool, None, UnsetType] = UNSET - deny_sidebar_tabs: Union[Set[str], None, UnsetType] = UNSET - 
deny_custom_metadata_guids: Union[Set[str], None, UnsetType] = UNSET - deny_asset_metadata_types: Union[Set[str], None, UnsetType] = UNSET - deny_asset_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_asset_filters: Union[Set[str], None, UnsetType] = UNSET + """AccessControl-specific attributes for nested API format.""" + channel_link: Union[str, None, UnsetType] = UNSET - deny_asset_types: Union[Set[str], None, UnsetType] = UNSET - deny_navigation_pages: Union[Set[str], None, UnsetType] = UNSET + """TBC""" + default_navigation: Union[str, None, UnsetType] = UNSET - display_preferences: Union[Set[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_filters: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_metadata_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_custom_metadata_guids: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_navigation_pages: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_sidebar_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + display_preferences: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + is_access_control_enabled: Union[bool, None, UnsetType] = UNSET + """TBC""" + + +class AccessControlRelationshipAttributes(AssetRelationshipAttributes): + """AccessControl-specific relationship attributes for nested API format.""" + + policies: Union[List[RelatedAuthPolicy], None, UnsetType] = UNSET + """Access control entity to which this policy applies.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" 
+ + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + 
schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" class AccessControlNested(AssetNested): + """AccessControl in nested API format for high-performance serialization.""" + attributes: Union[AccessControlAttributes, UnsetType] = UNSET + relationship_attributes: Union[AccessControlRelationshipAttributes, UnsetType] = ( + UNSET + ) + append_relationship_attributes: Union[ + AccessControlRelationshipAttributes, UnsetType + ] = UNSET + remove_relationship_attributes: Union[ + AccessControlRelationshipAttributes, UnsetType + ] = UNSET -def _access_control_to_nested(ac: AccessControl) -> AccessControlNested: - attrs_kwargs = build_attributes_kwargs(ac, AccessControlAttributes) - attrs = AccessControlAttributes(**attrs_kwargs) +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_ACCESS_CONTROL_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, + "policies", + "anomalo_checks", + "application", + "application_field", + "data_contract_latest", + "data_contract_latest_certified", + "output_port_data_products", + "input_port_data_products", + "metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + "mc_monitors", + "mc_incidents", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + "soda_checks", +] + + +def _populate_access_control_attrs( + attrs: AccessControlAttributes, obj: AccessControl +) -> None: + """Populate AccessControl-specific attributes on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.channel_link = obj.channel_link + attrs.default_navigation = obj.default_navigation 
+ attrs.deny_asset_filters = obj.deny_asset_filters + attrs.deny_asset_metadata_types = obj.deny_asset_metadata_types + attrs.deny_asset_tabs = obj.deny_asset_tabs + attrs.deny_asset_types = obj.deny_asset_types + attrs.deny_custom_metadata_guids = obj.deny_custom_metadata_guids + attrs.deny_navigation_pages = obj.deny_navigation_pages + attrs.deny_sidebar_tabs = obj.deny_sidebar_tabs + attrs.display_preferences = obj.display_preferences + attrs.is_access_control_enabled = obj.is_access_control_enabled + + +def _extract_access_control_attrs(attrs: AccessControlAttributes) -> dict: + """Extract all AccessControl attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["channel_link"] = attrs.channel_link + result["default_navigation"] = attrs.default_navigation + result["deny_asset_filters"] = attrs.deny_asset_filters + result["deny_asset_metadata_types"] = attrs.deny_asset_metadata_types + result["deny_asset_tabs"] = attrs.deny_asset_tabs + result["deny_asset_types"] = attrs.deny_asset_types + result["deny_custom_metadata_guids"] = attrs.deny_custom_metadata_guids + result["deny_navigation_pages"] = attrs.deny_navigation_pages + result["deny_sidebar_tabs"] = attrs.deny_sidebar_tabs + result["display_preferences"] = attrs.display_preferences + result["is_access_control_enabled"] = attrs.is_access_control_enabled + return result + + +# ============================================================================= +# CONVERSION FUNCTIONS +# ============================================================================= + + +def _access_control_to_nested(access_control: AccessControl) -> AccessControlNested: + """Convert flat AccessControl to nested format.""" + attrs = AccessControlAttributes() + _populate_access_control_attrs(attrs, access_control) + # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE) + replace_rels, append_rels, remove_rels = categorize_relationships( + access_control, 
_ACCESS_CONTROL_REL_FIELDS, AccessControlRelationshipAttributes + ) return AccessControlNested( - guid=ac.guid, - type_name=ac.type_name, - status=ac.status, - version=ac.version, - create_time=ac.create_time, - update_time=ac.update_time, - created_by=ac.created_by, - updated_by=ac.updated_by, - classifications=ac.classifications, - classification_names=ac.classification_names, - meanings=ac.meanings, - labels=ac.labels, - business_attributes=ac.business_attributes, - custom_attributes=ac.custom_attributes, - pending_tasks=ac.pending_tasks, - proxy=ac.proxy, - is_incomplete=ac.is_incomplete, - provenance_type=ac.provenance_type, - home_id=ac.home_id, + guid=access_control.guid, + type_name=access_control.type_name, + status=access_control.status, + version=access_control.version, + create_time=access_control.create_time, + update_time=access_control.update_time, + created_by=access_control.created_by, + updated_by=access_control.updated_by, + classifications=access_control.classifications, + classification_names=access_control.classification_names, + meanings=access_control.meanings, + labels=access_control.labels, + business_attributes=access_control.business_attributes, + custom_attributes=access_control.custom_attributes, + pending_tasks=access_control.pending_tasks, + proxy=access_control.proxy, + is_incomplete=access_control.is_incomplete, + provenance_type=access_control.provenance_type, + home_id=access_control.home_id, attributes=attrs, + relationship_attributes=replace_rels, + append_relationship_attributes=append_rels, + remove_relationship_attributes=remove_rels, ) def _access_control_from_nested(nested: AccessControlNested) -> AccessControl: + """Convert nested format to flat AccessControl.""" attrs = ( nested.attributes if nested.attributes is not UNSET else AccessControlAttributes() ) + # Merge relationships from all three buckets merged_rels = merge_relationships( nested.relationship_attributes, nested.append_relationship_attributes, 
nested.remove_relationship_attributes, - [], - object, + _ACCESS_CONTROL_REL_FIELDS, + AccessControlRelationshipAttributes, ) - kwargs = build_flat_kwargs( - nested, attrs, merged_rels, AssetNested, AccessControlAttributes + return AccessControl( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + created_by=nested.created_by, + updated_by=nested.updated_by, + classifications=nested.classifications, + classification_names=nested.classification_names, + meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + custom_attributes=nested.custom_attributes, + pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_access_control_attrs(attrs), + # Merged relationship attributes + **merged_rels, ) - return AccessControl(**kwargs) -def _access_control_to_nested_bytes(ac: AccessControl, serde: Serde) -> bytes: - return serde.encode(_access_control_to_nested(ac)) +def _access_control_to_nested_bytes( + access_control: AccessControl, serde: Serde +) -> bytes: + """Convert flat AccessControl to nested JSON bytes.""" + return serde.encode(_access_control_to_nested(access_control)) def _access_control_from_nested_bytes(data: bytes, serde: Serde) -> AccessControl: + """Convert nested JSON bytes to flat AccessControl.""" nested = serde.decode(data, AccessControlNested) return _access_control_from_nested(nested) + + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# --------------------------------------------------------------------------- +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, + KeywordField, + RelationField, +) + +AccessControl.CHANNEL_LINK = KeywordField("channelLink", "channelLink") 
+AccessControl.DEFAULT_NAVIGATION = KeywordField( + "defaultNavigation", "defaultNavigation" +) +AccessControl.DENY_ASSET_FILTERS = KeywordField("denyAssetFilters", "denyAssetFilters") +AccessControl.DENY_ASSET_METADATA_TYPES = KeywordField( + "denyAssetMetadataTypes", "denyAssetMetadataTypes" +) +AccessControl.DENY_ASSET_TABS = KeywordField("denyAssetTabs", "denyAssetTabs") +AccessControl.DENY_ASSET_TYPES = KeywordField("denyAssetTypes", "denyAssetTypes") +AccessControl.DENY_CUSTOM_METADATA_GUIDS = KeywordField( + "denyCustomMetadataGuids", "denyCustomMetadataGuids" +) +AccessControl.DENY_NAVIGATION_PAGES = KeywordField( + "denyNavigationPages", "denyNavigationPages" +) +AccessControl.DENY_SIDEBAR_TABS = KeywordField("denySidebarTabs", "denySidebarTabs") +AccessControl.DISPLAY_PREFERENCES = KeywordField( + "displayPreferences", "displayPreferences" +) +AccessControl.IS_ACCESS_CONTROL_ENABLED = BooleanField( + "isAccessControlEnabled", "isAccessControlEnabled" +) +AccessControl.POLICIES = RelationField("policies") +AccessControl.ANOMALO_CHECKS = RelationField("anomaloChecks") +AccessControl.APPLICATION = RelationField("application") +AccessControl.APPLICATION_FIELD = RelationField("applicationField") +AccessControl.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +AccessControl.DATA_CONTRACT_LATEST_CERTIFIED = RelationField( + "dataContractLatestCertified" +) +AccessControl.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +AccessControl.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +AccessControl.METRICS = RelationField("metrics") +AccessControl.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +AccessControl.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AccessControl.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) +AccessControl.MEANINGS = RelationField("meanings") +AccessControl.MC_MONITORS = RelationField("mcMonitors") 
+AccessControl.MC_INCIDENTS = RelationField("mcIncidents") +AccessControl.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +AccessControl.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +AccessControl.FILES = RelationField("files") +AccessControl.LINKS = RelationField("links") +AccessControl.README = RelationField("readme") +AccessControl.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +AccessControl.SODA_CHECKS = RelationField("sodaChecks") diff --git a/pyatlan_v9/model/assets/adf.py b/pyatlan_v9/model/assets/adf.py index 2e48b3945..b5c7ddb64 100644 --- a/pyatlan_v9/model/assets/adf.py +++ b/pyatlan_v9/model/assets/adf.py @@ -25,7 +25,6 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .adf_related import RelatedADF from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -41,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +80,7 @@ class ADF(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +98,8 @@ class ADF(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADF" + adf_factory_name: Union[str, None, 
UnsetType] = UNSET """Defines the name of the factory in which this asset exists.""" @@ -152,6 +155,11 @@ class ADF(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -207,66 +215,6 @@ class ADF(Asset): def __post_init__(self) -> None: self.type_name = "ADF" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADF instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ADF validation failed: {errors}") - - def minimize(self) -> "ADF": - """ - Return a minimal copy of this ADF with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADF with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ADF instance with only the minimum required fields. - """ - self.validate() - return ADF(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADF": - """ - Create a :class:`RelatedADF` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADF reference to this asset. - """ - if self.guid is not UNSET: - return RelatedADF(guid=self.guid) - return RelatedADF(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -381,6 +329,11 @@ class ADFRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -463,6 +416,7 @@ class ADFNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -532,9 +486,6 @@ def _adf_to_nested(adf: ADF) -> ADFNested: is_incomplete=adf.is_incomplete, provenance_type=adf.provenance_type, home_id=adf.home_id, - depth=adf.depth, - immediate_upstream=adf.immediate_upstream, - immediate_downstream=adf.immediate_downstream, attributes=attrs, 
relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -564,6 +515,7 @@ def _adf_from_nested(nested: ADFNested) -> ADF: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -572,9 +524,6 @@ def _adf_from_nested(nested: ADFNested) -> ADF: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -614,6 +563,9 @@ def _adf_from_nested_bytes(data: bytes, serde: Serde) -> ADF: ADF.METRICS = RelationField("metrics") ADF.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ADF.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ADF.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ADF.MEANINGS = RelationField("meanings") ADF.MC_MONITORS = RelationField("mcMonitors") ADF.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/adf_activity.py b/pyatlan_v9/model/assets/adf_activity.py index 13201b2c0..5a2245431 100644 --- a/pyatlan_v9/model/assets/adf_activity.py +++ b/pyatlan_v9/model/assets/adf_activity.py @@ -27,7 +27,6 @@ from pyatlan_v9.model.transform import register_asset from .adf_related import ( - RelatedAdfActivity, RelatedAdfDataflow, RelatedAdfDataset, RelatedAdfLinkedservice, @@ -48,6 +47,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm 
from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -111,6 +111,7 @@ class AdfActivity(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -128,6 +129,8 @@ class AdfActivity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfActivity" + adf_activity_type: Union[str, None, UnsetType] = UNSET """The type of the ADF activity.""" @@ -255,6 +258,11 @@ class AdfActivity(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -316,74 +324,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfActivity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.adf_pipeline is UNSET: - errors.append("adf_pipeline is required for creation") - if self.adf_pipeline_qualified_name is UNSET: - errors.append("adf_pipeline_qualified_name is required for creation") - if errors: - raise ValueError(f"AdfActivity validation failed: {errors}") - - def minimize(self) -> "AdfActivity": - """ - Return a minimal copy of this AdfActivity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfActivity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AdfActivity instance with only the minimum required fields. - """ - self.validate() - return AdfActivity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfActivity": - """ - Create a :class:`RelatedAdfActivity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfActivity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAdfActivity(guid=self.guid) - return RelatedAdfActivity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -570,6 +510,11 @@ class AdfActivityRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -661,6 +606,7 @@ class AdfActivityNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -774,9 +720,6 @@ def _adf_activity_to_nested(adf_activity: AdfActivity) -> AdfActivityNested: is_incomplete=adf_activity.is_incomplete, provenance_type=adf_activity.provenance_type, home_id=adf_activity.home_id, - depth=adf_activity.depth, - immediate_upstream=adf_activity.immediate_upstream, - immediate_downstream=adf_activity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -808,6 +751,7 @@ def _adf_activity_from_nested(nested: AdfActivityNested) -> AdfActivity: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -816,9 +760,6 @@ def _adf_activity_from_nested(nested: AdfActivityNested) -> AdfActivity: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_activity_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -926,6 +867,9 @@ def _adf_activity_from_nested_bytes(data: bytes, serde: Serde) -> AdfActivity: AdfActivity.METRICS = RelationField("metrics") AdfActivity.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AdfActivity.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AdfActivity.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AdfActivity.MEANINGS = RelationField("meanings") AdfActivity.MC_MONITORS = RelationField("mcMonitors") AdfActivity.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/adf_dataflow.py b/pyatlan_v9/model/assets/adf_dataflow.py index b7fbf9ad8..b0817f154 100644 --- a/pyatlan_v9/model/assets/adf_dataflow.py +++ b/pyatlan_v9/model/assets/adf_dataflow.py @@ -27,7 +27,6 @@ from .adf_related import ( RelatedAdfActivity, - RelatedAdfDataflow, RelatedAdfDataset, RelatedAdfLinkedservice, RelatedAdfPipeline, @@ -47,6 +46,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -93,6 +93,7 @@ class AdfDataflow(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = 
None @@ -110,6 +111,8 @@ class AdfDataflow(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfDataflow" + adf_dataflow_sources: Union[List[str], None, UnsetType] = UNSET """The list of names of sources for this dataflow.""" @@ -186,6 +189,11 @@ class AdfDataflow(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -241,66 +249,6 @@ class AdfDataflow(Asset): def __post_init__(self) -> None: self.type_name = "AdfDataflow" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfDataflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AdfDataflow validation failed: {errors}") - - def minimize(self) -> "AdfDataflow": - """ - Return a minimal copy of this AdfDataflow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfDataflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AdfDataflow instance with only the minimum required fields. - """ - self.validate() - return AdfDataflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfDataflow": - """ - Create a :class:`RelatedAdfDataflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfDataflow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAdfDataflow(guid=self.guid) - return RelatedAdfDataflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -436,6 +384,11 @@ class AdfDataflowRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -526,6 +479,7 @@ class AdfDataflowNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -603,9 +557,6 @@ def _adf_dataflow_to_nested(adf_dataflow: AdfDataflow) -> AdfDataflowNested: is_incomplete=adf_dataflow.is_incomplete, provenance_type=adf_dataflow.provenance_type, home_id=adf_dataflow.home_id, - depth=adf_dataflow.depth, - immediate_upstream=adf_dataflow.immediate_upstream, - immediate_downstream=adf_dataflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -637,6 +588,7 @@ def _adf_dataflow_from_nested(nested: AdfDataflowNested) -> AdfDataflow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -645,9 +597,6 @@ def _adf_dataflow_from_nested(nested: AdfDataflowNested) -> AdfDataflow: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_dataflow_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -702,6 +651,9 @@ def _adf_dataflow_from_nested_bytes(data: bytes, serde: Serde) -> AdfDataflow: AdfDataflow.METRICS = RelationField("metrics") AdfDataflow.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AdfDataflow.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AdfDataflow.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AdfDataflow.MEANINGS = RelationField("meanings") AdfDataflow.MC_MONITORS = RelationField("mcMonitors") AdfDataflow.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/adf_dataset.py b/pyatlan_v9/model/assets/adf_dataset.py index 6d209f5ed..baa913cc8 100644 --- a/pyatlan_v9/model/assets/adf_dataset.py +++ b/pyatlan_v9/model/assets/adf_dataset.py @@ -28,7 +28,6 @@ from .adf_related import ( RelatedAdfActivity, RelatedAdfDataflow, - RelatedAdfDataset, RelatedAdfLinkedservice, RelatedAdfPipeline, ) @@ -47,6 +46,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -101,6 +101,7 @@ class AdfDataset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = 
None @@ -118,6 +119,8 @@ class AdfDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfDataset" + adf_dataset_type: Union[str, None, UnsetType] = UNSET """Defines the type of the dataset.""" @@ -218,6 +221,11 @@ class AdfDataset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -273,66 +281,6 @@ class AdfDataset(Asset): def __post_init__(self) -> None: self.type_name = "AdfDataset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AdfDataset validation failed: {errors}") - - def minimize(self) -> "AdfDataset": - """ - Return a minimal copy of this AdfDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AdfDataset instance with only the minimum required fields. - """ - self.validate() - return AdfDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfDataset": - """ - Create a :class:`RelatedAdfDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAdfDataset(guid=self.guid) - return RelatedAdfDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -492,6 +440,11 @@ class AdfDatasetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -582,6 +535,7 @@ class AdfDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -673,9 +627,6 @@ def _adf_dataset_to_nested(adf_dataset: AdfDataset) -> AdfDatasetNested: is_incomplete=adf_dataset.is_incomplete, provenance_type=adf_dataset.provenance_type, home_id=adf_dataset.home_id, - depth=adf_dataset.depth, - immediate_upstream=adf_dataset.immediate_upstream, - immediate_downstream=adf_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -707,6 +658,7 @@ def _adf_dataset_from_nested(nested: AdfDatasetNested) -> AdfDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -715,9 +667,6 @@ def _adf_dataset_from_nested(nested: AdfDatasetNested) -> AdfDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -796,6 +745,9 @@ def _adf_dataset_from_nested_bytes(data: bytes, serde: Serde) -> AdfDataset: AdfDataset.METRICS = RelationField("metrics") AdfDataset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AdfDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AdfDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AdfDataset.MEANINGS = RelationField("meanings") AdfDataset.MC_MONITORS = RelationField("mcMonitors") AdfDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/adf_linkedservice.py b/pyatlan_v9/model/assets/adf_linkedservice.py index 25ab6acd7..5fdedeb57 100644 --- a/pyatlan_v9/model/assets/adf_linkedservice.py +++ b/pyatlan_v9/model/assets/adf_linkedservice.py @@ -29,7 +29,6 @@ RelatedAdfActivity, RelatedAdfDataflow, RelatedAdfDataset, - RelatedAdfLinkedservice, RelatedAdfPipeline, ) from .airflow_related import RelatedAirflowTask @@ -47,6 +46,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -105,6 +105,7 @@ class AdfLinkedservice(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = 
None @@ -122,6 +123,8 @@ class AdfLinkedservice(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfLinkedservice" + adf_linkedservice_type: Union[str, None, UnsetType] = UNSET """Defines the type of the linked service.""" @@ -234,6 +237,11 @@ class AdfLinkedservice(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -289,66 +297,6 @@ class AdfLinkedservice(Asset): def __post_init__(self) -> None: self.type_name = "AdfLinkedservice" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfLinkedservice instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AdfLinkedservice validation failed: {errors}") - - def minimize(self) -> "AdfLinkedservice": - """ - Return a minimal copy of this AdfLinkedservice with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfLinkedservice with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AdfLinkedservice instance with only the minimum required fields. - """ - self.validate() - return AdfLinkedservice(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfLinkedservice": - """ - Create a :class:`RelatedAdfLinkedservice` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfLinkedservice reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAdfLinkedservice(guid=self.guid) - return RelatedAdfLinkedservice(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -522,6 +470,11 @@ class AdfLinkedserviceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -614,6 +567,7 @@ class AdfLinkedserviceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -725,9 +679,6 @@ def _adf_linkedservice_to_nested( is_incomplete=adf_linkedservice.is_incomplete, provenance_type=adf_linkedservice.provenance_type, home_id=adf_linkedservice.home_id, - depth=adf_linkedservice.depth, - immediate_upstream=adf_linkedservice.immediate_upstream, - immediate_downstream=adf_linkedservice.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -761,6 +712,7 @@ def _adf_linkedservice_from_nested(nested: AdfLinkedserviceNested) -> AdfLinkeds updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -769,9 +721,6 @@ def _adf_linkedservice_from_nested(nested: AdfLinkedserviceNested) -> AdfLinkeds 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_linkedservice_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -874,6 +823,9 @@ def _adf_linkedservice_from_nested_bytes(data: bytes, serde: Serde) -> AdfLinked AdfLinkedservice.METRICS = RelationField("metrics") AdfLinkedservice.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AdfLinkedservice.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AdfLinkedservice.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AdfLinkedservice.MEANINGS = RelationField("meanings") AdfLinkedservice.MC_MONITORS = RelationField("mcMonitors") AdfLinkedservice.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/adf_pipeline.py b/pyatlan_v9/model/assets/adf_pipeline.py index ec0820b37..ea447a6e7 100644 --- a/pyatlan_v9/model/assets/adf_pipeline.py +++ b/pyatlan_v9/model/assets/adf_pipeline.py @@ -30,7 +30,6 @@ RelatedAdfDataflow, RelatedAdfDataset, RelatedAdfLinkedservice, - RelatedAdfPipeline, ) from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck @@ -47,6 +46,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -93,6 +93,7 @@ class AdfPipeline(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,6 +111,8 @@ class AdfPipeline(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AdfPipeline" + adf_pipeline_activity_count: Union[int, None, UnsetType] = UNSET """Defines the count of activities in the pipline.""" @@ -186,6 +189,11 @@ class AdfPipeline(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -241,66 +249,6 @@ class AdfPipeline(Asset): def __post_init__(self) -> None: self.type_name = "AdfPipeline" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AdfPipeline instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AdfPipeline validation failed: {errors}") - - def minimize(self) -> "AdfPipeline": - """ - Return a minimal copy of this AdfPipeline with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AdfPipeline with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AdfPipeline instance with only the minimum required fields. - """ - self.validate() - return AdfPipeline(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAdfPipeline": - """ - Create a :class:`RelatedAdfPipeline` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAdfPipeline reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAdfPipeline(guid=self.guid) - return RelatedAdfPipeline(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -436,6 +384,11 @@ class AdfPipelineRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -526,6 +479,7 @@ class AdfPipelineNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -603,9 +557,6 @@ def _adf_pipeline_to_nested(adf_pipeline: AdfPipeline) -> AdfPipelineNested: is_incomplete=adf_pipeline.is_incomplete, provenance_type=adf_pipeline.provenance_type, home_id=adf_pipeline.home_id, - depth=adf_pipeline.depth, - immediate_upstream=adf_pipeline.immediate_upstream, - immediate_downstream=adf_pipeline.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -637,6 +588,7 @@ def _adf_pipeline_from_nested(nested: AdfPipelineNested) -> AdfPipeline: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -645,9 +597,6 @@ def _adf_pipeline_from_nested(nested: AdfPipelineNested) -> AdfPipeline: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adf_pipeline_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -708,6 +657,9 @@ def _adf_pipeline_from_nested_bytes(data: bytes, serde: Serde) -> AdfPipeline: AdfPipeline.METRICS = RelationField("metrics") AdfPipeline.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AdfPipeline.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AdfPipeline.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AdfPipeline.MEANINGS = RelationField("meanings") AdfPipeline.MC_MONITORS = RelationField("mcMonitors") AdfPipeline.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/adls.py b/pyatlan_v9/model/assets/adls.py index b2d49f04b..7cda46587 100644 --- a/pyatlan_v9/model/assets/adls.py +++ b/pyatlan_v9/model/assets/adls.py @@ -25,7 +25,6 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .adls_related import RelatedADLS from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -41,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +85,7 @@ class ADLS(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +103,8 @@ class ADLS(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADLS" + adls_account_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the account for this ADLS asset.""" @@ -172,6 +175,11 @@ class ADLS(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -227,66 +235,6 @@ class ADLS(Asset): def __post_init__(self) -> None: self.type_name = "ADLS" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADLS instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ADLS validation failed: {errors}") - - def minimize(self) -> "ADLS": - """ - Return a minimal copy of this ADLS with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADLS with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ADLS instance with only the minimum required fields. - """ - self.validate() - return ADLS(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADLS": - """ - Create a :class:`RelatedADLS` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADLS reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedADLS(guid=self.guid) - return RelatedADLS(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -416,6 +364,11 @@ class ADLSRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -498,6 +451,7 @@ class ADLSNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -577,9 +531,6 @@ def _adls_to_nested(adls: ADLS) -> ADLSNested: is_incomplete=adls.is_incomplete, provenance_type=adls.provenance_type, home_id=adls.home_id, - depth=adls.depth, - immediate_upstream=adls.immediate_upstream, - immediate_downstream=adls.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -609,6 +560,7 @@ def _adls_from_nested(nested: ADLSNested) -> ADLS: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -617,9 +569,6 @@ def _adls_from_nested(nested: ADLSNested) -> ADLS: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_adls_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -678,6 +627,9 @@ def _adls_from_nested_bytes(data: bytes, serde: Serde) -> ADLS: ADLS.METRICS = RelationField("metrics") ADLS.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ADLS.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ADLS.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ADLS.MEANINGS = RelationField("meanings") ADLS.MC_MONITORS = RelationField("mcMonitors") ADLS.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/adls_account.py b/pyatlan_v9/model/assets/adls_account.py index 9759491e5..9845cb35f 100644 --- a/pyatlan_v9/model/assets/adls_account.py +++ b/pyatlan_v9/model/assets/adls_account.py @@ -27,7 +27,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .adls_related import RelatedADLSAccount, RelatedADLSContainer +from .adls_related import RelatedADLSContainer from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -98,6 +99,7 @@ class ADLSAccount(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None 
MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -115,6 +117,8 @@ class ADLSAccount(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADLSAccount" + adls_etag: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="adlsETag" ) @@ -220,6 +224,11 @@ class ADLSAccount(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -275,66 +284,6 @@ class ADLSAccount(Asset): def __post_init__(self) -> None: self.type_name = "ADLSAccount" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADLSAccount instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ADLSAccount validation failed: {errors}") - - def minimize(self) -> "ADLSAccount": - """ - Return a minimal copy of this ADLSAccount with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADLSAccount with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ADLSAccount instance with only the minimum required fields. - """ - self.validate() - return ADLSAccount(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADLSAccount": - """ - Create a :class:`RelatedADLSAccount` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADLSAccount reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedADLSAccount(guid=self.guid) - return RelatedADLSAccount(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -531,6 +480,11 @@ class ADLSAccountRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -618,6 +572,7 @@ class ADLSAccountNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -719,9 +674,6 @@ def _adls_account_to_nested(adls_account: ADLSAccount) -> ADLSAccountNested: is_incomplete=adls_account.is_incomplete, provenance_type=adls_account.provenance_type, home_id=adls_account.home_id, - depth=adls_account.depth, - immediate_upstream=adls_account.immediate_upstream, - immediate_downstream=adls_account.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -753,6 +705,7 @@ def _adls_account_from_nested(nested: ADLSAccountNested) -> ADLSAccount: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -761,9 +714,6 @@ def _adls_account_from_nested(nested: ADLSAccountNested) -> ADLSAccount: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_adls_account_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -855,6 +805,9 @@ def _adls_account_from_nested_bytes(data: bytes, serde: Serde) -> ADLSAccount: ADLSAccount.METRICS = RelationField("metrics") ADLSAccount.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ADLSAccount.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ADLSAccount.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ADLSAccount.MEANINGS = RelationField("meanings") ADLSAccount.MC_MONITORS = RelationField("mcMonitors") ADLSAccount.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/adls_container.py b/pyatlan_v9/model/assets/adls_container.py index 2f4eb96c6..879a8d4ac 100644 --- a/pyatlan_v9/model/assets/adls_container.py +++ b/pyatlan_v9/model/assets/adls_container.py @@ -27,7 +27,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .adls_related import RelatedADLSAccount, RelatedADLSContainer, RelatedADLSObject +from .adls_related import RelatedADLSAccount, RelatedADLSObject from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -95,6 +96,7 @@ class ADLSContainer(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +114,8 @@ class ADLSContainer(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADLSContainer" + adls_container_url: Union[str, None, UnsetType] = UNSET """URL of this container.""" @@ -208,6 +212,11 @@ class ADLSContainer(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -269,76 +278,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADLSContainer instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.adls_account is UNSET: - errors.append("adls_account is required for creation") - if self.adls_account_name is UNSET: - errors.append("adls_account_name is required for creation") - if self.adls_account_qualified_name is UNSET: - errors.append("adls_account_qualified_name is required for creation") - if errors: - raise ValueError(f"ADLSContainer validation failed: {errors}") - - def minimize(self) -> "ADLSContainer": - """ - Return a minimal copy of this ADLSContainer with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADLSContainer with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ADLSContainer instance with only the minimum required fields. - """ - self.validate() - return ADLSContainer(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADLSContainer": - """ - Create a :class:`RelatedADLSContainer` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADLSContainer reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedADLSContainer(guid=self.guid) - return RelatedADLSContainer(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -540,6 +479,11 @@ class ADLSContainerRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -630,6 +574,7 @@ class ADLSContainerNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -727,9 +672,6 @@ def _adls_container_to_nested(adls_container: ADLSContainer) -> ADLSContainerNes is_incomplete=adls_container.is_incomplete, provenance_type=adls_container.provenance_type, home_id=adls_container.home_id, - depth=adls_container.depth, - immediate_upstream=adls_container.immediate_upstream, - immediate_downstream=adls_container.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -763,6 +705,7 @@ def _adls_container_from_nested(nested: ADLSContainerNested) -> ADLSContainer: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -771,9 +714,6 @@ def _adls_container_from_nested(nested: ADLSContainerNested) -> ADLSContainer: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_adls_container_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -859,6 +799,9 @@ def _adls_container_from_nested_bytes(data: bytes, serde: Serde) -> ADLSContaine ADLSContainer.METRICS = RelationField("metrics") ADLSContainer.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ADLSContainer.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ADLSContainer.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ADLSContainer.MEANINGS = RelationField("meanings") ADLSContainer.MC_MONITORS = RelationField("mcMonitors") ADLSContainer.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/adls_object.py b/pyatlan_v9/model/assets/adls_object.py index b7fb4ed16..b9dd3f23d 100644 --- a/pyatlan_v9/model/assets/adls_object.py +++ b/pyatlan_v9/model/assets/adls_object.py @@ -29,7 +29,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .adls_related import RelatedADLSContainer, RelatedADLSObject +from .adls_related import RelatedADLSContainer from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -45,6 +45,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -109,6 +110,7 @@ class ADLSObject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -126,6 +128,8 @@ class ADLSObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ADLSObject" + adls_object_url: Union[str, None, UnsetType] = UNSET """URL of this object.""" @@ -258,6 +262,11 @@ class ADLSObject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -321,80 +330,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ADLSObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.adls_container is UNSET: - errors.append("adls_container is required for creation") - if self.adls_container_name is UNSET: - errors.append("adls_container_name is required for creation") - if self.adls_container_qualified_name is UNSET: - errors.append("adls_container_qualified_name is required for creation") - if self.adls_account_name is UNSET: - errors.append("adls_account_name is required for creation") - if self.adls_account_qualified_name is UNSET: - errors.append("adls_account_qualified_name is required for creation") - if errors: - raise ValueError(f"ADLSObject validation failed: {errors}") - - def minimize(self) -> "ADLSObject": - """ - Return a minimal copy of this ADLSObject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ADLSObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ADLSObject instance with only the minimum required fields. - """ - self.validate() - return ADLSObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedADLSObject": - """ - Create a :class:`RelatedADLSObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedADLSObject reference to this asset. - """ - if self.guid is not UNSET: - return RelatedADLSObject(guid=self.guid) - return RelatedADLSObject(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -698,6 +633,11 @@ class ADLSObjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -785,6 +725,7 @@ class ADLSObjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -910,9 +851,6 @@ def _adls_object_to_nested(adls_object: ADLSObject) -> ADLSObjectNested: is_incomplete=adls_object.is_incomplete, provenance_type=adls_object.provenance_type, home_id=adls_object.home_id, - depth=adls_object.depth, - immediate_upstream=adls_object.immediate_upstream, - immediate_downstream=adls_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -944,6 +882,7 @@ def _adls_object_from_nested(nested: ADLSObjectNested) -> ADLSObject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -952,9 +891,6 @@ def _adls_object_from_nested(nested: ADLSObjectNested) -> ADLSObject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, 
- immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_adls_object_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1074,6 +1010,9 @@ def _adls_object_from_nested_bytes(data: bytes, serde: Serde) -> ADLSObject: ADLSObject.METRICS = RelationField("metrics") ADLSObject.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ADLSObject.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ADLSObject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ADLSObject.MEANINGS = RelationField("meanings") ADLSObject.MC_MONITORS = RelationField("mcMonitors") ADLSObject.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/agentic.py b/pyatlan_v9/model/assets/agentic.py new file mode 100644 index 000000000..98a0c2d4e --- /dev/null +++ b/pyatlan_v9/model/assets/agentic.py @@ -0,0 +1,533 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Agentic asset model with flattened inheritance. 
+ +This module provides: +- Agentic: Flat asset class (easy to use) +- AgenticAttributes: Nested attributes struct (extends AssetAttributes) +- AgenticNested: Nested API format struct +""" + +from __future__ import annotations + +from typing import Any, ClassVar, Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .airflow_related import RelatedAirflowTask +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .model_related import RelatedModelAttribute, RelatedModelEntity +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .partial_related import RelatedPartialField, RelatedPartialObject +from .process_related import RelatedProcess +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck +from .spark_related import RelatedSparkJob +from pyatlan_v9.model.conversion_utils import categorize_relationships, merge_relationships +from pyatlan_v9.model.serde import Serde, get_serde +from pyatlan_v9.model.transform import register_asset + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= + +@register_asset +class Agentic(Asset): + """ + Base class for agentic assets in Atlan. 
Extends Catalog to participate in lineage (Process inputs/outputs require Catalog types). Agentic assets include skills, skill artifacts, context repositories, knowledge files, and other building blocks consumed by AI agents. + """ + + CATALOG_DATASET_GUID: ClassVar[Any] = None + INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + PARTIAL_CHILD_FIELDS: ClassVar[Any] = None + PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None + INPUT_TO_PROCESSES: ClassVar[Any] = None + OUTPUT_FROM_PROCESSES: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None + INPUT_TO_SPARK_JOBS: ClassVar[Any] = None + OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + + type_name: Union[str, UnsetType] = "Agentic" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: 
Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to 
this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "Agentic" + + + + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= + + def to_json(self, nested: bool = True, serde: Serde | 
None = None) -> str: + """ + Convert to JSON string using optimized nested struct serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. + + Returns: + JSON string representation + """ + if serde is None: + serde = get_serde() + if nested: + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _agentic_to_nested_bytes(self, serde) + + @staticmethod + def from_json(json_data: str | bytes, serde: Serde | None = None) -> Agentic: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. 
+ + Returns: + Agentic instance + """ + if isinstance(json_data, str): + json_data = json_data.encode("utf-8") + if serde is None: + serde = get_serde() + return _agentic_from_nested_bytes(json_data, serde) + + +# ============================================================================= +# NESTED FORMAT CLASSES +# ============================================================================= + +class AgenticAttributes(AssetAttributes): + """Agentic-specific attributes for nested API format.""" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + +class AgenticRelationshipAttributes(AssetRelationshipAttributes): + """Agentic-specific relationship attributes for nested API format.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + 
model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, 
UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + +class AgenticNested(AssetNested): + """Agentic in nested API format for high-performance serialization.""" + + attributes: Union[AgenticAttributes, UnsetType] = UNSET + relationship_attributes: Union[AgenticRelationshipAttributes, UnsetType] = UNSET + append_relationship_attributes: Union[AgenticRelationshipAttributes, UnsetType] = UNSET + remove_relationship_attributes: Union[AgenticRelationshipAttributes, UnsetType] = UNSET + +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_AGENTIC_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, + "input_to_airflow_tasks", + "output_from_airflow_tasks", + "anomalo_checks", + "application", + "application_field", + "data_contract_latest", + "data_contract_latest_certified", + "output_port_data_products", + "input_port_data_products", + "model_implemented_entities", + "model_implemented_attributes", + "metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + "mc_monitors", + "mc_incidents", + "partial_child_fields", + "partial_child_objects", + "input_to_processes", + "output_from_processes", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + 
"soda_checks", + "input_to_spark_jobs", + "output_from_spark_jobs", +] + +def _populate_agentic_attrs(attrs: AgenticAttributes, obj: Agentic) -> None: + """Populate Agentic-specific attributes on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.catalog_dataset_guid = obj.catalog_dataset_guid + +def _extract_agentic_attrs(attrs: AgenticAttributes) -> dict: + """Extract all Agentic attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid + return result + +# ============================================================================= +# CONVERSION FUNCTIONS +# ============================================================================= + + +def _agentic_to_nested(agentic: Agentic) -> AgenticNested: + """Convert flat Agentic to nested format.""" + attrs = AgenticAttributes() + _populate_agentic_attrs(attrs, agentic) + # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE) + replace_rels, append_rels, remove_rels = categorize_relationships( + agentic, _AGENTIC_REL_FIELDS, AgenticRelationshipAttributes + ) + return AgenticNested( + guid=agentic.guid, + type_name=agentic.type_name, + status=agentic.status, + version=agentic.version, + create_time=agentic.create_time, + update_time=agentic.update_time, + created_by=agentic.created_by, + updated_by=agentic.updated_by, + classifications=agentic.classifications, + classification_names=agentic.classification_names, + meanings=agentic.meanings, + labels=agentic.labels, + business_attributes=agentic.business_attributes, + custom_attributes=agentic.custom_attributes, + pending_tasks=agentic.pending_tasks, + proxy=agentic.proxy, + is_incomplete=agentic.is_incomplete, + provenance_type=agentic.provenance_type, + home_id=agentic.home_id, + attributes=attrs, + relationship_attributes=replace_rels, + append_relationship_attributes=append_rels, + remove_relationship_attributes=remove_rels, + ) + +def 
_agentic_from_nested(nested: AgenticNested) -> Agentic: + """Convert nested format to flat Agentic.""" + attrs = nested.attributes if nested.attributes is not UNSET else AgenticAttributes() + # Merge relationships from all three buckets + merged_rels = merge_relationships( + nested.relationship_attributes, + nested.append_relationship_attributes, + nested.remove_relationship_attributes, + _AGENTIC_REL_FIELDS, + AgenticRelationshipAttributes + ) + return Agentic( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + created_by=nested.created_by, + updated_by=nested.updated_by, + classifications=nested.classifications, + classification_names=nested.classification_names, + meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + custom_attributes=nested.custom_attributes, + pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_agentic_attrs(attrs), + # Merged relationship attributes + **merged_rels, + ) + +def _agentic_to_nested_bytes(agentic: Agentic, serde: Serde) -> bytes: + """Convert flat Agentic to nested JSON bytes.""" + return serde.encode(_agentic_to_nested(agentic)) + + +def _agentic_from_nested_bytes(data: bytes, serde: Serde) -> Agentic: + """Convert nested JSON bytes to flat Agentic.""" + nested = serde.decode(data, AgenticNested) + return _agentic_from_nested(nested) + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# --------------------------------------------------------------------------- +from pyatlan_v9.model.fields.atlan_fields import ( # noqa: E402 + KeywordField, + RelationField, +) + +Agentic.CATALOG_DATASET_GUID = KeywordField("catalogDatasetGuid", "catalogDatasetGuid")
+Agentic.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") +Agentic.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") +Agentic.ANOMALO_CHECKS = RelationField("anomaloChecks") +Agentic.APPLICATION = RelationField("application") +Agentic.APPLICATION_FIELD = RelationField("applicationField") +Agentic.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +Agentic.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +Agentic.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +Agentic.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +Agentic.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") +Agentic.MODEL_IMPLEMENTED_ATTRIBUTES = RelationField("modelImplementedAttributes") +Agentic.METRICS = RelationField("metrics") +Agentic.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +Agentic.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Agentic.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField("gcpDataplexAspectTypeMetadataEntities") +Agentic.MEANINGS = RelationField("meanings") +Agentic.MC_MONITORS = RelationField("mcMonitors") +Agentic.MC_INCIDENTS = RelationField("mcIncidents") +Agentic.PARTIAL_CHILD_FIELDS = RelationField("partialChildFields") +Agentic.PARTIAL_CHILD_OBJECTS = RelationField("partialChildObjects") +Agentic.INPUT_TO_PROCESSES = RelationField("inputToProcesses") +Agentic.OUTPUT_FROM_PROCESSES = RelationField("outputFromProcesses") +Agentic.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +Agentic.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +Agentic.FILES = RelationField("files") +Agentic.LINKS = RelationField("links") +Agentic.README = RelationField("readme") +Agentic.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +Agentic.SODA_CHECKS = RelationField("sodaChecks") +Agentic.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs") 
+Agentic.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs") \ No newline at end of file diff --git a/pyatlan_v9/model/assets/agentic_related.py b/pyatlan_v9/model/assets/agentic_related.py new file mode 100644 index 000000000..551f84d06 --- /dev/null +++ b/pyatlan_v9/model/assets/agentic_related.py @@ -0,0 +1,39 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Related type classes for Agentic module. + +This module contains all Related{Type} classes for the Agentic type hierarchy. +These classes are used for relationship attributes to reference related entities. +""" + +from __future__ import annotations + +from typing import Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .catalog_related import RelatedCatalog +from .referenceable_related import RelatedReferenceable + +__all__ = [ + "RelatedAgentic", +] + + +class RelatedAgentic(RelatedCatalog): + """ + Related entity reference for Agentic assets. + + Extends RelatedCatalog with Agentic-specific attributes. 
+ """ + + # type_name inherited from parent with default=UNSET + # __post_init__ sets it to "Agentic" so it serializes correctly + + def __post_init__(self) -> None: + RelatedReferenceable.__post_init__(self) + self.type_name = "Agentic" diff --git a/pyatlan_v9/model/assets/ai.py b/pyatlan_v9/model/assets/ai.py index 3744009ae..cdb22454d 100644 --- a/pyatlan_v9/model/assets/ai.py +++ b/pyatlan_v9/model/assets/ai.py @@ -26,7 +26,6 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .ai_related import RelatedAI from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -42,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +86,7 @@ class AI(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +104,8 @@ class AI(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AI" + ethical_ai_privacy_config: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="ethicalAIPrivacyConfig" ) @@ -187,6 +190,11 @@ class AI(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] 
= UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -242,66 +250,6 @@ class AI(Asset): def __post_init__(self) -> None: self.type_name = "AI" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AI instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AI validation failed: {errors}") - - def minimize(self) -> "AI": - """ - Return a minimal copy of this AI with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AI with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AI instance with only the minimum required fields. 
- """ - self.validate() - return AI(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAI": - """ - Create a :class:`RelatedAI` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAI reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAI(guid=self.guid) - return RelatedAI(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -445,6 +393,11 @@ class AIRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -527,6 +480,7 @@ class AINested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -616,9 +570,6 @@ def _ai_to_nested(ai: AI) -> AINested: is_incomplete=ai.is_incomplete, provenance_type=ai.provenance_type, home_id=ai.home_id, - depth=ai.depth, - immediate_upstream=ai.immediate_upstream, - immediate_downstream=ai.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -648,6 +599,7 @@ def _ai_from_nested(nested: AINested) -> AI: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, 
business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -656,9 +608,6 @@ def _ai_from_nested(nested: AINested) -> AI: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_ai_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -718,6 +667,9 @@ def _ai_from_nested_bytes(data: bytes, serde: Serde) -> AI: AI.METRICS = RelationField("metrics") AI.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AI.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AI.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AI.MEANINGS = RelationField("meanings") AI.MC_MONITORS = RelationField("mcMonitors") AI.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/ai_application.py b/pyatlan_v9/model/assets/ai_application.py index 070f84cc9..a0bc6f083 100644 --- a/pyatlan_v9/model/assets/ai_application.py +++ b/pyatlan_v9/model/assets/ai_application.py @@ -29,7 +29,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .ai_related import RelatedAIApplication, RelatedAIModel +from .ai_related import RelatedAIModel from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -45,6 +45,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import 
RelatedMCIncident, RelatedMCMonitor @@ -92,6 +93,7 @@ class AIApplication(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -109,6 +111,8 @@ class AIApplication(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AIApplication" + ai_application_version: Union[str, None, UnsetType] = UNSET """Version of the AI application""" @@ -202,6 +206,11 @@ class AIApplication(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -257,66 +266,6 @@ class AIApplication(Asset): def __post_init__(self) -> None: self.type_name = "AIApplication" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AIApplication instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AIApplication validation failed: {errors}") - - def minimize(self) -> "AIApplication": - """ - Return a minimal copy of this AIApplication with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AIApplication with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AIApplication instance with only the minimum required fields. - """ - self.validate() - return AIApplication(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAIApplication": - """ - Create a :class:`RelatedAIApplication` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAIApplication reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAIApplication(guid=self.guid) - return RelatedAIApplication(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -506,6 +455,11 @@ class AIApplicationRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -595,6 +549,7 @@ class AIApplicationNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -690,9 +645,6 @@ def _ai_application_to_nested(ai_application: AIApplication) -> AIApplicationNes is_incomplete=ai_application.is_incomplete, provenance_type=ai_application.provenance_type, home_id=ai_application.home_id, - depth=ai_application.depth, - immediate_upstream=ai_application.immediate_upstream, - immediate_downstream=ai_application.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -726,6 +678,7 @@ def _ai_application_from_nested(nested: AIApplicationNested) -> AIApplication: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -734,9 +687,6 @@ def _ai_application_from_nested(nested: AIApplicationNested) -> AIApplication: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_ai_application_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -809,6 +759,9 @@ def _ai_application_from_nested_bytes(data: bytes, serde: Serde) -> AIApplicatio AIApplication.METRICS = RelationField("metrics") AIApplication.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AIApplication.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AIApplication.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AIApplication.MEANINGS = RelationField("meanings") AIApplication.MC_MONITORS = RelationField("mcMonitors") AIApplication.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/ai_model.py b/pyatlan_v9/model/assets/ai_model.py index b4cc4e5f9..0b0b99846 100644 --- a/pyatlan_v9/model/assets/ai_model.py +++ b/pyatlan_v9/model/assets/ai_model.py @@ -30,7 +30,7 @@ from pyatlan_v9.model.transform import get_type, register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .ai_related import RelatedAIApplication, RelatedAIModel, RelatedAIModelVersion +from .ai_related import RelatedAIApplication, RelatedAIModelVersion from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -46,6 +46,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -95,6 +96,7 @@ class AIModel(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: 
ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +114,8 @@ class AIModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AIModel" + ai_model_datasets_dsl: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="aiModelDatasetsDSL" ) @@ -213,6 +217,11 @@ class AIModel(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -268,66 +277,6 @@ class AIModel(Asset): def __post_init__(self) -> None: self.type_name = "AIModel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AIModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AIModel validation failed: {errors}") - - def minimize(self) -> "AIModel": - """ - Return a minimal copy of this AIModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AIModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AIModel instance with only the minimum required fields. - """ - self.validate() - return AIModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAIModel": - """ - Create a :class:`RelatedAIModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAIModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAIModel(guid=self.guid) - return RelatedAIModel(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -582,6 +531,11 @@ class AIModelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -670,6 +624,7 @@ class AIModelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -765,9 +720,6 @@ def _ai_model_to_nested(ai_model: AIModel) -> AIModelNested: is_incomplete=ai_model.is_incomplete, provenance_type=ai_model.provenance_type, home_id=ai_model.home_id, - depth=ai_model.depth, - immediate_upstream=ai_model.immediate_upstream, - immediate_downstream=ai_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -797,6 +749,7 @@ def _ai_model_from_nested(nested: AIModelNested) -> AIModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -805,9 +758,6 @@ def _ai_model_from_nested(nested: AIModelNested) -> AIModel: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_ai_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ 
-872,6 +822,9 @@ def _ai_model_from_nested_bytes(data: bytes, serde: Serde) -> AIModel: AIModel.METRICS = RelationField("metrics") AIModel.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AIModel.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AIModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AIModel.MEANINGS = RelationField("meanings") AIModel.MC_MONITORS = RelationField("mcMonitors") AIModel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/ai_model_version.py b/pyatlan_v9/model/assets/ai_model_version.py index 0606d92ab..3c073ff03 100644 --- a/pyatlan_v9/model/assets/ai_model_version.py +++ b/pyatlan_v9/model/assets/ai_model_version.py @@ -27,7 +27,7 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .ai_related import RelatedAIModel, RelatedAIModelVersion +from .ai_related import RelatedAIModel from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -91,6 +92,7 @@ class AIModelVersion(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -108,6 +110,8 @@ class 
AIModelVersion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AIModelVersion" + ai_model_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AI model to which this version belongs, used to navigate from a version back to its parent model.""" @@ -204,6 +208,11 @@ class AIModelVersion(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -265,74 +274,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AIModelVersion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.ai_model is UNSET: - errors.append("ai_model is required for creation") - if self.ai_model_qualified_name is UNSET: - errors.append("ai_model_qualified_name is required for creation") - if errors: - raise ValueError(f"AIModelVersion validation failed: {errors}") - - def minimize(self) -> "AIModelVersion": - """ - Return a minimal copy of this AIModelVersion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AIModelVersion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AIModelVersion instance with only the minimum required fields. - """ - self.validate() - return AIModelVersion(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAIModelVersion": - """ - Create a :class:`RelatedAIModelVersion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAIModelVersion reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAIModelVersion(guid=self.guid) - return RelatedAIModelVersion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -488,6 +429,11 @@ class AIModelVersionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -577,6 +523,7 @@ class AIModelVersionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -678,9 +625,6 @@ def _ai_model_version_to_nested( is_incomplete=ai_model_version.is_incomplete, provenance_type=ai_model_version.provenance_type, home_id=ai_model_version.home_id, - depth=ai_model_version.depth, - immediate_upstream=ai_model_version.immediate_upstream, - immediate_downstream=ai_model_version.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -714,6 +658,7 @@ def _ai_model_version_from_nested(nested: AIModelVersionNested) -> AIModelVersio updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -722,9 +667,6 @@ def _ai_model_version_from_nested(nested: AIModelVersionNested) -> AIModelVersio is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_ai_model_version_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -802,6 +744,9 @@ def _ai_model_version_from_nested_bytes(data: bytes, serde: Serde) -> AIModelVer AIModelVersion.METRICS = RelationField("metrics") AIModelVersion.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AIModelVersion.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AIModelVersion.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AIModelVersion.MEANINGS = RelationField("meanings") AIModelVersion.MC_MONITORS = RelationField("mcMonitors") AIModelVersion.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/airflow.py b/pyatlan_v9/model/assets/airflow.py index 334aa1bfc..cf7cba5e8 100644 --- a/pyatlan_v9/model/assets/airflow.py +++ b/pyatlan_v9/model/assets/airflow.py @@ -25,7 +25,7 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .airflow_related import RelatedAirflow, RelatedAirflowTask +from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class Airflow(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: 
ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class Airflow(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Airflow" + airflow_tags: Union[List[str], None, UnsetType] = UNSET """Tags assigned to the asset in Airflow.""" @@ -176,6 +180,11 @@ class Airflow(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -234,66 +243,6 @@ class Airflow(Asset): def __post_init__(self) -> None: self.type_name = "Airflow" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Airflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Airflow validation failed: {errors}") - - def minimize(self) -> "Airflow": - """ - Return a minimal copy of this Airflow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Airflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Airflow instance with only the minimum required fields. - """ - self.validate() - return Airflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAirflow": - """ - Create a :class:`RelatedAirflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAirflow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAirflow(guid=self.guid) - return RelatedAirflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -426,6 +375,11 @@ class AirflowRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -515,6 +469,7 @@ class AirflowNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -597,9 +552,6 @@ def _airflow_to_nested(airflow: Airflow) -> AirflowNested: is_incomplete=airflow.is_incomplete, provenance_type=airflow.provenance_type, home_id=airflow.home_id, - depth=airflow.depth, - immediate_upstream=airflow.immediate_upstream, - immediate_downstream=airflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -629,6 +581,7 @@ def _airflow_from_nested(nested: AirflowNested) -> Airflow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -637,9 +590,6 @@ def _airflow_from_nested(nested: AirflowNested) -> Airflow: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_airflow_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -695,6 +645,9 @@ def _airflow_from_nested_bytes(data: bytes, serde: Serde) -> Airflow: Airflow.METRICS = RelationField("metrics") Airflow.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Airflow.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Airflow.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Airflow.MEANINGS = RelationField("meanings") Airflow.MC_MONITORS = RelationField("mcMonitors") Airflow.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/airflow_dag.py b/pyatlan_v9/model/assets/airflow_dag.py index 0c72a4990..b7db85ce8 100644 --- a/pyatlan_v9/model/assets/airflow_dag.py +++ b/pyatlan_v9/model/assets/airflow_dag.py @@ -26,7 +26,7 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .airflow_related import RelatedAirflowDag, RelatedAirflowTask +from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -89,6 +90,7 @@ class AirflowDag(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: 
ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -107,6 +109,8 @@ class AirflowDag(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AirflowDag" + airflow_dag_schedule: Union[str, None, UnsetType] = UNSET """Schedule for the DAG.""" @@ -189,6 +193,11 @@ class AirflowDag(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -247,66 +256,6 @@ class AirflowDag(Asset): def __post_init__(self) -> None: self.type_name = "AirflowDag" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AirflowDag instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AirflowDag validation failed: {errors}") - - def minimize(self) -> "AirflowDag": - """ - Return a minimal copy of this AirflowDag with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AirflowDag with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AirflowDag instance with only the minimum required fields. - """ - self.validate() - return AirflowDag(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAirflowDag": - """ - Create a :class:`RelatedAirflowDag` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAirflowDag reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAirflowDag(guid=self.guid) - return RelatedAirflowDag(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -478,6 +427,11 @@ class AirflowDagRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -568,6 +522,7 @@ class AirflowDagNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -654,9 +609,6 @@ def _airflow_dag_to_nested(airflow_dag: AirflowDag) -> AirflowDagNested: is_incomplete=airflow_dag.is_incomplete, provenance_type=airflow_dag.provenance_type, home_id=airflow_dag.home_id, - depth=airflow_dag.depth, - immediate_upstream=airflow_dag.immediate_upstream, - immediate_downstream=airflow_dag.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -688,6 +640,7 @@ def _airflow_dag_from_nested(nested: AirflowDagNested) -> AirflowDag: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -696,9 +649,6 @@ def _airflow_dag_from_nested(nested: AirflowDagNested) -> AirflowDag: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_airflow_dag_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -763,6 +713,9 @@ def _airflow_dag_from_nested_bytes(data: bytes, serde: Serde) -> AirflowDag: AirflowDag.METRICS = RelationField("metrics") AirflowDag.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AirflowDag.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AirflowDag.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AirflowDag.MEANINGS = RelationField("meanings") AirflowDag.MC_MONITORS = RelationField("mcMonitors") AirflowDag.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/airflow_task.py b/pyatlan_v9/model/assets/airflow_task.py index 48a531d71..1b7db42ef 100644 --- a/pyatlan_v9/model/assets/airflow_task.py +++ b/pyatlan_v9/model/assets/airflow_task.py @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -104,6 +105,7 @@ class AirflowTask(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -122,6 +124,8 @@ class AirflowTask(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AirflowTask" + airflow_task_operator_class: Union[str, None, UnsetType] = UNSET """Class name for the operator this task uses.""" @@ -243,6 +247,11 @@ class 
AirflowTask(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -307,76 +316,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AirflowTask instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.airflow_dag is UNSET: - errors.append("airflow_dag is required for creation") - if self.airflow_dag_name is UNSET: - errors.append("airflow_dag_name is required for creation") - if self.airflow_dag_qualified_name is UNSET: - errors.append("airflow_dag_qualified_name is required for creation") - if errors: - raise ValueError(f"AirflowTask validation failed: {errors}") - - def minimize(self) -> "AirflowTask": - """ - Return a minimal copy of this AirflowTask with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AirflowTask with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AirflowTask instance with only the minimum required fields. - """ - self.validate() - return AirflowTask(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAirflowTask": - """ - Create a :class:`RelatedAirflowTask` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAirflowTask reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAirflowTask(guid=self.guid) - return RelatedAirflowTask(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -593,6 +532,11 @@ class AirflowTaskRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -686,6 +630,7 @@ class AirflowTaskNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -794,9 +739,6 @@ def _airflow_task_to_nested(airflow_task: AirflowTask) -> AirflowTaskNested: is_incomplete=airflow_task.is_incomplete, provenance_type=airflow_task.provenance_type, home_id=airflow_task.home_id, - depth=airflow_task.depth, - immediate_upstream=airflow_task.immediate_upstream, - immediate_downstream=airflow_task.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -828,6 +770,7 @@ def _airflow_task_from_nested(nested: AirflowTaskNested) -> AirflowTask: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -836,9 +779,6 @@ def _airflow_task_from_nested(nested: AirflowTaskNested) -> AirflowTask: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_airflow_task_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -937,6 +877,9 @@ def _airflow_task_from_nested_bytes(data: bytes, serde: Serde) -> AirflowTask: AirflowTask.METRICS = RelationField("metrics") AirflowTask.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AirflowTask.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AirflowTask.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AirflowTask.MEANINGS = RelationField("meanings") AirflowTask.MC_MONITORS = RelationField("mcMonitors") AirflowTask.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan.py b/pyatlan_v9/model/assets/anaplan.py index 3c24a8738..3cdaeae30 100644 --- a/pyatlan_v9/model/assets/anaplan.py +++ b/pyatlan_v9/model/assets/anaplan.py @@ -26,7 +26,6 @@ from pyatlan_v9.model.transform import register_asset from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplan from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -41,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +85,7 @@ class Anaplan(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +103,8 @@ class Anaplan(Asset): 
INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Anaplan" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -172,6 +175,11 @@ class Anaplan(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -227,66 +235,6 @@ class Anaplan(Asset): def __post_init__(self) -> None: self.type_name = "Anaplan" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Anaplan instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Anaplan validation failed: {errors}") - - def minimize(self) -> "Anaplan": - """ - Return a minimal copy of this Anaplan with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Anaplan with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Anaplan instance with only the minimum required fields. - """ - self.validate() - return Anaplan(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplan": - """ - Create a :class:`RelatedAnaplan` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplan reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplan(guid=self.guid) - return RelatedAnaplan(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -416,6 +364,11 @@ class AnaplanRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -502,6 +455,7 @@ class AnaplanNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -581,9 +535,6 @@ def _anaplan_to_nested(anaplan: Anaplan) -> AnaplanNested: is_incomplete=anaplan.is_incomplete, provenance_type=anaplan.provenance_type, home_id=anaplan.home_id, - depth=anaplan.depth, - immediate_upstream=anaplan.immediate_upstream, - immediate_downstream=anaplan.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -613,6 +564,7 @@ def _anaplan_from_nested(nested: AnaplanNested) -> Anaplan: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -621,9 +573,6 @@ def _anaplan_from_nested(nested: AnaplanNested) -> Anaplan: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -676,6 +625,9 @@ def _anaplan_from_nested_bytes(data: bytes, serde: Serde) -> Anaplan: Anaplan.METRICS = RelationField("metrics") Anaplan.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Anaplan.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Anaplan.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Anaplan.MEANINGS = RelationField("meanings") Anaplan.MC_MONITORS = RelationField("mcMonitors") Anaplan.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_app.py b/pyatlan_v9/model/assets/anaplan_app.py index 96aaaf75b..b6ad1c753 100644 --- a/pyatlan_v9/model/assets/anaplan_app.py +++ b/pyatlan_v9/model/assets/anaplan_app.py @@ -27,7 +27,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplanApp, RelatedAnaplanPage +from .anaplan_related import RelatedAnaplanPage from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -87,6 +88,7 @@ class AnaplanApp(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = 
None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class AnaplanApp(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanApp" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -177,6 +181,11 @@ class AnaplanApp(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -232,66 +241,6 @@ class AnaplanApp(Asset): def __post_init__(self) -> None: self.type_name = "AnaplanApp" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanApp instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AnaplanApp validation failed: {errors}") - - def minimize(self) -> "AnaplanApp": - """ - Return a minimal copy of this AnaplanApp with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanApp with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanApp instance with only the minimum required fields. - """ - self.validate() - return AnaplanApp(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanApp": - """ - Create a :class:`RelatedAnaplanApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanApp reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplanApp(guid=self.guid) - return RelatedAnaplanApp(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, name: str, connection_qualified_name: str) -> "AnaplanApp": @@ -450,6 +399,11 @@ class AnaplanAppRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -537,6 +491,7 @@ class AnaplanAppNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -616,9 +571,6 @@ def _anaplan_app_to_nested(anaplan_app: AnaplanApp) -> AnaplanAppNested: is_incomplete=anaplan_app.is_incomplete, provenance_type=anaplan_app.provenance_type, home_id=anaplan_app.home_id, - depth=anaplan_app.depth, - immediate_upstream=anaplan_app.immediate_upstream, - immediate_downstream=anaplan_app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -650,6 +602,7 @@ def _anaplan_app_from_nested(nested: AnaplanAppNested) -> AnaplanApp: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -658,9 +611,6 @@ def _anaplan_app_from_nested(nested: AnaplanAppNested) -> AnaplanApp: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_anaplan_app_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -716,6 +666,9 @@ def _anaplan_app_from_nested_bytes(data: bytes, serde: Serde) -> AnaplanApp: AnaplanApp.METRICS = RelationField("metrics") AnaplanApp.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnaplanApp.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnaplanApp.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanApp.MEANINGS = RelationField("meanings") AnaplanApp.MC_MONITORS = RelationField("mcMonitors") AnaplanApp.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_dimension.py b/pyatlan_v9/model/assets/anaplan_dimension.py index e1d9a5be0..66cc90580 100644 --- a/pyatlan_v9/model/assets/anaplan_dimension.py +++ b/pyatlan_v9/model/assets/anaplan_dimension.py @@ -30,7 +30,6 @@ from .airflow_related import RelatedAirflowTask from .anaplan_related import ( - RelatedAnaplanDimension, RelatedAnaplanLineItem, RelatedAnaplanModel, RelatedAnaplanView, @@ -49,6 +48,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -98,6 +98,7 @@ class AnaplanDimension(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -115,6 +116,8 @@ class AnaplanDimension(Asset): INPUT_TO_SPARK_JOBS: 
ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanDimension" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -200,6 +203,11 @@ class AnaplanDimension(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -263,82 +271,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_model is UNSET: - errors.append("anaplan_model is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanDimension validation failed: {errors}") - - def minimize(self) -> "AnaplanDimension": - """ - Return a minimal copy of this AnaplanDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanDimension instance with only the minimum required fields. - """ - self.validate() - return AnaplanDimension(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanDimension": - """ - Create a :class:`RelatedAnaplanDimension` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanDimension reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanDimension(guid=self.guid) - return RelatedAnaplanDimension(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -534,6 +466,11 @@ class AnaplanDimensionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -627,6 +564,7 @@ class AnaplanDimensionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -712,9 +650,6 @@ def _anaplan_dimension_to_nested( is_incomplete=anaplan_dimension.is_incomplete, provenance_type=anaplan_dimension.provenance_type, home_id=anaplan_dimension.home_id, - depth=anaplan_dimension.depth, - immediate_upstream=anaplan_dimension.immediate_upstream, - immediate_downstream=anaplan_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -748,6 +683,7 @@ def _anaplan_dimension_from_nested(nested: AnaplanDimensionNested) -> AnaplanDim updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -756,9 +692,6 @@ def _anaplan_dimension_from_nested(nested: AnaplanDimensionNested) -> 
AnaplanDim is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -828,6 +761,9 @@ def _anaplan_dimension_from_nested_bytes(data: bytes, serde: Serde) -> AnaplanDi AnaplanDimension.METRICS = RelationField("metrics") AnaplanDimension.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnaplanDimension.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnaplanDimension.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanDimension.MEANINGS = RelationField("meanings") AnaplanDimension.MC_MONITORS = RelationField("mcMonitors") AnaplanDimension.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_line_item.py b/pyatlan_v9/model/assets/anaplan_line_item.py index a06418c0b..211004be8 100644 --- a/pyatlan_v9/model/assets/anaplan_line_item.py +++ b/pyatlan_v9/model/assets/anaplan_line_item.py @@ -31,7 +31,6 @@ from .airflow_related import RelatedAirflowTask from .anaplan_related import ( RelatedAnaplanDimension, - RelatedAnaplanLineItem, RelatedAnaplanList, RelatedAnaplanModule, ) @@ -49,6 +48,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -97,6 +97,7 @@ class AnaplanLineItem(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -114,6 +115,8 @@ class AnaplanLineItem(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanLineItem" + anaplan_line_item_formula: Union[str, None, UnsetType] = UNSET """Formula of the AnaplanLineItem from the source system.""" @@ -196,6 +199,11 @@ class AnaplanLineItem(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -259,86 +267,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanLineItem instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_module is UNSET: - errors.append("anaplan_module is required for creation") - if self.anaplan_module_name is UNSET: - errors.append("anaplan_module_name is required for creation") - if self.anaplan_module_qualified_name is UNSET: - errors.append("anaplan_module_qualified_name is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanLineItem validation failed: {errors}") - - def minimize(self) -> "AnaplanLineItem": - """ - Return a minimal copy of this AnaplanLineItem with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanLineItem with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanLineItem instance with only the minimum required fields. 
- """ - self.validate() - return AnaplanLineItem(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanLineItem": - """ - Create a :class:`RelatedAnaplanLineItem` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanLineItem reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanLineItem(guid=self.guid) - return RelatedAnaplanLineItem(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -535,6 +463,11 @@ class AnaplanLineItemRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -626,6 +559,7 @@ class AnaplanLineItemNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -713,9 +647,6 @@ def _anaplan_line_item_to_nested( is_incomplete=anaplan_line_item.is_incomplete, provenance_type=anaplan_line_item.provenance_type, home_id=anaplan_line_item.home_id, - depth=anaplan_line_item.depth, - immediate_upstream=anaplan_line_item.immediate_upstream, - immediate_downstream=anaplan_line_item.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -749,6 +680,7 @@ def _anaplan_line_item_from_nested(nested: AnaplanLineItemNested) -> AnaplanLine updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + 
meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -757,9 +689,6 @@ def _anaplan_line_item_from_nested(nested: AnaplanLineItemNested) -> AnaplanLine is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_line_item_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -830,6 +759,9 @@ def _anaplan_line_item_from_nested_bytes(data: bytes, serde: Serde) -> AnaplanLi AnaplanLineItem.METRICS = RelationField("metrics") AnaplanLineItem.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnaplanLineItem.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnaplanLineItem.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanLineItem.MEANINGS = RelationField("meanings") AnaplanLineItem.MC_MONITORS = RelationField("mcMonitors") AnaplanLineItem.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_list.py b/pyatlan_v9/model/assets/anaplan_list.py index 951c0a2d9..a89e859fb 100644 --- a/pyatlan_v9/model/assets/anaplan_list.py +++ b/pyatlan_v9/model/assets/anaplan_list.py @@ -29,11 +29,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import ( - RelatedAnaplanLineItem, - RelatedAnaplanList, - RelatedAnaplanModel, -) +from .anaplan_related import RelatedAnaplanLineItem, RelatedAnaplanModel from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -48,6 +44,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -95,6 +92,7 @@ class AnaplanList(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +110,8 @@ class AnaplanList(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanList" + anaplan_list_item_count: Union[int, None, UnsetType] = UNSET """Item Count of the AnaplanList from the source system.""" @@ -191,6 +191,11 @@ class AnaplanList(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -254,82 +259,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanList instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_model is UNSET: - errors.append("anaplan_model is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanList validation failed: {errors}") - - def minimize(self) -> "AnaplanList": - """ - Return a minimal copy of this AnaplanList with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanList with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanList instance with only the minimum required fields. 
- """ - self.validate() - return AnaplanList(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanList": - """ - Create a :class:`RelatedAnaplanList` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanList reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanList(guid=self.guid) - return RelatedAnaplanList(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -515,6 +444,11 @@ class AnaplanListRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -603,6 +537,7 @@ class AnaplanListNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -686,9 +621,6 @@ def _anaplan_list_to_nested(anaplan_list: AnaplanList) -> AnaplanListNested: is_incomplete=anaplan_list.is_incomplete, provenance_type=anaplan_list.provenance_type, home_id=anaplan_list.home_id, - depth=anaplan_list.depth, - immediate_upstream=anaplan_list.immediate_upstream, - immediate_downstream=anaplan_list.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -720,6 +652,7 @@ def _anaplan_list_from_nested(nested: AnaplanListNested) -> AnaplanList: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, 
labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -728,9 +661,6 @@ def _anaplan_list_from_nested(nested: AnaplanListNested) -> AnaplanList: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_list_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -796,6 +726,9 @@ def _anaplan_list_from_nested_bytes(data: bytes, serde: Serde) -> AnaplanList: AnaplanList.METRICS = RelationField("metrics") AnaplanList.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnaplanList.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnaplanList.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanList.MEANINGS = RelationField("meanings") AnaplanList.MC_MONITORS = RelationField("mcMonitors") AnaplanList.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_model.py b/pyatlan_v9/model/assets/anaplan_model.py index 3d8ae9395..8c0df62aa 100644 --- a/pyatlan_v9/model/assets/anaplan_model.py +++ b/pyatlan_v9/model/assets/anaplan_model.py @@ -32,7 +32,6 @@ from .anaplan_related import ( RelatedAnaplanDimension, RelatedAnaplanList, - RelatedAnaplanModel, RelatedAnaplanModule, RelatedAnaplanPage, RelatedAnaplanWorkspace, @@ -51,6 +50,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -100,6 +100,7 @@ class AnaplanModel(Asset): 
METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -117,6 +118,8 @@ class AnaplanModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanModel" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -202,6 +205,11 @@ class AnaplanModel(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -263,78 +271,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_workspace is UNSET: - errors.append("anaplan_workspace is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanModel validation failed: {errors}") - - def minimize(self) -> "AnaplanModel": - """ - Return a minimal copy of this AnaplanModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanModel instance with only the minimum required fields. - """ - self.validate() - return AnaplanModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanModel": - """ - Create a :class:`RelatedAnaplanModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplanModel(guid=self.guid) - return RelatedAnaplanModel(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -519,6 +455,11 @@ class AnaplanModelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -612,6 +553,7 @@ class AnaplanModelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -693,9 +635,6 @@ def _anaplan_model_to_nested(anaplan_model: AnaplanModel) -> AnaplanModelNested: is_incomplete=anaplan_model.is_incomplete, provenance_type=anaplan_model.provenance_type, home_id=anaplan_model.home_id, - depth=anaplan_model.depth, - immediate_upstream=anaplan_model.immediate_upstream, - immediate_downstream=anaplan_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -729,6 +668,7 @@ def _anaplan_model_from_nested(nested: AnaplanModelNested) -> AnaplanModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -737,9 +677,6 @@ def _anaplan_model_from_nested(nested: AnaplanModelNested) -> AnaplanModel: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_anaplan_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -803,6 +740,9 @@ def _anaplan_model_from_nested_bytes(data: bytes, serde: Serde) -> AnaplanModel: AnaplanModel.METRICS = RelationField("metrics") AnaplanModel.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnaplanModel.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnaplanModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanModel.MEANINGS = RelationField("meanings") AnaplanModel.MC_MONITORS = RelationField("mcMonitors") AnaplanModel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_module.py b/pyatlan_v9/model/assets/anaplan_module.py index 9cd73dddf..dedb715c7 100644 --- a/pyatlan_v9/model/assets/anaplan_module.py +++ b/pyatlan_v9/model/assets/anaplan_module.py @@ -32,7 +32,6 @@ from .anaplan_related import ( RelatedAnaplanLineItem, RelatedAnaplanModel, - RelatedAnaplanModule, RelatedAnaplanView, ) from .anomalo_related import RelatedAnomaloCheck @@ -49,6 +48,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -96,6 +96,7 @@ class AnaplanModule(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -113,6 +114,8 @@ class AnaplanModule(Asset): INPUT_TO_SPARK_JOBS: 
ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanModule" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -192,6 +195,11 @@ class AnaplanModule(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -255,82 +263,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanModule instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_model is UNSET: - errors.append("anaplan_model is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanModule validation failed: {errors}") - - def minimize(self) -> "AnaplanModule": - """ - Return a minimal copy of this AnaplanModule with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanModule with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanModule instance with only the minimum required fields. - """ - self.validate() - return AnaplanModule(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanModule": - """ - Create a :class:`RelatedAnaplanModule` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanModule reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanModule(guid=self.guid) - return RelatedAnaplanModule(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -509,6 +441,11 @@ class AnaplanModuleRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -600,6 +537,7 @@ class AnaplanModuleNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -681,9 +619,6 @@ def _anaplan_module_to_nested(anaplan_module: AnaplanModule) -> AnaplanModuleNes is_incomplete=anaplan_module.is_incomplete, provenance_type=anaplan_module.provenance_type, home_id=anaplan_module.home_id, - depth=anaplan_module.depth, - immediate_upstream=anaplan_module.immediate_upstream, - immediate_downstream=anaplan_module.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -717,6 +652,7 @@ def _anaplan_module_from_nested(nested: AnaplanModuleNested) -> AnaplanModule: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -725,9 +661,6 @@ def _anaplan_module_from_nested(nested: AnaplanModuleNested) -> AnaplanModule: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_module_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -791,6 +724,9 @@ def _anaplan_module_from_nested_bytes(data: bytes, serde: Serde) -> AnaplanModul AnaplanModule.METRICS = RelationField("metrics") AnaplanModule.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnaplanModule.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnaplanModule.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanModule.MEANINGS = RelationField("meanings") AnaplanModule.MC_MONITORS = RelationField("mcMonitors") AnaplanModule.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_page.py b/pyatlan_v9/model/assets/anaplan_page.py index 4fda767a3..a6c0c3c82 100644 --- a/pyatlan_v9/model/assets/anaplan_page.py +++ b/pyatlan_v9/model/assets/anaplan_page.py @@ -29,7 +29,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplanApp, RelatedAnaplanModel, RelatedAnaplanPage +from .anaplan_related import RelatedAnaplanApp, RelatedAnaplanModel from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -44,6 +44,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -93,6 +94,7 @@ class 
AnaplanPage(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,6 +112,8 @@ class AnaplanPage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanPage" + anaplan_app_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanApp asset that contains this asset.""" @@ -195,6 +199,11 @@ class AnaplanPage(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,74 +265,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanPage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_app is UNSET: - errors.append("anaplan_app is required for creation") - if self.anaplan_app_qualified_name is UNSET: - errors.append("anaplan_app_qualified_name is required for creation") - if errors: - raise ValueError(f"AnaplanPage validation failed: {errors}") - - def minimize(self) -> "AnaplanPage": - """ - Return a minimal copy of this AnaplanPage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanPage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanPage instance with only the minimum required fields. - """ - self.validate() - return AnaplanPage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanPage": - """ - Create a :class:`RelatedAnaplanPage` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanPage reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplanPage(guid=self.guid) - return RelatedAnaplanPage(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -508,6 +449,11 @@ class AnaplanPageRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -596,6 +542,7 @@ class AnaplanPageNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -683,9 +630,6 @@ def _anaplan_page_to_nested(anaplan_page: AnaplanPage) -> AnaplanPageNested: is_incomplete=anaplan_page.is_incomplete, provenance_type=anaplan_page.provenance_type, home_id=anaplan_page.home_id, - depth=anaplan_page.depth, - immediate_upstream=anaplan_page.immediate_upstream, - immediate_downstream=anaplan_page.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -717,6 +661,7 @@ def _anaplan_page_from_nested(nested: AnaplanPageNested) -> AnaplanPage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -725,9 +670,6 @@ def _anaplan_page_from_nested(nested: AnaplanPageNested) -> AnaplanPage: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_anaplan_page_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -793,6 +735,9 @@ def _anaplan_page_from_nested_bytes(data: bytes, serde: Serde) -> AnaplanPage: AnaplanPage.METRICS = RelationField("metrics") AnaplanPage.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnaplanPage.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnaplanPage.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanPage.MEANINGS = RelationField("meanings") AnaplanPage.MC_MONITORS = RelationField("mcMonitors") AnaplanPage.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_system_dimension.py b/pyatlan_v9/model/assets/anaplan_system_dimension.py index 47d736ad8..deb386751 100644 --- a/pyatlan_v9/model/assets/anaplan_system_dimension.py +++ b/pyatlan_v9/model/assets/anaplan_system_dimension.py @@ -27,7 +27,6 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplanSystemDimension from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -42,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +86,7 @@ class AnaplanSystemDimension(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: 
ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +104,8 @@ class AnaplanSystemDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanSystemDimension" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -173,6 +176,11 @@ class AnaplanSystemDimension(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -228,68 +236,6 @@ class AnaplanSystemDimension(Asset): def __post_init__(self) -> None: self.type_name = "AnaplanSystemDimension" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanSystemDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AnaplanSystemDimension validation failed: {errors}") - - def minimize(self) -> "AnaplanSystemDimension": - """ - Return a minimal copy of this AnaplanSystemDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanSystemDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanSystemDimension instance with only the minimum required fields. - """ - self.validate() - return AnaplanSystemDimension( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedAnaplanSystemDimension": - """ - Create a :class:`RelatedAnaplanSystemDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanSystemDimension reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplanSystemDimension(guid=self.guid) - return RelatedAnaplanSystemDimension(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -451,6 +397,11 @@ class AnaplanSystemDimensionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -539,6 +490,7 @@ class AnaplanSystemDimensionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -626,9 +578,6 @@ def _anaplan_system_dimension_to_nested( is_incomplete=anaplan_system_dimension.is_incomplete, provenance_type=anaplan_system_dimension.provenance_type, home_id=anaplan_system_dimension.home_id, - depth=anaplan_system_dimension.depth, - immediate_upstream=anaplan_system_dimension.immediate_upstream, - immediate_downstream=anaplan_system_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -664,6 +613,7 @@ def _anaplan_system_dimension_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -672,9 +622,6 @@ def _anaplan_system_dimension_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_anaplan_system_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -751,6 +698,9 @@ def _anaplan_system_dimension_from_nested_bytes( AnaplanSystemDimension.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +AnaplanSystemDimension.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanSystemDimension.MEANINGS = RelationField("meanings") AnaplanSystemDimension.MC_MONITORS = RelationField("mcMonitors") AnaplanSystemDimension.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_view.py b/pyatlan_v9/model/assets/anaplan_view.py index 610e792fb..930ef4eda 100644 --- a/pyatlan_v9/model/assets/anaplan_view.py +++ b/pyatlan_v9/model/assets/anaplan_view.py @@ -29,11 +29,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import ( - RelatedAnaplanDimension, - RelatedAnaplanModule, - RelatedAnaplanView, -) +from .anaplan_related import RelatedAnaplanDimension, RelatedAnaplanModule from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -48,6 +44,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -96,6 +93,7 @@ class AnaplanView(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -113,6 +111,8 @@ class AnaplanView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanView" + anaplan_workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AnaplanWorkspace asset that contains this asset (AnaplanModel and everything under its hierarchy).""" @@ -201,6 +201,11 @@ class AnaplanView(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -264,86 +269,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.anaplan_module is UNSET: - errors.append("anaplan_module is required for creation") - if self.anaplan_module_name is UNSET: - errors.append("anaplan_module_name is required for creation") - if self.anaplan_module_qualified_name is UNSET: - errors.append("anaplan_module_qualified_name is required for creation") - if self.anaplan_model_name is UNSET: - errors.append("anaplan_model_name is required for creation") - if self.anaplan_model_qualified_name is UNSET: - errors.append("anaplan_model_qualified_name is required for creation") - if self.anaplan_workspace_name is UNSET: - errors.append("anaplan_workspace_name is required for creation") - if self.anaplan_workspace_qualified_name is UNSET: - errors.append( - "anaplan_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AnaplanView validation failed: {errors}") - - def minimize(self) -> "AnaplanView": - """ - Return a minimal copy of this AnaplanView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanView instance with only the minimum required fields. 
- """ - self.validate() - return AnaplanView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanView": - """ - Create a :class:`RelatedAnaplanView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanView reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnaplanView(guid=self.guid) - return RelatedAnaplanView(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -542,6 +467,11 @@ class AnaplanViewRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -632,6 +562,7 @@ class AnaplanViewNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -713,9 +644,6 @@ def _anaplan_view_to_nested(anaplan_view: AnaplanView) -> AnaplanViewNested: is_incomplete=anaplan_view.is_incomplete, provenance_type=anaplan_view.provenance_type, home_id=anaplan_view.home_id, - depth=anaplan_view.depth, - immediate_upstream=anaplan_view.immediate_upstream, - immediate_downstream=anaplan_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -747,6 +675,7 @@ def _anaplan_view_from_nested(nested: AnaplanViewNested) -> AnaplanView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, 
labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -755,9 +684,6 @@ def _anaplan_view_from_nested(nested: AnaplanViewNested) -> AnaplanView: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anaplan_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -818,6 +744,9 @@ def _anaplan_view_from_nested_bytes(data: bytes, serde: Serde) -> AnaplanView: AnaplanView.METRICS = RelationField("metrics") AnaplanView.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnaplanView.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnaplanView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanView.MEANINGS = RelationField("meanings") AnaplanView.MC_MONITORS = RelationField("mcMonitors") AnaplanView.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anaplan_workspace.py b/pyatlan_v9/model/assets/anaplan_workspace.py index 1e920e5c9..c6eea3e8c 100644 --- a/pyatlan_v9/model/assets/anaplan_workspace.py +++ b/pyatlan_v9/model/assets/anaplan_workspace.py @@ -27,7 +27,7 @@ from pyatlan_v9.utils import init_guid, validate_required_fields from .airflow_related import RelatedAirflowTask -from .anaplan_related import RelatedAnaplanModel, RelatedAnaplanWorkspace +from .anaplan_related import RelatedAnaplanModel from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from 
.gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -89,6 +90,7 @@ class AnaplanWorkspace(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +108,8 @@ class AnaplanWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnaplanWorkspace" + anaplan_workspace_current_size: Union[int, None, UnsetType] = UNSET """Current size of the AnaplanWorkspace from the source system, estimated in MB.""" @@ -185,6 +189,11 @@ class AnaplanWorkspace(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -240,66 +249,6 @@ class AnaplanWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "AnaplanWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnaplanWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AnaplanWorkspace validation failed: {errors}") - - def minimize(self) -> "AnaplanWorkspace": - """ - Return a minimal copy of this AnaplanWorkspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnaplanWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnaplanWorkspace instance with only the minimum required fields. - """ - self.validate() - return AnaplanWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnaplanWorkspace": - """ - Create a :class:`RelatedAnaplanWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnaplanWorkspace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnaplanWorkspace(guid=self.guid) - return RelatedAnaplanWorkspace(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -470,6 +419,11 @@ class AnaplanWorkspaceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -559,6 +513,7 @@ class AnaplanWorkspaceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -648,9 +603,6 @@ def _anaplan_workspace_to_nested( is_incomplete=anaplan_workspace.is_incomplete, provenance_type=anaplan_workspace.provenance_type, home_id=anaplan_workspace.home_id, - depth=anaplan_workspace.depth, - immediate_upstream=anaplan_workspace.immediate_upstream, - immediate_downstream=anaplan_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -684,6 +636,7 @@ def _anaplan_workspace_from_nested(nested: AnaplanWorkspaceNested) -> AnaplanWor updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -692,9 +645,6 @@ def _anaplan_workspace_from_nested(nested: AnaplanWorkspaceNested) -> AnaplanWor is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_anaplan_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -770,6 +720,9 @@ def _anaplan_workspace_from_nested_bytes(data: bytes, serde: Serde) -> AnaplanWo AnaplanWorkspace.METRICS = RelationField("metrics") AnaplanWorkspace.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnaplanWorkspace.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnaplanWorkspace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnaplanWorkspace.MEANINGS = RelationField("meanings") AnaplanWorkspace.MC_MONITORS = RelationField("mcMonitors") AnaplanWorkspace.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anomalo.py b/pyatlan_v9/model/assets/anomalo.py index a079f3b15..59d6aec5a 100644 --- a/pyatlan_v9/model/assets/anomalo.py +++ b/pyatlan_v9/model/assets/anomalo.py @@ -26,7 +26,7 @@ from pyatlan_v9.model.transform import register_asset from .airflow_related import RelatedAirflowTask -from .anomalo_related import RelatedAnomalo, RelatedAnomaloCheck +from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +79,7 @@ class Anomalo(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +97,8 @@ class Anomalo(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Anomalo" + dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET """Whether this data quality is part of contract (true) or not (false).""" @@ -147,6 +151,11 @@ class Anomalo(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -202,66 +211,6 @@ class Anomalo(Asset): def __post_init__(self) -> None: self.type_name = "Anomalo" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Anomalo instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Anomalo validation failed: {errors}") - - def minimize(self) -> "Anomalo": - """ - Return a minimal copy of this Anomalo with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Anomalo with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Anomalo instance with only the minimum required fields. - """ - self.validate() - return Anomalo(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnomalo": - """ - Create a :class:`RelatedAnomalo` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnomalo reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAnomalo(guid=self.guid) - return RelatedAnomalo(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -373,6 +322,11 @@ class AnomaloRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -459,6 +413,7 @@ class AnomaloNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -526,9 +481,6 @@ def _anomalo_to_nested(anomalo: Anomalo) -> AnomaloNested: is_incomplete=anomalo.is_incomplete, provenance_type=anomalo.provenance_type, home_id=anomalo.home_id, - depth=anomalo.depth, - immediate_upstream=anomalo.immediate_upstream, - immediate_downstream=anomalo.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -558,6 +510,7 @@ def _anomalo_from_nested(nested: AnomaloNested) -> Anomalo: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -566,9 +519,6 @@ def _anomalo_from_nested(nested: AnomaloNested) -> Anomalo: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anomalo_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -613,6 +563,9 @@ def _anomalo_from_nested_bytes(data: bytes, serde: Serde) -> Anomalo: Anomalo.METRICS = RelationField("metrics") Anomalo.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Anomalo.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Anomalo.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Anomalo.MEANINGS = RelationField("meanings") Anomalo.MC_MONITORS = RelationField("mcMonitors") Anomalo.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/anomalo_check.py b/pyatlan_v9/model/assets/anomalo_check.py index 6da0b24a0..5964ff20f 100644 --- a/pyatlan_v9/model/assets/anomalo_check.py +++ b/pyatlan_v9/model/assets/anomalo_check.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -91,6 +92,7 @@ class AnomaloCheck(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -108,6 +110,8 @@ class AnomaloCheck(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AnomaloCheck" + anomalo_check_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET 
"""QualifiedName of the asset associated with the check""" @@ -196,6 +200,11 @@ class AnomaloCheck(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -251,66 +260,6 @@ class AnomaloCheck(Asset): def __post_init__(self) -> None: self.type_name = "AnomaloCheck" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AnomaloCheck instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AnomaloCheck validation failed: {errors}") - - def minimize(self) -> "AnomaloCheck": - """ - Return a minimal copy of this AnomaloCheck with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AnomaloCheck with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AnomaloCheck instance with only the minimum required fields. - """ - self.validate() - return AnomaloCheck(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAnomaloCheck": - """ - Create a :class:`RelatedAnomaloCheck` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAnomaloCheck reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAnomaloCheck(guid=self.guid) - return RelatedAnomaloCheck(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -458,6 +407,11 @@ class AnomaloCheckRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -547,6 +501,7 @@ class AnomaloCheckNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -650,9 +605,6 @@ def _anomalo_check_to_nested(anomalo_check: AnomaloCheck) -> AnomaloCheckNested: is_incomplete=anomalo_check.is_incomplete, provenance_type=anomalo_check.provenance_type, home_id=anomalo_check.home_id, - 
depth=anomalo_check.depth, - immediate_upstream=anomalo_check.immediate_upstream, - immediate_downstream=anomalo_check.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -686,6 +638,7 @@ def _anomalo_check_from_nested(nested: AnomaloCheckNested) -> AnomaloCheck: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -694,9 +647,6 @@ def _anomalo_check_from_nested(nested: AnomaloCheckNested) -> AnomaloCheck: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_anomalo_check_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -778,6 +728,9 @@ def _anomalo_check_from_nested_bytes(data: bytes, serde: Serde) -> AnomaloCheck: AnomaloCheck.METRICS = RelationField("metrics") AnomaloCheck.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AnomaloCheck.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AnomaloCheck.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AnomaloCheck.MEANINGS = RelationField("meanings") AnomaloCheck.MC_MONITORS = RelationField("mcMonitors") AnomaloCheck.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/api.py b/pyatlan_v9/model/assets/api.py index 1b5bbbab9..2606aaa84 100644 --- a/pyatlan_v9/model/assets/api.py +++ b/pyatlan_v9/model/assets/api.py @@ -27,7 +27,6 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPI from .app_related import RelatedApplication, RelatedApplicationField 
from .asset import ( _ASSET_REL_FIELDS, @@ -41,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +86,7 @@ class API(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +104,8 @@ class API(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "API" + api_spec_type: Union[str, None, UnsetType] = UNSET """Type of API, for example: OpenAPI, GraphQL, etc.""" @@ -176,6 +179,11 @@ class API(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -231,66 +239,6 @@ class API(Asset): def __post_init__(self) -> None: self.type_name = "API" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this API instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"API validation failed: {errors}") - - def minimize(self) -> "API": - """ - Return a minimal copy of this API with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new API with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new API instance with only the minimum required fields. - """ - self.validate() - return API(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPI": - """ - Create a :class:`RelatedAPI` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPI reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAPI(guid=self.guid) - return RelatedAPI(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -423,6 +371,11 @@ class APIRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -505,6 +458,7 @@ class APINested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -586,9 +540,6 @@ def _api_to_nested(api: API) -> APINested: is_incomplete=api.is_incomplete, provenance_type=api.provenance_type, home_id=api.home_id, - depth=api.depth, - immediate_upstream=api.immediate_upstream, - immediate_downstream=api.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -618,6 +569,7 @@ def _api_from_nested(nested: APINested) -> API: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -626,9 +578,6 @@ def _api_from_nested(nested: APINested) -> API: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_api_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -685,6 +634,9 @@ def _api_from_nested_bytes(data: bytes, serde: Serde) -> API: API.METRICS = RelationField("metrics") API.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") API.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +API.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) API.MEANINGS = RelationField("meanings") API.MC_MONITORS = RelationField("mcMonitors") API.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/api_field.py b/pyatlan_v9/model/assets/api_field.py index 15e68b4c9..50a7868d0 100644 --- a/pyatlan_v9/model/assets/api_field.py +++ b/pyatlan_v9/model/assets/api_field.py @@ -29,7 +29,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIField, RelatedAPIObject, RelatedAPIQuery +from .api_related import RelatedAPIObject, RelatedAPIQuery from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -93,6 +94,7 @@ class APIField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,6 +112,8 @@ class APIField(Asset): INPUT_TO_SPARK_JOBS: 
ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "APIField" + api_field_type: Union[str, None, UnsetType] = UNSET """Type of APIField, as free text (e.g. STRING, NUMBER etc).""" @@ -198,6 +202,11 @@ class APIField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -259,74 +268,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APIField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.api_object is UNSET: - errors.append("api_object is required for creation") - if self.api_object_qualified_name is UNSET: - errors.append("api_object_qualified_name is required for creation") - if errors: - raise ValueError(f"APIField validation failed: {errors}") - - def minimize(self) -> "APIField": - """ - Return a minimal copy of this APIField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APIField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APIField instance with only the minimum required fields. - """ - self.validate() - return APIField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPIField": - """ - Create a :class:`RelatedAPIField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPIField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAPIField(guid=self.guid) - return RelatedAPIField(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -597,6 +538,11 @@ class APIFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -685,6 +631,7 @@ class APIFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -772,9 +719,6 @@ def _api_field_to_nested(api_field: APIField) -> APIFieldNested: is_incomplete=api_field.is_incomplete, provenance_type=api_field.provenance_type, home_id=api_field.home_id, - depth=api_field.depth, - immediate_upstream=api_field.immediate_upstream, - immediate_downstream=api_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -806,6 +750,7 @@ def _api_field_from_nested(nested: APIFieldNested) -> APIField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -814,9 +759,6 @@ def _api_field_from_nested(nested: APIFieldNested) -> APIField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_api_field_attrs(attrs), # Merged relationship attributes 
**merged_rels, @@ -880,6 +822,9 @@ def _api_field_from_nested_bytes(data: bytes, serde: Serde) -> APIField: APIField.METRICS = RelationField("metrics") APIField.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") APIField.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +APIField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) APIField.MEANINGS = RelationField("meanings") APIField.MC_MONITORS = RelationField("mcMonitors") APIField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/api_object.py b/pyatlan_v9/model/assets/api_object.py index ceb6fc486..dfa6a9e86 100644 --- a/pyatlan_v9/model/assets/api_object.py +++ b/pyatlan_v9/model/assets/api_object.py @@ -28,7 +28,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIField, RelatedAPIObject +from .api_related import RelatedAPIField from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -89,6 +90,7 @@ class APIObject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +108,8 @@ class APIObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: 
ClassVar[Any] = None + type_name: Union[str, UnsetType] = "APIObject" + api_field_count: Union[int, None, UnsetType] = UNSET """Count of the APIField of this object.""" @@ -185,6 +189,11 @@ class APIObject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -240,66 +249,6 @@ class APIObject(Asset): def __post_init__(self) -> None: self.type_name = "APIObject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APIObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"APIObject validation failed: {errors}") - - def minimize(self) -> "APIObject": - """ - Return a minimal copy of this APIObject with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APIObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APIObject instance with only the minimum required fields. - """ - self.validate() - return APIObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPIObject": - """ - Create a :class:`RelatedAPIObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPIObject reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAPIObject(guid=self.guid) - return RelatedAPIObject(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -471,6 +420,11 @@ class APIObjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -558,6 +512,7 @@ class APIObjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -641,9 +596,6 @@ def _api_object_to_nested(api_object: APIObject) -> APIObjectNested: is_incomplete=api_object.is_incomplete, provenance_type=api_object.provenance_type, home_id=api_object.home_id, - depth=api_object.depth, - immediate_upstream=api_object.immediate_upstream, - immediate_downstream=api_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, 
append_relationship_attributes=append_rels, @@ -675,6 +627,7 @@ def _api_object_from_nested(nested: APIObjectNested) -> APIObject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -683,9 +636,6 @@ def _api_object_from_nested(nested: APIObjectNested) -> APIObject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_api_object_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -747,6 +697,9 @@ def _api_object_from_nested_bytes(data: bytes, serde: Serde) -> APIObject: APIObject.METRICS = RelationField("metrics") APIObject.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") APIObject.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +APIObject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) APIObject.MEANINGS = RelationField("meanings") APIObject.MC_MONITORS = RelationField("mcMonitors") APIObject.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/api_path.py b/pyatlan_v9/model/assets/api_path.py index c926c007a..784ead55a 100644 --- a/pyatlan_v9/model/assets/api_path.py +++ b/pyatlan_v9/model/assets/api_path.py @@ -30,7 +30,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIPath, RelatedAPISpec +from .api_related import RelatedAPISpec from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -44,6 +44,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from 
.data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -96,6 +97,7 @@ class APIPath(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -113,6 +115,8 @@ class APIPath(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "APIPath" + api_path_summary: Union[str, None, UnsetType] = UNSET """Descriptive summary intended to apply to all operations in this path.""" @@ -209,6 +213,11 @@ class APIPath(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -270,76 +279,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APIPath instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.api_spec is UNSET: - errors.append("api_spec is required for creation") - if self.api_spec_name is UNSET: - errors.append("api_spec_name is required for creation") - if self.api_spec_qualified_name is UNSET: - errors.append("api_spec_qualified_name is required for creation") - if errors: - raise ValueError(f"APIPath validation failed: {errors}") - - def minimize(self) -> "APIPath": - """ - Return a minimal copy of this APIPath with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APIPath with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APIPath instance with only the minimum required fields. - """ - self.validate() - return APIPath(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPIPath": - """ - Create a :class:`RelatedAPIPath` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPIPath reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAPIPath(guid=self.guid) - return RelatedAPIPath(qualified_name=self.qualified_name) - @property def api_path_raw_u_r_i(self) -> Union[str, None, UnsetType]: return self.api_path_raw_uri @@ -549,6 +488,11 @@ class APIPathRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -636,6 +580,7 @@ class APIPathNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -731,9 +676,6 @@ def _api_path_to_nested(api_path: APIPath) -> APIPathNested: is_incomplete=api_path.is_incomplete, provenance_type=api_path.provenance_type, home_id=api_path.home_id, - depth=api_path.depth, - immediate_upstream=api_path.immediate_upstream, - immediate_downstream=api_path.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -763,6 +705,7 @@ def _api_path_from_nested(nested: APIPathNested) -> APIPath: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -771,9 +714,6 @@ def _api_path_from_nested(nested: APIPathNested) -> APIPath: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_api_path_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -845,6 +785,9 @@ def _api_path_from_nested_bytes(data: bytes, serde: Serde) -> APIPath: APIPath.METRICS = RelationField("metrics") APIPath.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") APIPath.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +APIPath.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) APIPath.MEANINGS = RelationField("meanings") APIPath.MC_MONITORS = RelationField("mcMonitors") APIPath.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/api_query.py b/pyatlan_v9/model/assets/api_query.py index dbfbf533a..94e7442cb 100644 --- a/pyatlan_v9/model/assets/api_query.py +++ b/pyatlan_v9/model/assets/api_query.py @@ -28,7 +28,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIField, RelatedAPIQuery +from .api_related import RelatedAPIField from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -91,6 +92,7 @@ class APIQuery(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -108,6 +110,8 @@ class APIQuery(Asset): INPUT_TO_SPARK_JOBS: 
ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "APIQuery" + api_input_field_count: Union[int, None, UnsetType] = UNSET """Count of the APIField of this query that are input to it.""" @@ -193,6 +197,11 @@ class APIQuery(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -248,66 +257,6 @@ class APIQuery(Asset): def __post_init__(self) -> None: self.type_name = "APIQuery" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APIQuery instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"APIQuery validation failed: {errors}") - - def minimize(self) -> "APIQuery": - """ - Return a minimal copy of this APIQuery with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APIQuery with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APIQuery instance with only the minimum required fields. - """ - self.validate() - return APIQuery(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPIQuery": - """ - Create a :class:`RelatedAPIQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPIQuery reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAPIQuery(guid=self.guid) - return RelatedAPIQuery(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -512,6 +461,11 @@ class APIQueryRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -599,6 +553,7 @@ class APIQueryNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -686,9 +641,6 @@ def _api_query_to_nested(api_query: APIQuery) -> APIQueryNested: is_incomplete=api_query.is_incomplete, provenance_type=api_query.provenance_type, home_id=api_query.home_id, - depth=api_query.depth, - immediate_upstream=api_query.immediate_upstream, - immediate_downstream=api_query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -720,6 +672,7 @@ def _api_query_from_nested(nested: APIQueryNested) -> APIQuery: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -728,9 +681,6 @@ def _api_query_from_nested(nested: APIQueryNested) -> APIQuery: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_api_query_attrs(attrs), # Merged relationship attributes 
**merged_rels, @@ -798,6 +748,9 @@ def _api_query_from_nested_bytes(data: bytes, serde: Serde) -> APIQuery: APIQuery.METRICS = RelationField("metrics") APIQuery.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") APIQuery.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +APIQuery.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) APIQuery.MEANINGS = RelationField("meanings") APIQuery.MC_MONITORS = RelationField("mcMonitors") APIQuery.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/api_spec.py b/pyatlan_v9/model/assets/api_spec.py index 3b893ab56..a7cc3e866 100644 --- a/pyatlan_v9/model/assets/api_spec.py +++ b/pyatlan_v9/model/assets/api_spec.py @@ -29,7 +29,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .api_related import RelatedAPIPath, RelatedAPISpec +from .api_related import RelatedAPIPath from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -97,6 +98,7 @@ class APISpec(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -114,6 +116,8 @@ class APISpec(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + 
type_name: Union[str, UnsetType] = "APISpec" + api_spec_terms_of_service_url: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="apiSpecTermsOfServiceURL" ) @@ -220,6 +224,11 @@ class APISpec(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -275,66 +284,6 @@ class APISpec(Asset): def __post_init__(self) -> None: self.type_name = "APISpec" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this APISpec instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"APISpec validation failed: {errors}") - - def minimize(self) -> "APISpec": - """ - Return a minimal copy of this APISpec with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new APISpec with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new APISpec instance with only the minimum required fields. - """ - self.validate() - return APISpec(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAPISpec": - """ - Create a :class:`RelatedAPISpec` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAPISpec reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAPISpec(guid=self.guid) - return RelatedAPISpec(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, name: str, connection_qualified_name: str) -> "APISpec": @@ -526,6 +475,11 @@ class APISpecRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -613,6 +567,7 @@ class APISpecNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -710,9 +665,6 @@ def _api_spec_to_nested(api_spec: APISpec) -> APISpecNested: is_incomplete=api_spec.is_incomplete, provenance_type=api_spec.provenance_type, home_id=api_spec.home_id, - depth=api_spec.depth, - immediate_upstream=api_spec.immediate_upstream, - immediate_downstream=api_spec.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, 
append_relationship_attributes=append_rels, @@ -742,6 +694,7 @@ def _api_spec_from_nested(nested: APISpecNested) -> APISpec: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -750,9 +703,6 @@ def _api_spec_from_nested(nested: APISpecNested) -> APISpec: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_api_spec_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -834,6 +784,9 @@ def _api_spec_from_nested_bytes(data: bytes, serde: Serde) -> APISpec: APISpec.METRICS = RelationField("metrics") APISpec.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") APISpec.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +APISpec.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) APISpec.MEANINGS = RelationField("meanings") APISpec.MC_MONITORS = RelationField("mcMonitors") APISpec.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/app.py b/pyatlan_v9/model/assets/app.py index 5c9075801..4ea34247c 100644 --- a/pyatlan_v9/model/assets/app.py +++ b/pyatlan_v9/model/assets/app.py @@ -27,7 +27,7 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck -from .app_related import RelatedApp, RelatedApplication, RelatedApplicationField +from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( _ASSET_REL_FIELDS, Asset, @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, 
RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +79,7 @@ class App(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +97,8 @@ class App(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "App" + app_id: Union[str, None, UnsetType] = UNSET """Unique identifier for the application asset from the source system.""" @@ -147,6 +151,11 @@ class App(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -202,66 +211,6 @@ class App(Asset): def __post_init__(self) -> None: self.type_name = "App" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this App instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"App validation failed: {errors}") - - def minimize(self) -> "App": - """ - Return a minimal copy of this App with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new App with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new App instance with only the minimum required fields. - """ - self.validate() - return App(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedApp": - """ - Create a :class:`RelatedApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedApp reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedApp(guid=self.guid) - return RelatedApp(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -373,6 +322,11 @@ class AppRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -455,6 +409,7 @@ class AppNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -522,9 +477,6 @@ def _app_to_nested(app: App) -> AppNested: is_incomplete=app.is_incomplete, provenance_type=app.provenance_type, home_id=app.home_id, - depth=app.depth, - immediate_upstream=app.immediate_upstream, - immediate_downstream=app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -554,6 +506,7 @@ def _app_from_nested(nested: AppNested) -> App: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -562,9 +515,6 @@ def _app_from_nested(nested: AppNested) -> App: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_app_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -603,6 +553,9 @@ def _app_from_nested_bytes(data: bytes, serde: Serde) -> App: App.METRICS = RelationField("metrics") App.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") App.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +App.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) App.MEANINGS = RelationField("meanings") App.MC_MONITORS = RelationField("mcMonitors") App.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/app_workflow_run.py b/pyatlan_v9/model/assets/app_workflow_run.py index e80d4e4f6..52dfbf139 100644 --- a/pyatlan_v9/model/assets/app_workflow_run.py +++ b/pyatlan_v9/model/assets/app_workflow_run.py @@ -28,7 +28,6 @@ from .airflow_related import RelatedAirflowTask from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .app_workflow_run_related import RelatedAppWorkflowRun from .asset import ( _ASSET_REL_FIELDS, Asset, @@ -42,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -97,6 +97,7 @@ class AppWorkflowRun(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -114,6 +115,8 @@ class AppWorkflowRun(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AppWorkflowRun" + app_workflow_run_label: Union[str, None, UnsetType] = UNSET """Root name for the workflow run.""" @@ -217,6 +220,11 @@ class AppWorkflowRun(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -272,66 +280,6 @@ class AppWorkflowRun(Asset): def __post_init__(self) -> None: self.type_name = "AppWorkflowRun" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AppWorkflowRun instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AppWorkflowRun validation failed: {errors}") - - def minimize(self) -> "AppWorkflowRun": - """ - Return a minimal copy of this AppWorkflowRun with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AppWorkflowRun with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AppWorkflowRun instance with only the minimum required fields. - """ - self.validate() - return AppWorkflowRun(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAppWorkflowRun": - """ - Create a :class:`RelatedAppWorkflowRun` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAppWorkflowRun reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAppWorkflowRun(guid=self.guid) - return RelatedAppWorkflowRun(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -494,6 +442,11 @@ class AppWorkflowRunRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -583,6 +536,7 @@ class AppWorkflowRunNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -702,9 +656,6 @@ def _app_workflow_run_to_nested( is_incomplete=app_workflow_run.is_incomplete, provenance_type=app_workflow_run.provenance_type, home_id=app_workflow_run.home_id, - depth=app_workflow_run.depth, - immediate_upstream=app_workflow_run.immediate_upstream, - immediate_downstream=app_workflow_run.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -738,6 +689,7 @@ def _app_workflow_run_from_nested(nested: AppWorkflowRunNested) -> AppWorkflowRu updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -746,9 +698,6 @@ def _app_workflow_run_from_nested(nested: AppWorkflowRunNested) -> AppWorkflowRu is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_app_workflow_run_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -852,6 +801,9 @@ def _app_workflow_run_from_nested_bytes(data: bytes, serde: Serde) -> AppWorkflo AppWorkflowRun.METRICS = RelationField("metrics") AppWorkflowRun.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AppWorkflowRun.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AppWorkflowRun.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AppWorkflowRun.MEANINGS = RelationField("meanings") AppWorkflowRun.MC_MONITORS = RelationField("mcMonitors") AppWorkflowRun.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/application.py b/pyatlan_v9/model/assets/application.py index ae24a8069..9c11e17b6 100644 --- a/pyatlan_v9/model/assets/application.py +++ b/pyatlan_v9/model/assets/application.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -82,6 +83,7 @@ class Application(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -99,6 +101,8 @@ class Application(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] 
= None + type_name: Union[str, UnsetType] = "Application" + app_id: Union[str, None, UnsetType] = UNSET """Unique identifier for the application asset from the source system.""" @@ -159,6 +163,11 @@ class Application(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -214,66 +223,6 @@ class Application(Asset): def __post_init__(self) -> None: self.type_name = "Application" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Application instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Application validation failed: {errors}") - - def minimize(self) -> "Application": - """ - Return a minimal copy of this Application with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Application with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Application instance with only the minimum required fields. - """ - self.validate() - return Application(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedApplication": - """ - Create a :class:`RelatedApplication` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedApplication reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedApplication(guid=self.guid) - return RelatedApplication(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -427,6 +376,11 @@ class ApplicationRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -515,6 +469,7 @@ class ApplicationNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -582,9 +537,6 @@ def _application_to_nested(application: Application) -> ApplicationNested: is_incomplete=application.is_incomplete, provenance_type=application.provenance_type, home_id=application.home_id, - depth=application.depth, - immediate_upstream=application.immediate_upstream, - immediate_downstream=application.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -616,6 +568,7 @@ def _application_from_nested(nested: ApplicationNested) -> Application: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -624,9 +577,6 @@ def _application_from_nested(nested: ApplicationNested) -> Application: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_application_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -671,6 +621,9 @@ def _application_from_nested_bytes(data: bytes, serde: Serde) -> Application: Application.METRICS = RelationField("metrics") Application.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Application.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Application.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Application.MEANINGS = RelationField("meanings") Application.MC_MONITORS = RelationField("mcMonitors") Application.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/application_field.py b/pyatlan_v9/model/assets/application_field.py index 73928f6c8..7514c0e1e 100644 --- a/pyatlan_v9/model/assets/application_field.py +++ b/pyatlan_v9/model/assets/application_field.py @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -84,6 +85,7 @@ class ApplicationField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -101,6 +103,8 @@ class ApplicationField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ApplicationField" + application_parent_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the parent Application 
asset that contains this ApplicationField asset.""" @@ -162,6 +166,11 @@ class ApplicationField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -223,72 +232,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ApplicationField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.application_parent is UNSET: - errors.append("application_parent is required for creation") - if errors: - raise ValueError(f"ApplicationField validation failed: {errors}") - - def minimize(self) -> "ApplicationField": - """ - Return a minimal copy of this ApplicationField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ApplicationField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ApplicationField instance with only the minimum required fields. - """ - self.validate() - return ApplicationField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedApplicationField": - """ - Create a :class:`RelatedApplicationField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedApplicationField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedApplicationField(guid=self.guid) - return RelatedApplicationField(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -460,6 +403,11 @@ class ApplicationFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -550,6 +498,7 @@ class ApplicationFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -627,9 +576,6 @@ def _application_field_to_nested( is_incomplete=application_field.is_incomplete, provenance_type=application_field.provenance_type, home_id=application_field.home_id, - depth=application_field.depth, - immediate_upstream=application_field.immediate_upstream, - immediate_downstream=application_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -663,6 +609,7 @@ def _application_field_from_nested(nested: ApplicationFieldNested) -> Applicatio updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -671,9 +618,6 @@ def _application_field_from_nested(nested: ApplicationFieldNested) -> Applicatio is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_application_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -727,6 +671,9 @@ def _application_field_from_nested_bytes(data: bytes, serde: Serde) -> Applicati ApplicationField.METRICS = RelationField("metrics") ApplicationField.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ApplicationField.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ApplicationField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ApplicationField.MEANINGS = RelationField("meanings") ApplicationField.MC_MONITORS = RelationField("mcMonitors") ApplicationField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/artifact.py b/pyatlan_v9/model/assets/artifact.py new file mode 100644 index 000000000..fb65a5e93 --- /dev/null +++ b/pyatlan_v9/model/assets/artifact.py @@ -0,0 +1,604 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Artifact asset model with flattened inheritance. 
+ +This module provides: +- Artifact: Flat asset class (easy to use) +- ArtifactAttributes: Nested attributes struct (extends AssetAttributes) +- ArtifactNested: Nested API format struct +""" + +from __future__ import annotations + +from typing import Any, ClassVar, Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .airflow_related import RelatedAirflowTask +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .model_related import RelatedModelAttribute, RelatedModelEntity +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .partial_related import RelatedPartialField, RelatedPartialObject +from .process_related import RelatedProcess +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck +from .spark_related import RelatedSparkJob +from pyatlan_v9.model.conversion_utils import categorize_relationships, merge_relationships +from pyatlan_v9.model.serde import Serde, get_serde +from pyatlan_v9.model.transform import register_asset + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= + +@register_asset +class Artifact(Asset): + """ + Base class for artifacts in Atlan. 
An artifact is a file or data object associated with an agentic asset. Extends File for native fileType and filePath support. + """ + + ARTIFACT_VERSION: ClassVar[Any] = None + CATALOG_DATASET_GUID: ClassVar[Any] = None + FILE_TYPE: ClassVar[Any] = None + FILE_PATH: ClassVar[Any] = None + LINK: ClassVar[Any] = None + IS_GLOBAL: ClassVar[Any] = None + REFERENCE: ClassVar[Any] = None + RESOURCE_METADATA: ClassVar[Any] = None + INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + PARTIAL_CHILD_FIELDS: ClassVar[Any] = None + PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None + INPUT_TO_PROCESSES: ClassVar[Any] = None + OUTPUT_FROM_PROCESSES: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None + INPUT_TO_SPARK_JOBS: ClassVar[Any] = None + OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + + type_name: Union[str, UnsetType] = "Artifact" + + artifact_version: Union[str, None, UnsetType] = UNSET + """Version identifier for this artifact.""" + + catalog_dataset_guid: Union[str, None, 
UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + file_type: Union[str, None, UnsetType] = UNSET + """Type (extension) of the file.""" + + file_path: Union[str, None, UnsetType] = UNSET + """URL giving the online location where the file can be accessed.""" + + link: Union[str, None, UnsetType] = UNSET + """URL to the resource.""" + + is_global: Union[bool, None, UnsetType] = UNSET + """Whether the resource is global (true) or not (false).""" + + reference: Union[str, None, UnsetType] = UNSET + """Reference to the resource.""" + + resource_metadata: Union[Dict[str, str], None, UnsetType] = UNSET + """Metadata of the resource.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + 
"""Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: 
Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "Artifact" + + + + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= + + def to_json(self, nested: bool = True, serde: Serde | None = None) -> str: + """ + Convert to JSON string using optimized nested struct serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. + + Returns: + JSON string representation + """ + if serde is None: + serde = get_serde() + if nested: + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _artifact_to_nested_bytes(self, serde) + + @staticmethod + def from_json(json_data: str | bytes, serde: Serde | None = None) -> Artifact: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. 
+ + Returns: + Artifact instance + """ + if isinstance(json_data, str): + json_data = json_data.encode("utf-8") + if serde is None: + serde = get_serde() + return _artifact_from_nested_bytes(json_data, serde) + + +# ============================================================================= +# NESTED FORMAT CLASSES +# ============================================================================= + +class ArtifactAttributes(AssetAttributes): + """Artifact-specific attributes for nested API format.""" + + artifact_version: Union[str, None, UnsetType] = UNSET + """Version identifier for this artifact.""" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + file_type: Union[str, None, UnsetType] = UNSET + """Type (extension) of the file.""" + + file_path: Union[str, None, UnsetType] = UNSET + """URL giving the online location where the file can be accessed.""" + + link: Union[str, None, UnsetType] = UNSET + """URL to the resource.""" + + is_global: Union[bool, None, UnsetType] = UNSET + """Whether the resource is global (true) or not (false).""" + + reference: Union[str, None, UnsetType] = UNSET + """Reference to the resource.""" + + resource_metadata: Union[Dict[str, str], None, UnsetType] = UNSET + """Metadata of the resource.""" + +class ArtifactRelationshipAttributes(AssetRelationshipAttributes): + """Artifact-specific relationship attributes for nested API format.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: 
Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + 
partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + +class ArtifactNested(AssetNested): + """Artifact in nested API format for high-performance serialization.""" + + attributes: Union[ArtifactAttributes, UnsetType] = UNSET + relationship_attributes: Union[ArtifactRelationshipAttributes, UnsetType] = UNSET + append_relationship_attributes: Union[ArtifactRelationshipAttributes, UnsetType] = UNSET + remove_relationship_attributes: Union[ArtifactRelationshipAttributes, UnsetType] = UNSET + +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_ARTIFACT_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, 
+ "input_to_airflow_tasks", + "output_from_airflow_tasks", + "anomalo_checks", + "application", + "application_field", + "data_contract_latest", + "data_contract_latest_certified", + "output_port_data_products", + "input_port_data_products", + "model_implemented_entities", + "model_implemented_attributes", + "metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + "mc_monitors", + "mc_incidents", + "partial_child_fields", + "partial_child_objects", + "input_to_processes", + "output_from_processes", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + "soda_checks", + "input_to_spark_jobs", + "output_from_spark_jobs", +] + +def _populate_artifact_attrs(attrs: ArtifactAttributes, obj: Artifact) -> None: + """Populate Artifact-specific attributes on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.artifact_version = obj.artifact_version + attrs.catalog_dataset_guid = obj.catalog_dataset_guid + attrs.file_type = obj.file_type + attrs.file_path = obj.file_path + attrs.link = obj.link + attrs.is_global = obj.is_global + attrs.reference = obj.reference + attrs.resource_metadata = obj.resource_metadata + +def _extract_artifact_attrs(attrs: ArtifactAttributes) -> dict: + """Extract all Artifact attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["artifact_version"] = attrs.artifact_version + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid + result["file_type"] = attrs.file_type + result["file_path"] = attrs.file_path + result["link"] = attrs.link + result["is_global"] = attrs.is_global + result["reference"] = attrs.reference + result["resource_metadata"] = attrs.resource_metadata + return result + +# ============================================================================= +# CONVERSION FUNCTIONS +# 
============================================================================= + + +def _artifact_to_nested(artifact: Artifact) -> ArtifactNested: + """Convert flat Artifact to nested format.""" + attrs = ArtifactAttributes() + _populate_artifact_attrs(attrs, artifact) + # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE) + replace_rels, append_rels, remove_rels = categorize_relationships( + artifact, _ARTIFACT_REL_FIELDS, ArtifactRelationshipAttributes + ) + return ArtifactNested( + guid=artifact.guid, + type_name=artifact.type_name, + status=artifact.status, + version=artifact.version, + create_time=artifact.create_time, + update_time=artifact.update_time, + created_by=artifact.created_by, + updated_by=artifact.updated_by, + classifications=artifact.classifications, + classification_names=artifact.classification_names, + meanings=artifact.meanings, + labels=artifact.labels, + business_attributes=artifact.business_attributes, + custom_attributes=artifact.custom_attributes, + pending_tasks=artifact.pending_tasks, + proxy=artifact.proxy, + is_incomplete=artifact.is_incomplete, + provenance_type=artifact.provenance_type, + home_id=artifact.home_id, + attributes=attrs, + relationship_attributes=replace_rels, + append_relationship_attributes=append_rels, + remove_relationship_attributes=remove_rels, + ) + +def _artifact_from_nested(nested: ArtifactNested) -> Artifact: + """Convert nested format to flat Artifact.""" + attrs = nested.attributes if nested.attributes is not UNSET else ArtifactAttributes() + # Merge relationships from all three buckets + merged_rels = merge_relationships( + nested.relationship_attributes, + nested.append_relationship_attributes, + nested.remove_relationship_attributes, + _ARTIFACT_REL_FIELDS, + ArtifactRelationshipAttributes + ) + return Artifact( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + 
created_by=nested.created_by, + updated_by=nested.updated_by, + classifications=nested.classifications, + classification_names=nested.classification_names, + meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + custom_attributes=nested.custom_attributes, + pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_artifact_attrs(attrs), + # Merged relationship attributes + **merged_rels, + ) + +def _artifact_to_nested_bytes(artifact: Artifact, serde: Serde) -> bytes: + """Convert flat Artifact to nested JSON bytes.""" + return serde.encode(_artifact_to_nested(artifact)) + + +def _artifact_from_nested_bytes(data: bytes, serde: Serde) -> Artifact: + """Convert nested JSON bytes to flat Artifact.""" + nested = serde.decode(data, ArtifactNested) + return _artifact_from_nested(nested) + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# --------------------------------------------------------------------------- +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, + KeywordField, + RelationField, +) + +Artifact.ARTIFACT_VERSION = KeywordField("artifactVersion", "artifactVersion") +Artifact.CATALOG_DATASET_GUID = KeywordField("catalogDatasetGuid", "catalogDatasetGuid") +Artifact.FILE_TYPE = KeywordField("fileType", "fileType") +Artifact.FILE_PATH = KeywordField("filePath", "filePath") +Artifact.LINK = KeywordField("link", "link") +Artifact.IS_GLOBAL = BooleanField("isGlobal", "isGlobal") +Artifact.REFERENCE = KeywordField("reference", "reference") +Artifact.RESOURCE_METADATA = KeywordField("resourceMetadata", "resourceMetadata") +Artifact.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") +Artifact.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") +Artifact.ANOMALO_CHECKS = 
RelationField("anomaloChecks") +Artifact.APPLICATION = RelationField("application") +Artifact.APPLICATION_FIELD = RelationField("applicationField") +Artifact.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +Artifact.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +Artifact.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +Artifact.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +Artifact.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") +Artifact.MODEL_IMPLEMENTED_ATTRIBUTES = RelationField("modelImplementedAttributes") +Artifact.METRICS = RelationField("metrics") +Artifact.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +Artifact.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Artifact.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField("gcpDataplexAspectTypeMetadataEntities") +Artifact.MEANINGS = RelationField("meanings") +Artifact.MC_MONITORS = RelationField("mcMonitors") +Artifact.MC_INCIDENTS = RelationField("mcIncidents") +Artifact.PARTIAL_CHILD_FIELDS = RelationField("partialChildFields") +Artifact.PARTIAL_CHILD_OBJECTS = RelationField("partialChildObjects") +Artifact.INPUT_TO_PROCESSES = RelationField("inputToProcesses") +Artifact.OUTPUT_FROM_PROCESSES = RelationField("outputFromProcesses") +Artifact.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +Artifact.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +Artifact.FILES = RelationField("files") +Artifact.LINKS = RelationField("links") +Artifact.README = RelationField("readme") +Artifact.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +Artifact.SODA_CHECKS = RelationField("sodaChecks") +Artifact.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs") +Artifact.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs") \ No newline at end of file diff --git a/pyatlan_v9/model/assets/artifact_related.py 
b/pyatlan_v9/model/assets/artifact_related.py new file mode 100644 index 000000000..39c509aaa --- /dev/null +++ b/pyatlan_v9/model/assets/artifact_related.py @@ -0,0 +1,42 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Related type classes for Artifact module. + +This module contains all Related{Type} classes for the Artifact type hierarchy. +These classes are used for relationship attributes to reference related entities. +""" + +from __future__ import annotations + +from typing import Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .agentic_related import RelatedAgentic +from .referenceable_related import RelatedReferenceable + +__all__ = [ + "RelatedArtifact", +] + + +class RelatedArtifact(RelatedAgentic): + """ + Related entity reference for Artifact assets. + + Extends RelatedAgentic with Artifact-specific attributes. + """ + + # type_name inherited from parent with default=UNSET + # __post_init__ sets it to "Artifact" so it serializes correctly + + artifact_version: Union[str, None, UnsetType] = UNSET + """Version identifier for this artifact.""" + + def __post_init__(self) -> None: + RelatedReferenceable.__post_init__(self) + self.type_name = "Artifact" diff --git a/pyatlan_v9/model/assets/asset.py b/pyatlan_v9/model/assets/asset.py index 4e6c66432..988fb7eec 100644 --- a/pyatlan_v9/model/assets/asset.py +++ b/pyatlan_v9/model/assets/asset.py @@ -30,10 +30,10 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedAsset from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from 
.monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable import ( @@ -199,6 +199,11 @@ class Asset(Referenceable): ASSET_SODA_CHECK_STATUSES: ClassVar[Any] = None ASSET_SODA_SOURCE_URL: ClassVar[Any] = None ASSET_ICON: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_SCORE_VALUE: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_ENTITIES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_AVG_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_MIN_SCORES: ClassVar[Any] = None ASSET_EXTERNAL_DQ_METADATA_DETAILS: ClassVar[Any] = None IS_PARTIAL: ClassVar[Any] = None IS_AI_GENERATED: ClassVar[Any] = None @@ -263,6 +268,7 @@ class Asset(Referenceable): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -717,6 +723,31 @@ class Asset(Referenceable): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. (Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. 
Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -985,6 +1016,11 @@ class Asset(Referenceable): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1023,66 +1059,6 @@ def __post_init__(self) -> None: if self.type_name is UNSET: self.type_name = "Asset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Asset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Asset validation failed: {errors}") - - def minimize(self) -> "Asset": - """ - Return a minimal copy of this Asset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Asset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Asset instance with only the minimum required fields. - """ - self.validate() - return Asset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAsset": - """ - Create a :class:`RelatedAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAsset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAsset(guid=self.guid) - return RelatedAsset(qualified_name=self.qualified_name) - @classmethod def ref_by_guid( cls, guid: str, semantic: "SaveSemantic | str" = SaveSemantic.REPLACE @@ -1823,6 +1799,31 @@ class AssetAttributes(ReferenceableAttributes): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. 
(Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -2095,6 +2096,11 @@ class AssetRelationshipAttributes(ReferenceableRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -2159,6 +2165,7 @@ class AssetNested(ReferenceableNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", 
"mc_monitors", "mc_incidents", @@ -2352,6 +2359,13 @@ def _populate_asset_attrs(attrs: AssetAttributes, obj: Asset) -> None: attrs.asset_soda_check_statuses = obj.asset_soda_check_statuses attrs.asset_soda_source_url = obj.asset_soda_source_url attrs.asset_icon = obj.asset_icon + attrs.asset_external_dq_score_value = obj.asset_external_dq_score_value + attrs.asset_external_dq_test_entities = obj.asset_external_dq_test_entities + attrs.asset_external_dq_test_latest_scores = ( + obj.asset_external_dq_test_latest_scores + ) + attrs.asset_external_dq_test_avg_scores = obj.asset_external_dq_test_avg_scores + attrs.asset_external_dq_test_min_scores = obj.asset_external_dq_test_min_scores attrs.asset_external_dq_metadata_details = obj.asset_external_dq_metadata_details attrs.is_partial = obj.is_partial attrs.is_ai_generated = obj.is_ai_generated @@ -2626,6 +2640,17 @@ def _extract_asset_attrs(attrs: AssetAttributes) -> dict: result["asset_soda_check_statuses"] = attrs.asset_soda_check_statuses result["asset_soda_source_url"] = attrs.asset_soda_source_url result["asset_icon"] = attrs.asset_icon + result["asset_external_dq_score_value"] = attrs.asset_external_dq_score_value + result["asset_external_dq_test_entities"] = attrs.asset_external_dq_test_entities + result["asset_external_dq_test_latest_scores"] = ( + attrs.asset_external_dq_test_latest_scores + ) + result["asset_external_dq_test_avg_scores"] = ( + attrs.asset_external_dq_test_avg_scores + ) + result["asset_external_dq_test_min_scores"] = ( + attrs.asset_external_dq_test_min_scores + ) result["asset_external_dq_metadata_details"] = ( attrs.asset_external_dq_metadata_details ) @@ -2740,9 +2765,6 @@ def _asset_to_nested(asset: Asset) -> AssetNested: is_incomplete=asset.is_incomplete, provenance_type=asset.provenance_type, home_id=asset.home_id, - depth=asset.depth, - immediate_upstream=asset.immediate_upstream, - immediate_downstream=asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, 
append_relationship_attributes=append_rels, @@ -2772,6 +2794,7 @@ def _asset_from_nested(nested: AssetNested) -> Asset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -2780,9 +2803,6 @@ def _asset_from_nested(nested: AssetNested) -> Asset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_asset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -3134,6 +3154,21 @@ def _asset_from_nested_bytes(data: bytes, serde: Serde) -> Asset: ) Asset.ASSET_SODA_SOURCE_URL = KeywordField("assetSodaSourceURL", "assetSodaSourceURL") Asset.ASSET_ICON = KeywordField("assetIcon", "assetIcon") +Asset.ASSET_EXTERNAL_DQ_SCORE_VALUE = NumericField( + "assetExternalDQScoreValue", "assetExternalDQScoreValue" +) +Asset.ASSET_EXTERNAL_DQ_TEST_ENTITIES = KeywordField( + "assetExternalDQTestEntities", "assetExternalDQTestEntities" +) +Asset.ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES = NumericField( + "assetExternalDQTestLatestScores", "assetExternalDQTestLatestScores" +) +Asset.ASSET_EXTERNAL_DQ_TEST_AVG_SCORES = NumericField( + "assetExternalDQTestAvgScores", "assetExternalDQTestAvgScores" +) +Asset.ASSET_EXTERNAL_DQ_TEST_MIN_SCORES = NumericField( + "assetExternalDQTestMinScores", "assetExternalDQTestMinScores" +) Asset.ASSET_EXTERNAL_DQ_METADATA_DETAILS = KeywordField( "assetExternalDQMetadataDetails", "assetExternalDQMetadataDetails" ) @@ -3283,6 +3318,9 @@ def _asset_from_nested_bytes(data: bytes, serde: Serde) -> Asset: Asset.METRICS = RelationField("metrics") Asset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Asset.DQ_REFERENCE_DATASET_RULES = 
RelationField("dqReferenceDatasetRules") +Asset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Asset.MEANINGS = RelationField("meanings") Asset.MC_MONITORS = RelationField("mcMonitors") Asset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/asset_grouping.py b/pyatlan_v9/model/assets/asset_grouping.py index bf3f21d0a..b5022ac34 100644 --- a/pyatlan_v9/model/assets/asset_grouping.py +++ b/pyatlan_v9/model/assets/asset_grouping.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .asset_grouping_related import RelatedAssetGrouping from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +78,7 @@ class AssetGrouping(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class AssetGrouping(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AssetGrouping" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class AssetGrouping(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this 
Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class AssetGrouping(Asset): def __post_init__(self) -> None: self.type_name = "AssetGrouping" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AssetGrouping instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AssetGrouping validation failed: {errors}") - - def minimize(self) -> "AssetGrouping": - """ - Return a minimal copy of this AssetGrouping with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AssetGrouping with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AssetGrouping instance with only the minimum required fields. 
- """ - self.validate() - return AssetGrouping(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAssetGrouping": - """ - Create a :class:`RelatedAssetGrouping` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAssetGrouping reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAssetGrouping(guid=self.guid) - return RelatedAssetGrouping(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class AssetGroupingRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -455,6 +408,7 @@ class AssetGroupingNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -522,9 +476,6 @@ def _asset_grouping_to_nested(asset_grouping: AssetGrouping) -> AssetGroupingNes is_incomplete=asset_grouping.is_incomplete, provenance_type=asset_grouping.provenance_type, home_id=asset_grouping.home_id, - depth=asset_grouping.depth, - immediate_upstream=asset_grouping.immediate_upstream, - immediate_downstream=asset_grouping.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -558,6 +509,7 @@ def 
_asset_grouping_from_nested(nested: AssetGroupingNested) -> AssetGrouping: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -566,9 +518,6 @@ def _asset_grouping_from_nested(nested: AssetGroupingNested) -> AssetGrouping: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_asset_grouping_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -612,6 +561,9 @@ def _asset_grouping_from_nested_bytes(data: bytes, serde: Serde) -> AssetGroupin AssetGrouping.METRICS = RelationField("metrics") AssetGrouping.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AssetGrouping.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AssetGrouping.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AssetGrouping.MEANINGS = RelationField("meanings") AssetGrouping.MC_MONITORS = RelationField("mcMonitors") AssetGrouping.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/asset_grouping_collection.py b/pyatlan_v9/model/assets/asset_grouping_collection.py index b7629c0b7..3278d5ed4 100644 --- a/pyatlan_v9/model/assets/asset_grouping_collection.py +++ b/pyatlan_v9/model/assets/asset_grouping_collection.py @@ -38,13 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .asset_grouping_related import ( - RelatedAssetGroupingCollection, - RelatedAssetGroupingStrategy, -) +from .asset_grouping_related import RelatedAssetGroupingStrategy from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -83,6 +81,7 @@ class AssetGroupingCollection(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +99,8 @@ class AssetGroupingCollection(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AssetGroupingCollection" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -154,6 +155,11 @@ class AssetGroupingCollection(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -215,74 +221,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AssetGroupingCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.asset_grouping_strategy is UNSET: - errors.append("asset_grouping_strategy is required for creation") - if errors: - raise ValueError(f"AssetGroupingCollection validation failed: {errors}") - - def minimize(self) -> "AssetGroupingCollection": - """ - Return a minimal copy of this AssetGroupingCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AssetGroupingCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AssetGroupingCollection instance with only the minimum required fields. - """ - self.validate() - return AssetGroupingCollection( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedAssetGroupingCollection": - """ - Create a :class:`RelatedAssetGroupingCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAssetGroupingCollection reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAssetGroupingCollection(guid=self.guid) - return RelatedAssetGroupingCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -398,6 +336,11 @@ class AssetGroupingCollectionRelationshipAttributes(AssetRelationshipAttributes) ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -487,6 +430,7 @@ class AssetGroupingCollectionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -560,9 +504,6 @@ def _asset_grouping_collection_to_nested( is_incomplete=asset_grouping_collection.is_incomplete, provenance_type=asset_grouping_collection.provenance_type, home_id=asset_grouping_collection.home_id, - depth=asset_grouping_collection.depth, - immediate_upstream=asset_grouping_collection.immediate_upstream, - immediate_downstream=asset_grouping_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -598,6 +539,7 @@ def _asset_grouping_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, 
business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -606,9 +548,6 @@ def _asset_grouping_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_asset_grouping_collection_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -667,6 +606,9 @@ def _asset_grouping_collection_from_nested_bytes( AssetGroupingCollection.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +AssetGroupingCollection.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AssetGroupingCollection.MEANINGS = RelationField("meanings") AssetGroupingCollection.MC_MONITORS = RelationField("mcMonitors") AssetGroupingCollection.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/asset_grouping_strategy.py b/pyatlan_v9/model/assets/asset_grouping_strategy.py index 6b090949e..66d1cb231 100644 --- a/pyatlan_v9/model/assets/asset_grouping_strategy.py +++ b/pyatlan_v9/model/assets/asset_grouping_strategy.py @@ -37,13 +37,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .asset_grouping_related import ( - RelatedAssetGroupingCollection, - RelatedAssetGroupingStrategy, -) +from .asset_grouping_related import RelatedAssetGroupingCollection from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -82,6 +80,7 @@ class AssetGroupingStrategy(Asset): METRICS: ClassVar[Any] = None 
DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -99,6 +98,8 @@ class AssetGroupingStrategy(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AssetGroupingStrategy" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -153,6 +154,11 @@ class AssetGroupingStrategy(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -208,66 +214,6 @@ class AssetGroupingStrategy(Asset): def __post_init__(self) -> None: self.type_name = "AssetGroupingStrategy" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AssetGroupingStrategy instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AssetGroupingStrategy validation failed: {errors}") - - def minimize(self) -> "AssetGroupingStrategy": - """ - Return a minimal copy of this AssetGroupingStrategy with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AssetGroupingStrategy with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AssetGroupingStrategy instance with only the minimum required fields. - """ - self.validate() - return AssetGroupingStrategy(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAssetGroupingStrategy": - """ - Create a :class:`RelatedAssetGroupingStrategy` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAssetGroupingStrategy reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAssetGroupingStrategy(guid=self.guid) - return RelatedAssetGroupingStrategy(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -383,6 +329,11 @@ class AssetGroupingStrategyRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -472,6 +423,7 @@ class AssetGroupingStrategyNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -545,9 +497,6 @@ def _asset_grouping_strategy_to_nested( is_incomplete=asset_grouping_strategy.is_incomplete, provenance_type=asset_grouping_strategy.provenance_type, home_id=asset_grouping_strategy.home_id, - depth=asset_grouping_strategy.depth, - immediate_upstream=asset_grouping_strategy.immediate_upstream, - immediate_downstream=asset_grouping_strategy.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -583,6 +532,7 @@ def _asset_grouping_strategy_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -591,9 +541,6 @@ def _asset_grouping_strategy_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_asset_grouping_strategy_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -652,6 +599,9 @@ def _asset_grouping_strategy_from_nested_bytes( AssetGroupingStrategy.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +AssetGroupingStrategy.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AssetGroupingStrategy.MEANINGS = RelationField("meanings") AssetGroupingStrategy.MC_MONITORS = RelationField("mcMonitors") AssetGroupingStrategy.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/asset_related.py b/pyatlan_v9/model/assets/asset_related.py index b73f57182..5ea8be06f 100644 --- a/pyatlan_v9/model/assets/asset_related.py +++ b/pyatlan_v9/model/assets/asset_related.py @@ -480,6 +480,31 @@ class RelatedAsset(RelatedReferenceable): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. (Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. 
Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -776,7 +801,7 @@ class RelatedIncident(RelatedAsset): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "Incident" so it serializes correctly - incident_severity: Union[str, None, UnsetType] = UNSET + asset_severity: Union[str, None, UnsetType] = UNSET """Status of this asset's severity.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/atlan_app.py b/pyatlan_v9/model/assets/atlan_app.py index 8bfed7bde..d8965928b 100644 --- a/pyatlan_v9/model/assets/atlan_app.py +++ b/pyatlan_v9/model/assets/atlan_app.py @@ -37,14 +37,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .atlan_app_related import ( - RelatedAtlanApp, - RelatedAtlanAppTool, - RelatedAtlanAppWorkflow, -) +from .atlan_app_related import RelatedAtlanAppTool, RelatedAtlanAppWorkflow from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import 
RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -88,6 +85,7 @@ class AtlanApp(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +103,8 @@ class AtlanApp(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlanApp" + atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the Atlan application this asset belongs to.""" @@ -172,6 +172,11 @@ class AtlanApp(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -227,66 +232,6 @@ class AtlanApp(Asset): def __post_init__(self) -> None: self.type_name = "AtlanApp" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanApp instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AtlanApp validation failed: {errors}") - - def minimize(self) -> "AtlanApp": - """ - Return a minimal copy of this AtlanApp with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanApp with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanApp instance with only the minimum required fields. - """ - self.validate() - return AtlanApp(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanApp": - """ - Create a :class:`RelatedAtlanApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanApp reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlanApp(guid=self.guid) - return RelatedAtlanApp(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -413,6 +358,11 @@ class AtlanAppRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -501,6 +451,7 @@ class AtlanAppNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -574,9 +525,6 @@ def _atlan_app_to_nested(atlan_app: AtlanApp) -> AtlanAppNested: is_incomplete=atlan_app.is_incomplete, provenance_type=atlan_app.provenance_type, home_id=atlan_app.home_id, - depth=atlan_app.depth, - immediate_upstream=atlan_app.immediate_upstream, - immediate_downstream=atlan_app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -608,6 +556,7 @@ def _atlan_app_from_nested(nested: AtlanAppNested) -> AtlanApp: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -616,9 +565,6 @@ def _atlan_app_from_nested(nested: AtlanAppNested) -> AtlanApp: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -668,6 +614,9 @@ def _atlan_app_from_nested_bytes(data: bytes, serde: Serde) -> AtlanApp: AtlanApp.METRICS = RelationField("metrics") AtlanApp.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AtlanApp.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AtlanApp.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AtlanApp.MEANINGS = RelationField("meanings") AtlanApp.MC_MONITORS = RelationField("mcMonitors") AtlanApp.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/atlan_app_deployment.py b/pyatlan_v9/model/assets/atlan_app_deployment.py index 82abb2b7b..7df94a485 100644 --- a/pyatlan_v9/model/assets/atlan_app_deployment.py +++ b/pyatlan_v9/model/assets/atlan_app_deployment.py @@ -38,14 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .atlan_app_related import ( - RelatedAtlanAppDeployment, - RelatedAtlanAppTool, - RelatedAtlanAppWorkflow, -) +from .atlan_app_related import RelatedAtlanAppTool, RelatedAtlanAppWorkflow from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -95,6 +92,7 @@ class AtlanAppDeployment(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = 
None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +110,8 @@ class AtlanAppDeployment(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlanAppDeployment" + atlan_app_version_id: Union[int, None, UnsetType] = UNSET """Version identifier for deployment.""" @@ -199,6 +199,11 @@ class AtlanAppDeployment(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -254,66 +259,6 @@ class AtlanAppDeployment(Asset): def __post_init__(self) -> None: self.type_name = "AtlanAppDeployment" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanAppDeployment instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AtlanAppDeployment validation failed: {errors}") - - def minimize(self) -> "AtlanAppDeployment": - """ - Return a minimal copy of this AtlanAppDeployment with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanAppDeployment with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanAppDeployment instance with only the minimum required fields. - """ - self.validate() - return AtlanAppDeployment(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanAppDeployment": - """ - Create a :class:`RelatedAtlanAppDeployment` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanAppDeployment reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlanAppDeployment(guid=self.guid) - return RelatedAtlanAppDeployment(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -462,6 +407,11 @@ class AtlanAppDeploymentRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -552,6 +502,7 @@ class AtlanAppDeploymentNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -643,9 +594,6 @@ def _atlan_app_deployment_to_nested( is_incomplete=atlan_app_deployment.is_incomplete, provenance_type=atlan_app_deployment.provenance_type, home_id=atlan_app_deployment.home_id, - depth=atlan_app_deployment.depth, - immediate_upstream=atlan_app_deployment.immediate_upstream, - immediate_downstream=atlan_app_deployment.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -681,6 +629,7 @@ def _atlan_app_deployment_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -689,9 +638,6 @@ def _atlan_app_deployment_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_deployment_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -772,6 +718,9 @@ def _atlan_app_deployment_from_nested_bytes( AtlanAppDeployment.METRICS = RelationField("metrics") AtlanAppDeployment.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AtlanAppDeployment.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AtlanAppDeployment.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AtlanAppDeployment.MEANINGS = RelationField("meanings") AtlanAppDeployment.MC_MONITORS = RelationField("mcMonitors") AtlanAppDeployment.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/atlan_app_installed.py b/pyatlan_v9/model/assets/atlan_app_installed.py index bac5bbf30..88e3ff1cd 100644 --- a/pyatlan_v9/model/assets/atlan_app_installed.py +++ b/pyatlan_v9/model/assets/atlan_app_installed.py @@ -38,14 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .atlan_app_related import ( - RelatedAtlanAppInstalled, - RelatedAtlanAppTool, - RelatedAtlanAppWorkflow, -) +from .atlan_app_related import RelatedAtlanAppTool, RelatedAtlanAppWorkflow from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -93,6 +90,7 @@ class AtlanAppInstalled(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: 
ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,6 +108,8 @@ class AtlanAppInstalled(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlanAppInstalled" + atlan_app_current_version_id: Union[int, None, UnsetType] = UNSET """Current version identifier for the atlan application.""" @@ -191,6 +191,11 @@ class AtlanAppInstalled(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -246,66 +251,6 @@ class AtlanAppInstalled(Asset): def __post_init__(self) -> None: self.type_name = "AtlanAppInstalled" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanAppInstalled instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AtlanAppInstalled validation failed: {errors}") - - def minimize(self) -> "AtlanAppInstalled": - """ - Return a minimal copy of this AtlanAppInstalled with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanAppInstalled with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanAppInstalled instance with only the minimum required fields. - """ - self.validate() - return AtlanAppInstalled(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanAppInstalled": - """ - Create a :class:`RelatedAtlanAppInstalled` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanAppInstalled reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlanAppInstalled(guid=self.guid) - return RelatedAtlanAppInstalled(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -448,6 +393,11 @@ class AtlanAppInstalledRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -538,6 +488,7 @@ class AtlanAppInstalledNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -625,9 +576,6 @@ def _atlan_app_installed_to_nested( is_incomplete=atlan_app_installed.is_incomplete, provenance_type=atlan_app_installed.provenance_type, home_id=atlan_app_installed.home_id, - depth=atlan_app_installed.depth, - immediate_upstream=atlan_app_installed.immediate_upstream, - immediate_downstream=atlan_app_installed.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -663,6 +611,7 @@ def _atlan_app_installed_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -671,9 +620,6 @@ def _atlan_app_installed_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, 
- depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_installed_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -746,6 +692,9 @@ def _atlan_app_installed_from_nested_bytes( AtlanAppInstalled.METRICS = RelationField("metrics") AtlanAppInstalled.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AtlanAppInstalled.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AtlanAppInstalled.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AtlanAppInstalled.MEANINGS = RelationField("meanings") AtlanAppInstalled.MC_MONITORS = RelationField("mcMonitors") AtlanAppInstalled.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/atlan_app_related.py b/pyatlan_v9/model/assets/atlan_app_related.py index 13605625c..80ad9c470 100644 --- a/pyatlan_v9/model/assets/atlan_app_related.py +++ b/pyatlan_v9/model/assets/atlan_app_related.py @@ -126,16 +126,16 @@ class RelatedAtlanAppTool(RelatedAtlanApp): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "AtlanAppTool" so it serializes correctly - atlan_app_tool_input_schema: Union[str, None, UnsetType] = UNSET + atlan_app_input_schema: Union[str, None, UnsetType] = UNSET """Input schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_output_schema: Union[str, None, UnsetType] = UNSET + atlan_app_output_schema: Union[str, None, UnsetType] = UNSET """Output schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_task_queue: Union[str, None, UnsetType] = UNSET + atlan_app_task_queue: Union[str, None, UnsetType] = UNSET """Name of the Temporal task queue for the Atlan application tool.""" - atlan_app_tool_category: Union[str, None, UnsetType] = UNSET + atlan_app_category: Union[str, None, UnsetType] = UNSET """Category of the 
tool.""" def __post_init__(self) -> None: @@ -153,25 +153,25 @@ class RelatedAtlanAppWorkflow(RelatedAtlanApp): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "AtlanAppWorkflow" so it serializes correctly - atlan_app_workflow_version: Union[str, None, UnsetType] = UNSET + atlan_app_version: Union[str, None, UnsetType] = UNSET """Version of the workflow.""" - atlan_app_workflow_slug: Union[str, None, UnsetType] = UNSET + atlan_app_slug: Union[str, None, UnsetType] = UNSET """Slug of the workflow.""" - atlan_app_workflow_dag: Union[str, None, UnsetType] = UNSET + atlan_app_dag: Union[str, None, UnsetType] = UNSET """Map of all activity steps for the workflow (escaped JSON string).""" - atlan_app_workflow_status: Union[str, None, UnsetType] = UNSET + atlan_app_status: Union[str, None, UnsetType] = UNSET """Status of the workflow.""" - atlan_app_workflow_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET + atlan_app_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET """Error handling strategy for the workflow.""" - atlan_app_workflow_ownership: Union[str, None, UnsetType] = UNSET + atlan_app_ownership: Union[str, None, UnsetType] = UNSET """Ownership type of the workflow, indicating whether it is managed by Atlan or by a user.""" - atlan_app_workflow_triggers: Union[str, None, UnsetType] = UNSET + atlan_app_triggers: Union[str, None, UnsetType] = UNSET """Triggers configured for this workflow (escaped JSON string).""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/atlan_app_tool.py b/pyatlan_v9/model/assets/atlan_app_tool.py index 31a085007..53e0c5845 100644 --- a/pyatlan_v9/model/assets/atlan_app_tool.py +++ b/pyatlan_v9/model/assets/atlan_app_tool.py @@ -46,6 +46,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import 
RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -68,10 +69,10 @@ class AtlanAppTool(Asset): Instance of a tool defined in an Atlan application. """ - ATLAN_APP_TOOL_INPUT_SCHEMA: ClassVar[Any] = None - ATLAN_APP_TOOL_OUTPUT_SCHEMA: ClassVar[Any] = None - ATLAN_APP_TOOL_TASK_QUEUE: ClassVar[Any] = None - ATLAN_APP_TOOL_CATEGORY: ClassVar[Any] = None + ATLAN_APP_INPUT_SCHEMA: ClassVar[Any] = None + ATLAN_APP_OUTPUT_SCHEMA: ClassVar[Any] = None + ATLAN_APP_TASK_QUEUE: ClassVar[Any] = None + ATLAN_APP_CATEGORY: ClassVar[Any] = None ATLAN_APP_QUALIFIED_NAME: ClassVar[Any] = None ATLAN_APP_NAME: ClassVar[Any] = None ATLAN_APP_METADATA: ClassVar[Any] = None @@ -94,6 +95,7 @@ class AtlanAppTool(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,16 +113,18 @@ class AtlanAppTool(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - atlan_app_tool_input_schema: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "AtlanAppTool" + + atlan_app_input_schema: Union[str, None, UnsetType] = UNSET """Input schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_output_schema: Union[str, None, UnsetType] = UNSET + atlan_app_output_schema: Union[str, None, UnsetType] = UNSET """Output schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_task_queue: Union[str, None, UnsetType] = UNSET + atlan_app_task_queue: Union[str, None, UnsetType] = UNSET """Name of the Temporal task queue for the Atlan application tool.""" - 
atlan_app_tool_category: Union[str, None, UnsetType] = UNSET + atlan_app_category: Union[str, None, UnsetType] = UNSET """Category of the tool.""" atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET @@ -193,6 +197,11 @@ class AtlanAppTool(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -254,74 +263,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanAppTool instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_app_name is UNSET: - errors.append("atlan_app_name is required for creation") - if self.atlan_app_qualified_name is UNSET: - errors.append("atlan_app_qualified_name is required for creation") - if errors: - raise ValueError(f"AtlanAppTool validation failed: {errors}") - - def minimize(self) -> "AtlanAppTool": - """ - Return a minimal copy of this AtlanAppTool with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanAppTool with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanAppTool instance with only the minimum required fields. - """ - self.validate() - return AtlanAppTool(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanAppTool": - """ - Create a :class:`RelatedAtlanAppTool` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanAppTool reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlanAppTool(guid=self.guid) - return RelatedAtlanAppTool(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -377,16 +318,16 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> AtlanAppToo class AtlanAppToolAttributes(AssetAttributes): """AtlanAppTool-specific attributes for nested API format.""" - atlan_app_tool_input_schema: Union[str, None, UnsetType] = UNSET + atlan_app_input_schema: Union[str, None, UnsetType] = UNSET """Input schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_output_schema: Union[str, None, UnsetType] = UNSET + atlan_app_output_schema: Union[str, None, UnsetType] = UNSET """Output schema for the Atlan application tool (escaped JSON string of JSONSchema).""" - atlan_app_tool_task_queue: Union[str, None, UnsetType] = UNSET + atlan_app_task_queue: Union[str, None, UnsetType] = UNSET """Name of the Temporal task queue for the Atlan application tool.""" - atlan_app_tool_category: Union[str, None, UnsetType] = UNSET + atlan_app_category: Union[str, None, UnsetType] = UNSET """Category of the tool.""" atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET @@ -463,6 +404,11 @@ class AtlanAppToolRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -554,6 +500,7 @@ class AtlanAppToolNested(AssetNested): "metrics", "dq_base_dataset_rules", 
"dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -578,10 +525,10 @@ def _populate_atlan_app_tool_attrs( ) -> None: """Populate AtlanAppTool-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.atlan_app_tool_input_schema = obj.atlan_app_tool_input_schema - attrs.atlan_app_tool_output_schema = obj.atlan_app_tool_output_schema - attrs.atlan_app_tool_task_queue = obj.atlan_app_tool_task_queue - attrs.atlan_app_tool_category = obj.atlan_app_tool_category + attrs.atlan_app_input_schema = obj.atlan_app_input_schema + attrs.atlan_app_output_schema = obj.atlan_app_output_schema + attrs.atlan_app_task_queue = obj.atlan_app_task_queue + attrs.atlan_app_category = obj.atlan_app_category attrs.atlan_app_qualified_name = obj.atlan_app_qualified_name attrs.atlan_app_name = obj.atlan_app_name attrs.atlan_app_metadata = obj.atlan_app_metadata @@ -592,10 +539,10 @@ def _populate_atlan_app_tool_attrs( def _extract_atlan_app_tool_attrs(attrs: AtlanAppToolAttributes) -> dict: """Extract all AtlanAppTool attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["atlan_app_tool_input_schema"] = attrs.atlan_app_tool_input_schema - result["atlan_app_tool_output_schema"] = attrs.atlan_app_tool_output_schema - result["atlan_app_tool_task_queue"] = attrs.atlan_app_tool_task_queue - result["atlan_app_tool_category"] = attrs.atlan_app_tool_category + result["atlan_app_input_schema"] = attrs.atlan_app_input_schema + result["atlan_app_output_schema"] = attrs.atlan_app_output_schema + result["atlan_app_task_queue"] = attrs.atlan_app_task_queue + result["atlan_app_category"] = attrs.atlan_app_category result["atlan_app_qualified_name"] = attrs.atlan_app_qualified_name result["atlan_app_name"] = attrs.atlan_app_name result["atlan_app_metadata"] = attrs.atlan_app_metadata @@ -637,9 +584,6 @@ def _atlan_app_tool_to_nested(atlan_app_tool: AtlanAppTool) 
-> AtlanAppToolNeste is_incomplete=atlan_app_tool.is_incomplete, provenance_type=atlan_app_tool.provenance_type, home_id=atlan_app_tool.home_id, - depth=atlan_app_tool.depth, - immediate_upstream=atlan_app_tool.immediate_upstream, - immediate_downstream=atlan_app_tool.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -673,6 +617,7 @@ def _atlan_app_tool_from_nested(nested: AtlanAppToolNested) -> AtlanAppTool: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -681,9 +626,6 @@ def _atlan_app_tool_from_nested(nested: AtlanAppToolNested) -> AtlanAppTool: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_tool_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -712,18 +654,16 @@ def _atlan_app_tool_from_nested_bytes(data: bytes, serde: Serde) -> AtlanAppTool TextField, ) -AtlanAppTool.ATLAN_APP_TOOL_INPUT_SCHEMA = TextField( - "atlanAppToolInputSchema", "atlanAppToolInputSchema" -) -AtlanAppTool.ATLAN_APP_TOOL_OUTPUT_SCHEMA = TextField( - "atlanAppToolOutputSchema", "atlanAppToolOutputSchema" +AtlanAppTool.ATLAN_APP_INPUT_SCHEMA = TextField( + "atlanAppInputSchema", "atlanAppInputSchema" ) -AtlanAppTool.ATLAN_APP_TOOL_TASK_QUEUE = KeywordField( - "atlanAppToolTaskQueue", "atlanAppToolTaskQueue" +AtlanAppTool.ATLAN_APP_OUTPUT_SCHEMA = TextField( + "atlanAppOutputSchema", "atlanAppOutputSchema" ) -AtlanAppTool.ATLAN_APP_TOOL_CATEGORY = KeywordField( - "atlanAppToolCategory", "atlanAppToolCategory" +AtlanAppTool.ATLAN_APP_TASK_QUEUE = KeywordField( + "atlanAppTaskQueue", 
"atlanAppTaskQueue" ) +AtlanAppTool.ATLAN_APP_CATEGORY = KeywordField("atlanAppCategory", "atlanAppCategory") AtlanAppTool.ATLAN_APP_QUALIFIED_NAME = KeywordField( "atlanAppQualifiedName", "atlanAppQualifiedName" ) @@ -752,6 +692,9 @@ def _atlan_app_tool_from_nested_bytes(data: bytes, serde: Serde) -> AtlanAppTool AtlanAppTool.METRICS = RelationField("metrics") AtlanAppTool.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AtlanAppTool.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AtlanAppTool.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AtlanAppTool.MEANINGS = RelationField("meanings") AtlanAppTool.MC_MONITORS = RelationField("mcMonitors") AtlanAppTool.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/atlan_app_workflow.py b/pyatlan_v9/model/assets/atlan_app_workflow.py index 0604550d3..398665ade 100644 --- a/pyatlan_v9/model/assets/atlan_app_workflow.py +++ b/pyatlan_v9/model/assets/atlan_app_workflow.py @@ -47,6 +47,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -69,13 +70,13 @@ class AtlanAppWorkflow(Asset): Instance of a workflow in an Atlan application. 
""" - ATLAN_APP_WORKFLOW_VERSION: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_SLUG: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_DAG: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_STATUS: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_ERROR_HANDLING: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_OWNERSHIP: ClassVar[Any] = None - ATLAN_APP_WORKFLOW_TRIGGERS: ClassVar[Any] = None + ATLAN_APP_VERSION: ClassVar[Any] = None + ATLAN_APP_SLUG: ClassVar[Any] = None + ATLAN_APP_DAG: ClassVar[Any] = None + ATLAN_APP_STATUS: ClassVar[Any] = None + ATLAN_APP_ERROR_HANDLING: ClassVar[Any] = None + ATLAN_APP_OWNERSHIP: ClassVar[Any] = None + ATLAN_APP_TRIGGERS: ClassVar[Any] = None ATLAN_APP_QUALIFIED_NAME: ClassVar[Any] = None ATLAN_APP_NAME: ClassVar[Any] = None ATLAN_APP_METADATA: ClassVar[Any] = None @@ -99,6 +100,7 @@ class AtlanAppWorkflow(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -116,25 +118,27 @@ class AtlanAppWorkflow(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - atlan_app_workflow_version: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "AtlanAppWorkflow" + + atlan_app_version: Union[str, None, UnsetType] = UNSET """Version of the workflow.""" - atlan_app_workflow_slug: Union[str, None, UnsetType] = UNSET + atlan_app_slug: Union[str, None, UnsetType] = UNSET """Slug of the workflow.""" - atlan_app_workflow_dag: Union[str, None, UnsetType] = UNSET + atlan_app_dag: Union[str, None, UnsetType] = UNSET """Map of all activity steps for the workflow (escaped JSON string).""" - atlan_app_workflow_status: Union[str, None, UnsetType] = UNSET + atlan_app_status: Union[str, None, UnsetType] = UNSET """Status of the workflow.""" - atlan_app_workflow_error_handling: 
Union[Dict[str, Any], None, UnsetType] = UNSET + atlan_app_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET """Error handling strategy for the workflow.""" - atlan_app_workflow_ownership: Union[str, None, UnsetType] = UNSET + atlan_app_ownership: Union[str, None, UnsetType] = UNSET """Ownership type of the workflow, indicating whether it is managed by Atlan or by a user.""" - atlan_app_workflow_triggers: Union[str, None, UnsetType] = UNSET + atlan_app_triggers: Union[str, None, UnsetType] = UNSET """Triggers configured for this workflow (escaped JSON string).""" atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET @@ -210,6 +214,11 @@ class AtlanAppWorkflow(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -271,74 +280,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlanAppWorkflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_app_name is UNSET: - errors.append("atlan_app_name is required for creation") - if self.atlan_app_qualified_name is UNSET: - errors.append("atlan_app_qualified_name is required for creation") - if errors: - raise ValueError(f"AtlanAppWorkflow validation failed: {errors}") - - def minimize(self) -> "AtlanAppWorkflow": - """ - Return a minimal copy of this AtlanAppWorkflow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlanAppWorkflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlanAppWorkflow instance with only the minimum required fields. - """ - self.validate() - return AtlanAppWorkflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAtlanAppWorkflow": - """ - Create a :class:`RelatedAtlanAppWorkflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlanAppWorkflow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlanAppWorkflow(guid=self.guid) - return RelatedAtlanAppWorkflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -396,25 +337,25 @@ def from_json( class AtlanAppWorkflowAttributes(AssetAttributes): """AtlanAppWorkflow-specific attributes for nested API format.""" - atlan_app_workflow_version: Union[str, None, UnsetType] = UNSET + atlan_app_version: Union[str, None, UnsetType] = UNSET """Version of the workflow.""" - atlan_app_workflow_slug: Union[str, None, UnsetType] = UNSET + atlan_app_slug: Union[str, None, UnsetType] = UNSET """Slug of the workflow.""" - atlan_app_workflow_dag: Union[str, None, UnsetType] = UNSET + atlan_app_dag: Union[str, None, UnsetType] = UNSET """Map of all activity steps for the workflow (escaped JSON string).""" - atlan_app_workflow_status: Union[str, None, UnsetType] = UNSET + atlan_app_status: Union[str, None, UnsetType] = UNSET """Status of the workflow.""" - atlan_app_workflow_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET + atlan_app_error_handling: Union[Dict[str, Any], None, UnsetType] = UNSET """Error handling strategy for the workflow.""" - atlan_app_workflow_ownership: Union[str, None, UnsetType] = UNSET + atlan_app_ownership: Union[str, None, UnsetType] = UNSET """Ownership type of the workflow, indicating whether it is managed by Atlan or by a user.""" - atlan_app_workflow_triggers: Union[str, None, UnsetType] = UNSET + atlan_app_triggers: Union[str, None, UnsetType] = UNSET """Triggers configured for this workflow (escaped JSON string).""" atlan_app_qualified_name: Union[str, None, UnsetType] = UNSET @@ -494,6 +435,11 @@ class AtlanAppWorkflowRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + 
gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -586,6 +532,7 @@ class AtlanAppWorkflowNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -610,13 +557,13 @@ def _populate_atlan_app_workflow_attrs( ) -> None: """Populate AtlanAppWorkflow-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.atlan_app_workflow_version = obj.atlan_app_workflow_version - attrs.atlan_app_workflow_slug = obj.atlan_app_workflow_slug - attrs.atlan_app_workflow_dag = obj.atlan_app_workflow_dag - attrs.atlan_app_workflow_status = obj.atlan_app_workflow_status - attrs.atlan_app_workflow_error_handling = obj.atlan_app_workflow_error_handling - attrs.atlan_app_workflow_ownership = obj.atlan_app_workflow_ownership - attrs.atlan_app_workflow_triggers = obj.atlan_app_workflow_triggers + attrs.atlan_app_version = obj.atlan_app_version + attrs.atlan_app_slug = obj.atlan_app_slug + attrs.atlan_app_dag = obj.atlan_app_dag + attrs.atlan_app_status = obj.atlan_app_status + attrs.atlan_app_error_handling = obj.atlan_app_error_handling + attrs.atlan_app_ownership = obj.atlan_app_ownership + attrs.atlan_app_triggers = obj.atlan_app_triggers attrs.atlan_app_qualified_name = obj.atlan_app_qualified_name attrs.atlan_app_name = obj.atlan_app_name attrs.atlan_app_metadata = obj.atlan_app_metadata @@ -627,15 +574,13 @@ def _populate_atlan_app_workflow_attrs( def _extract_atlan_app_workflow_attrs(attrs: AtlanAppWorkflowAttributes) -> dict: """Extract all AtlanAppWorkflow attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - 
result["atlan_app_workflow_version"] = attrs.atlan_app_workflow_version - result["atlan_app_workflow_slug"] = attrs.atlan_app_workflow_slug - result["atlan_app_workflow_dag"] = attrs.atlan_app_workflow_dag - result["atlan_app_workflow_status"] = attrs.atlan_app_workflow_status - result["atlan_app_workflow_error_handling"] = ( - attrs.atlan_app_workflow_error_handling - ) - result["atlan_app_workflow_ownership"] = attrs.atlan_app_workflow_ownership - result["atlan_app_workflow_triggers"] = attrs.atlan_app_workflow_triggers + result["atlan_app_version"] = attrs.atlan_app_version + result["atlan_app_slug"] = attrs.atlan_app_slug + result["atlan_app_dag"] = attrs.atlan_app_dag + result["atlan_app_status"] = attrs.atlan_app_status + result["atlan_app_error_handling"] = attrs.atlan_app_error_handling + result["atlan_app_ownership"] = attrs.atlan_app_ownership + result["atlan_app_triggers"] = attrs.atlan_app_triggers result["atlan_app_qualified_name"] = attrs.atlan_app_qualified_name result["atlan_app_name"] = attrs.atlan_app_name result["atlan_app_metadata"] = attrs.atlan_app_metadata @@ -681,9 +626,6 @@ def _atlan_app_workflow_to_nested( is_incomplete=atlan_app_workflow.is_incomplete, provenance_type=atlan_app_workflow.provenance_type, home_id=atlan_app_workflow.home_id, - depth=atlan_app_workflow.depth, - immediate_upstream=atlan_app_workflow.immediate_upstream, - immediate_downstream=atlan_app_workflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -717,6 +659,7 @@ def _atlan_app_workflow_from_nested(nested: AtlanAppWorkflowNested) -> AtlanAppW updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -725,9 +668,6 @@ def _atlan_app_workflow_from_nested(nested: AtlanAppWorkflowNested) -> 
AtlanAppW is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlan_app_workflow_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -758,27 +698,17 @@ def _atlan_app_workflow_from_nested_bytes( TextField, ) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_VERSION = KeywordField( - "atlanAppWorkflowVersion", "atlanAppWorkflowVersion" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_SLUG = KeywordField( - "atlanAppWorkflowSlug", "atlanAppWorkflowSlug" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_DAG = TextField( - "atlanAppWorkflowDag", "atlanAppWorkflowDag" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_STATUS = KeywordField( - "atlanAppWorkflowStatus", "atlanAppWorkflowStatus" +AtlanAppWorkflow.ATLAN_APP_VERSION = KeywordField("atlanAppVersion", "atlanAppVersion") +AtlanAppWorkflow.ATLAN_APP_SLUG = KeywordField("atlanAppSlug", "atlanAppSlug") +AtlanAppWorkflow.ATLAN_APP_DAG = TextField("atlanAppDag", "atlanAppDag") +AtlanAppWorkflow.ATLAN_APP_STATUS = KeywordField("atlanAppStatus", "atlanAppStatus") +AtlanAppWorkflow.ATLAN_APP_ERROR_HANDLING = KeywordField( + "atlanAppErrorHandling", "atlanAppErrorHandling" ) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_ERROR_HANDLING = KeywordField( - "atlanAppWorkflowErrorHandling", "atlanAppWorkflowErrorHandling" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_OWNERSHIP = KeywordField( - "atlanAppWorkflowOwnership", "atlanAppWorkflowOwnership" -) -AtlanAppWorkflow.ATLAN_APP_WORKFLOW_TRIGGERS = TextField( - "atlanAppWorkflowTriggers", "atlanAppWorkflowTriggers" +AtlanAppWorkflow.ATLAN_APP_OWNERSHIP = KeywordField( + "atlanAppOwnership", "atlanAppOwnership" ) +AtlanAppWorkflow.ATLAN_APP_TRIGGERS = TextField("atlanAppTriggers", "atlanAppTriggers") AtlanAppWorkflow.ATLAN_APP_QUALIFIED_NAME = KeywordField( "atlanAppQualifiedName", "atlanAppQualifiedName" ) @@ -810,6 +740,9 @@ def 
_atlan_app_workflow_from_nested_bytes( AtlanAppWorkflow.METRICS = RelationField("metrics") AtlanAppWorkflow.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AtlanAppWorkflow.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AtlanAppWorkflow.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AtlanAppWorkflow.MEANINGS = RelationField("meanings") AtlanAppWorkflow.MC_MONITORS = RelationField("mcMonitors") AtlanAppWorkflow.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/atlas_glossary.py b/pyatlan_v9/model/assets/atlas_glossary.py index facf603ea..b42bf516c 100644 --- a/pyatlan_v9/model/assets/atlas_glossary.py +++ b/pyatlan_v9/model/assets/atlas_glossary.py @@ -38,13 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) +from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .gtc_related import ( - RelatedAtlasGlossary, - RelatedAtlasGlossaryCategory, - RelatedAtlasGlossaryTerm, -) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryCategory, RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -71,11 +69,14 @@ class AtlasGlossary(Asset): ANOMALO_CHECKS: ClassVar[Any] = None APPLICATION: ClassVar[Any] = None APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None TERMS: ClassVar[Any] = None CATEGORIES: ClassVar[Any] = None @@ -89,6 +90,8 @@ class AtlasGlossary(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlasGlossary" + short_description: Union[str, None, UnsetType] = UNSET """Unused. A short definition of the glossary. See 'description' and 'userDescription' instead.""" @@ -116,6 +119,12 @@ class AtlasGlossary(Asset): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -133,6 +142,11 @@ class AtlasGlossary(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -168,7 +182,7 @@ class AtlasGlossary(Asset): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -176,70 +190,6 @@ class AtlasGlossary(Asset): def __post_init__(self) -> None: self.type_name = "AtlasGlossary" - # 
========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlasGlossary instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if self.guid is UNSET: - errors.append("guid is required") - if errors: - raise ValueError(f"AtlasGlossary validation failed: {errors}") - - def minimize(self) -> "AtlasGlossary": - """ - Return a minimal copy of this AtlasGlossary with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlasGlossary with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlasGlossary instance with only the minimum required fields. - """ - self.validate() - return AtlasGlossary( - guid=self.guid, name=self.name, qualified_name=self.qualified_name - ) - - def relate(self) -> "RelatedAtlasGlossary": - """ - Create a :class:`RelatedAtlasGlossary` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlasGlossary reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAtlasGlossary(guid=self.guid) - return RelatedAtlasGlossary(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, name: str) -> "AtlasGlossary": @@ -363,6 +313,12 @@ class AtlasGlossaryRelationshipAttributes(AssetRelationshipAttributes): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -380,6 +336,11 @@ class AtlasGlossaryRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -415,7 +376,7 @@ class AtlasGlossaryRelationshipAttributes(AssetRelationshipAttributes): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -445,11 +406,14 @@ class AtlasGlossaryNested(AssetNested): "anomalo_checks", "application", "application_field", + "data_contract_latest", + 
"data_contract_latest_certified", "output_port_data_products", "input_port_data_products", "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "terms", "categories", @@ -523,9 +487,6 @@ def _atlas_glossary_to_nested(atlas_glossary: AtlasGlossary) -> AtlasGlossaryNes is_incomplete=atlas_glossary.is_incomplete, provenance_type=atlas_glossary.provenance_type, home_id=atlas_glossary.home_id, - depth=atlas_glossary.depth, - immediate_upstream=atlas_glossary.immediate_upstream, - immediate_downstream=atlas_glossary.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -559,6 +520,7 @@ def _atlas_glossary_from_nested(nested: AtlasGlossaryNested) -> AtlasGlossary: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -567,9 +529,6 @@ def _atlas_glossary_from_nested(nested: AtlasGlossaryNested) -> AtlasGlossary: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlas_glossary_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -605,11 +564,18 @@ def _atlas_glossary_from_nested_bytes(data: bytes, serde: Serde) -> AtlasGlossar AtlasGlossary.ANOMALO_CHECKS = RelationField("anomaloChecks") AtlasGlossary.APPLICATION = RelationField("application") AtlasGlossary.APPLICATION_FIELD = RelationField("applicationField") +AtlasGlossary.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +AtlasGlossary.DATA_CONTRACT_LATEST_CERTIFIED = RelationField( + "dataContractLatestCertified" +) AtlasGlossary.OUTPUT_PORT_DATA_PRODUCTS = 
RelationField("outputPortDataProducts") AtlasGlossary.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") AtlasGlossary.METRICS = RelationField("metrics") AtlasGlossary.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AtlasGlossary.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AtlasGlossary.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AtlasGlossary.MEANINGS = RelationField("meanings") AtlasGlossary.TERMS = RelationField("terms") AtlasGlossary.CATEGORIES = RelationField("categories") diff --git a/pyatlan_v9/model/assets/atlas_glossary_category.py b/pyatlan_v9/model/assets/atlas_glossary_category.py index 2c1abcfb4..3a1a987df 100644 --- a/pyatlan_v9/model/assets/atlas_glossary_category.py +++ b/pyatlan_v9/model/assets/atlas_glossary_category.py @@ -38,8 +38,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) +from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import ( RelatedAtlasGlossary, RelatedAtlasGlossaryCategory, @@ -66,17 +68,20 @@ class AtlasGlossaryCategory(Asset): LONG_DESCRIPTION: ClassVar[Any] = None ADDITIONAL_ATTRIBUTES: ClassVar[Any] = None CATEGORY_TYPE: ClassVar[Any] = None - ANCHOR: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None APPLICATION: ClassVar[Any] = None APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None TERMS: 
ClassVar[Any] = None + ANCHOR: ClassVar[Any] = None CHILDREN_CATEGORIES: ClassVar[Any] = None PARENT_CATEGORY: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -89,6 +94,8 @@ class AtlasGlossaryCategory(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlasGlossaryCategory" + short_description: Union[str, None, UnsetType] = UNSET """Unused. Brief summary of the category. See 'description' and 'userDescription' instead.""" @@ -101,9 +108,6 @@ class AtlasGlossaryCategory(Asset): category_type: Union[str, None, UnsetType] = UNSET """""" - anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET - """Glossary in which this category is contained.""" - anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET """Checks that run on this asset.""" @@ -113,6 +117,12 @@ class AtlasGlossaryCategory(Asset): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -130,12 +140,20 @@ class AtlasGlossaryCategory(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" terms: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Terms 
organized within this category.""" + anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET + """Glossary in which this category is contained.""" + children_categories: Union[List[RelatedAtlasGlossaryCategory], None, UnsetType] = ( UNSET ) @@ -170,7 +188,7 @@ class AtlasGlossaryCategory(Asset): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -178,71 +196,6 @@ class AtlasGlossaryCategory(Asset): def __post_init__(self) -> None: self.type_name = "AtlasGlossaryCategory" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlasGlossaryCategory instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.anchor is UNSET: - errors.append("anchor is required for creation") - if errors: - raise ValueError(f"AtlasGlossaryCategory validation failed: {errors}") - - def minimize(self) -> "AtlasGlossaryCategory": - """ - Return a minimal copy of this AtlasGlossaryCategory with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlasGlossaryCategory with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlasGlossaryCategory instance with only the minimum required fields. - """ - self.validate() - return AtlasGlossaryCategory( - qualified_name=self.qualified_name, name=self.name, anchor=self.anchor - ) - - def relate(self) -> "RelatedAtlasGlossaryCategory": - """ - Create a :class:`RelatedAtlasGlossaryCategory` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlasGlossaryCategory reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAtlasGlossaryCategory(guid=self.guid) - return RelatedAtlasGlossaryCategory(qualified_name=self.qualified_name) - @classmethod def can_be_archived(cls) -> bool: return False @@ -478,9 +431,6 @@ class AtlasGlossaryCategoryAttributes(AssetAttributes): category_type: Union[str, None, UnsetType] = UNSET """""" - anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET - """Glossary in which this category is contained.""" - class AtlasGlossaryCategoryRelationshipAttributes(AssetRelationshipAttributes): """AtlasGlossaryCategory-specific relationship attributes for nested API format.""" @@ -494,6 +444,12 @@ class AtlasGlossaryCategoryRelationshipAttributes(AssetRelationshipAttributes): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -511,12 +467,20 @@ class AtlasGlossaryCategoryRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" terms: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Terms organized within this category.""" + anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET + """Glossary in which this 
category is contained.""" + children_categories: Union[List[RelatedAtlasGlossaryCategory], None, UnsetType] = ( UNSET ) @@ -551,7 +515,7 @@ class AtlasGlossaryCategoryRelationshipAttributes(AssetRelationshipAttributes): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -581,13 +545,17 @@ class AtlasGlossaryCategoryNested(AssetNested): "anomalo_checks", "application", "application_field", + "data_contract_latest", + "data_contract_latest_certified", "output_port_data_products", "input_port_data_products", "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "terms", + "anchor", "children_categories", "parent_category", "mc_monitors", @@ -611,7 +579,6 @@ def _populate_atlas_glossary_category_attrs( attrs.long_description = obj.long_description attrs.additional_attributes = obj.additional_attributes attrs.category_type = obj.category_type - attrs.anchor = obj.anchor def _extract_atlas_glossary_category_attrs( @@ -623,7 +590,6 @@ def _extract_atlas_glossary_category_attrs( result["long_description"] = attrs.long_description result["additional_attributes"] = attrs.additional_attributes result["category_type"] = attrs.category_type - result["anchor"] = attrs.anchor return result @@ -664,9 +630,6 @@ def _atlas_glossary_category_to_nested( is_incomplete=atlas_glossary_category.is_incomplete, provenance_type=atlas_glossary_category.provenance_type, home_id=atlas_glossary_category.home_id, - depth=atlas_glossary_category.depth, - immediate_upstream=atlas_glossary_category.immediate_upstream, - immediate_downstream=atlas_glossary_category.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -702,6 +665,7 @@ def 
_atlas_glossary_category_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -710,9 +674,6 @@ def _atlas_glossary_category_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlas_glossary_category_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -749,10 +710,13 @@ def _atlas_glossary_category_from_nested_bytes( "additionalAttributes", "additionalAttributes" ) AtlasGlossaryCategory.CATEGORY_TYPE = KeywordField("categoryType", "categoryType") -AtlasGlossaryCategory.ANCHOR = KeywordField("anchor", "anchor") AtlasGlossaryCategory.ANOMALO_CHECKS = RelationField("anomaloChecks") AtlasGlossaryCategory.APPLICATION = RelationField("application") AtlasGlossaryCategory.APPLICATION_FIELD = RelationField("applicationField") +AtlasGlossaryCategory.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +AtlasGlossaryCategory.DATA_CONTRACT_LATEST_CERTIFIED = RelationField( + "dataContractLatestCertified" +) AtlasGlossaryCategory.OUTPUT_PORT_DATA_PRODUCTS = RelationField( "outputPortDataProducts" ) @@ -762,8 +726,12 @@ def _atlas_glossary_category_from_nested_bytes( AtlasGlossaryCategory.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +AtlasGlossaryCategory.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AtlasGlossaryCategory.MEANINGS = RelationField("meanings") AtlasGlossaryCategory.TERMS = RelationField("terms") +AtlasGlossaryCategory.ANCHOR = RelationField("anchor") AtlasGlossaryCategory.CHILDREN_CATEGORIES = RelationField("childrenCategories") 
AtlasGlossaryCategory.PARENT_CATEGORY = RelationField("parentCategory") AtlasGlossaryCategory.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/atlas_glossary_term.py b/pyatlan_v9/model/assets/atlas_glossary_term.py index bbd635f64..de1c0110c 100644 --- a/pyatlan_v9/model/assets/atlas_glossary_term.py +++ b/pyatlan_v9/model/assets/atlas_glossary_term.py @@ -38,8 +38,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) +from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import ( RelatedAtlasGlossary, RelatedAtlasGlossaryCategory, @@ -69,17 +71,20 @@ class AtlasGlossaryTerm(Asset): USAGE: ClassVar[Any] = None ADDITIONAL_ATTRIBUTES: ClassVar[Any] = None TERM_TYPE: ClassVar[Any] = None - ANCHOR: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None APPLICATION: ClassVar[Any] = None APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None ASSIGNED_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None + ANCHOR: ClassVar[Any] = None CATEGORIES: ClassVar[Any] = None SEE_ALSO: ClassVar[Any] = None SYNONYMS: ClassVar[Any] = None @@ -104,6 +109,8 @@ class AtlasGlossaryTerm(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AtlasGlossaryTerm" + short_description: Union[str, None, UnsetType] = UNSET """Unused. Brief summary of the term. 
See 'description' and 'userDescription' instead.""" @@ -125,9 +132,6 @@ class AtlasGlossaryTerm(Asset): term_type: Union[str, None, UnsetType] = UNSET """""" - anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET - """Glossary in which this term is contained.""" - anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET """Checks that run on this asset.""" @@ -137,6 +141,12 @@ class AtlasGlossaryTerm(Asset): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -154,12 +164,20 @@ class AtlasGlossaryTerm(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + assigned_entities: Union[List[RelatedReferenceable], None, UnsetType] = UNSET """Assets assigned this term.""" meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" + anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET + """Glossary in which this term is contained.""" + categories: Union[List[RelatedAtlasGlossaryCategory], None, UnsetType] = UNSET """Categories within which this term is organized.""" @@ -228,7 +246,7 @@ class AtlasGlossaryTerm(Asset): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with 
this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -236,71 +254,6 @@ class AtlasGlossaryTerm(Asset): def __post_init__(self) -> None: self.type_name = "AtlasGlossaryTerm" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AtlasGlossaryTerm instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.anchor is UNSET: - errors.append("anchor is required for creation") - if errors: - raise ValueError(f"AtlasGlossaryTerm validation failed: {errors}") - - def minimize(self) -> "AtlasGlossaryTerm": - """ - Return a minimal copy of this AtlasGlossaryTerm with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AtlasGlossaryTerm with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AtlasGlossaryTerm instance with only the minimum required fields. 
- """ - self.validate() - return AtlasGlossaryTerm( - qualified_name=self.qualified_name, name=self.name, anchor=self.anchor - ) - - def relate(self) -> "RelatedAtlasGlossaryTerm": - """ - Create a :class:`RelatedAtlasGlossaryTerm` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAtlasGlossaryTerm reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAtlasGlossaryTerm(guid=self.guid) - return RelatedAtlasGlossaryTerm(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -527,9 +480,6 @@ class AtlasGlossaryTermAttributes(AssetAttributes): term_type: Union[str, None, UnsetType] = UNSET """""" - anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET - """Glossary in which this term is contained.""" - class AtlasGlossaryTermRelationshipAttributes(AssetRelationshipAttributes): """AtlasGlossaryTerm-specific relationship attributes for nested API format.""" @@ -543,6 +493,12 @@ class AtlasGlossaryTermRelationshipAttributes(AssetRelationshipAttributes): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -560,12 +516,20 @@ class AtlasGlossaryTermRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] 
= UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + assigned_entities: Union[List[RelatedReferenceable], None, UnsetType] = UNSET """Assets assigned this term.""" meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" + anchor: Union[RelatedAtlasGlossary, None, UnsetType] = UNSET + """Glossary in which this term is contained.""" + categories: Union[List[RelatedAtlasGlossaryCategory], None, UnsetType] = UNSET """Categories within which this term is organized.""" @@ -634,7 +598,7 @@ class AtlasGlossaryTermRelationshipAttributes(AssetRelationshipAttributes): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -664,13 +628,17 @@ class AtlasGlossaryTermNested(AssetNested): "anomalo_checks", "application", "application_field", + "data_contract_latest", + "data_contract_latest_certified", "output_port_data_products", "input_port_data_products", "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "assigned_entities", "meanings", + "anchor", "categories", "see_also", "synonyms", @@ -709,7 +677,6 @@ def _populate_atlas_glossary_term_attrs( attrs.usage = obj.usage attrs.additional_attributes = obj.additional_attributes attrs.term_type = obj.term_type - attrs.anchor = obj.anchor def _extract_atlas_glossary_term_attrs(attrs: AtlasGlossaryTermAttributes) -> dict: @@ -722,7 +689,6 @@ def _extract_atlas_glossary_term_attrs(attrs: AtlasGlossaryTermAttributes) -> di result["usage"] = attrs.usage result["additional_attributes"] = attrs.additional_attributes result["term_type"] = attrs.term_type - result["anchor"] = attrs.anchor return result @@ -763,9 +729,6 @@ def _atlas_glossary_term_to_nested( 
is_incomplete=atlas_glossary_term.is_incomplete, provenance_type=atlas_glossary_term.provenance_type, home_id=atlas_glossary_term.home_id, - depth=atlas_glossary_term.depth, - immediate_upstream=atlas_glossary_term.immediate_upstream, - immediate_downstream=atlas_glossary_term.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -801,6 +764,7 @@ def _atlas_glossary_term_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -809,9 +773,6 @@ def _atlas_glossary_term_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_atlas_glossary_term_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -849,17 +810,24 @@ def _atlas_glossary_term_from_nested_bytes( "additionalAttributes", "additionalAttributes" ) AtlasGlossaryTerm.TERM_TYPE = KeywordField("termType", "termType") -AtlasGlossaryTerm.ANCHOR = KeywordField("anchor", "anchor") AtlasGlossaryTerm.ANOMALO_CHECKS = RelationField("anomaloChecks") AtlasGlossaryTerm.APPLICATION = RelationField("application") AtlasGlossaryTerm.APPLICATION_FIELD = RelationField("applicationField") +AtlasGlossaryTerm.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +AtlasGlossaryTerm.DATA_CONTRACT_LATEST_CERTIFIED = RelationField( + "dataContractLatestCertified" +) AtlasGlossaryTerm.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") AtlasGlossaryTerm.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") AtlasGlossaryTerm.METRICS = RelationField("metrics") AtlasGlossaryTerm.DQ_BASE_DATASET_RULES = 
RelationField("dqBaseDatasetRules") AtlasGlossaryTerm.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AtlasGlossaryTerm.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AtlasGlossaryTerm.ASSIGNED_ENTITIES = RelationField("assignedEntities") AtlasGlossaryTerm.MEANINGS = RelationField("meanings") +AtlasGlossaryTerm.ANCHOR = RelationField("anchor") AtlasGlossaryTerm.CATEGORIES = RelationField("categories") AtlasGlossaryTerm.SEE_ALSO = RelationField("seeAlso") AtlasGlossaryTerm.SYNONYMS = RelationField("synonyms") diff --git a/pyatlan_v9/model/assets/auth_policy.py b/pyatlan_v9/model/assets/auth_policy.py index 4ea969f63..c3e667ef4 100644 --- a/pyatlan_v9/model/assets/auth_policy.py +++ b/pyatlan_v9/model/assets/auth_policy.py @@ -1,49 +1,249 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 # SPDX-License-Identifier: Apache-2.0 -# Copyright 2026 Atlan Pte. Ltd. +# Copyright 2024 Atlan Pte. Ltd. -"""AuthPolicy asset model for pyatlan_v9.""" +""" +AuthPolicy asset model with flattened inheritance. 
+ +This module provides: +- AuthPolicy: Flat asset class (easy to use) +- AuthPolicyAttributes: Nested attributes struct (extends AssetAttributes) +- AuthPolicyNested: Nested API format struct +""" from __future__ import annotations -from typing import Any, ClassVar, Set, Union +import re +from typing import Any, ClassVar, Dict, List, Union from msgspec import UNSET, UnsetType from pyatlan_v9.model.conversion_utils import ( - build_attributes_kwargs, - build_flat_kwargs, + categorize_relationships, merge_relationships, ) from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .asset import Asset, AssetAttributes, AssetNested +from .access_control_related import RelatedAccessControl, RelatedAuthPolicy +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= @register_asset class AuthPolicy(Asset): - """AuthPolicy asset — defines access policies 
for Persona and Purpose.""" + """ + Model to store an accesscontrol policy in Atlas + """ + IS_POLICY_ENABLED: ClassVar[Any] = None + POLICY_ACTIONS: ClassVar[Any] = None + POLICY_CATEGORY: ClassVar[Any] = None + POLICY_CONDITIONS: ClassVar[Any] = None + POLICY_DELEGATE_ADMIN: ClassVar[Any] = None POLICY_FILTER_CRITERIA: ClassVar[Any] = None - POLICY_TYPE: ClassVar[Any] = None + POLICY_GROUPS: ClassVar[Any] = None + POLICY_MASK_TYPE: ClassVar[Any] = None + POLICY_PRIORITY: ClassVar[Any] = None + POLICY_RESOURCE_CATEGORY: ClassVar[Any] = None + POLICY_RESOURCE_SIGNATURE: ClassVar[Any] = None + POLICY_RESOURCES: ClassVar[Any] = None + POLICY_ROLES: ClassVar[Any] = None POLICY_SERVICE_NAME: ClassVar[Any] = None - POLICY_CATEGORY: ClassVar[Any] = None POLICY_SUB_CATEGORY: ClassVar[Any] = None + POLICY_TYPE: ClassVar[Any] = None POLICY_USERS: ClassVar[Any] = None - POLICY_GROUPS: ClassVar[Any] = None - POLICY_ROLES: ClassVar[Any] = None - POLICY_ACTIONS: ClassVar[Any] = None - POLICY_RESOURCES: ClassVar[Any] = None - POLICY_RESOURCE_CATEGORY: ClassVar[Any] = None - POLICY_PRIORITY: ClassVar[Any] = None - IS_POLICY_ENABLED: ClassVar[Any] = None - POLICY_MASK_TYPE: ClassVar[Any] = None POLICY_VALIDITY_SCHEDULE: ClassVar[Any] = None - POLICY_RESOURCE_SIGNATURE: ClassVar[Any] = None - POLICY_DELEGATE_ADMIN: ClassVar[Any] = None - POLICY_CONDITIONS: ClassVar[Any] = None + POLICIES: ClassVar[Any] = None ACCESS_CONTROL: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + 
MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None + + type_name: Union[str, UnsetType] = "AuthPolicy" + + is_policy_enabled: Union[bool, None, UnsetType] = UNSET + """TBC""" + + policy_actions: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + policy_category: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_conditions: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """TBC""" + + policy_delegate_admin: Union[bool, None, UnsetType] = UNSET + """TBC""" + + policy_filter_criteria: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_groups: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + policy_mask_type: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_priority: Union[int, None, UnsetType] = UNSET + """TBC""" + + policy_resource_category: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_resource_signature: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_resources: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + policy_roles: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + policy_service_name: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_sub_category: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_type: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_users: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + policy_validity_schedule: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """TBC""" + + policies: Union[List[RelatedAuthPolicy], None, UnsetType] = UNSET + """Policies assigned to this access control entity.""" + + access_control: Union[RelatedAccessControl, None, UnsetType] = UNSET + """Access control entity to which this policy 
applies.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: 
Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "AuthPolicy" + + # ========================================================================= + # SDK Methods + # ========================================================================= + + _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") @classmethod @init_guid @@ -51,39 +251,46 @@ def _create(cls, *, name: str) -> "AuthPolicy": validate_required_fields(["name"], [name]) return cls(qualified_name=name, name=name, display_name="") - type_name: Union[str, UnsetType] = "AuthPolicy" - policy_filter_criteria: Union[str, None, UnsetType] = UNSET - policy_type: Union[str, None, UnsetType] = UNSET - policy_service_name: Union[str, None, UnsetType] = UNSET - policy_category: Union[str, None, UnsetType] = UNSET - policy_sub_category: Union[str, None, UnsetType] = UNSET - policy_users: Union[Set[str], None, UnsetType] = UNSET - policy_groups: Union[Set[str], None, UnsetType] = UNSET - policy_roles: Union[Set[str], None, UnsetType] = UNSET - policy_actions: Union[Set[str], None, UnsetType] = UNSET - policy_resources: Union[Set[str], None, UnsetType] = UNSET - policy_resource_category: Union[str, None, UnsetType] = UNSET - policy_priority: Union[int, None, UnsetType] = UNSET - is_policy_enabled: Union[bool, None, UnsetType] = UNSET - policy_mask_type: Union[str, None, UnsetType] = UNSET - 
policy_validity_schedule: Union[list[Any], None, UnsetType] = UNSET - policy_resource_signature: Union[str, None, UnsetType] = UNSET - policy_delegate_admin: Union[bool, None, UnsetType] = UNSET - policy_conditions: Union[list[Any], None, UnsetType] = UNSET - access_control: Union[Any, None, UnsetType] = UNSET - connection_qualified_name: Union[str, None, UnsetType] = UNSET + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= def to_json(self, nested: bool = True, serde: Serde | None = None) -> str: + """ + Convert to JSON string using optimized nested struct serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. + + Returns: + JSON string representation + """ if serde is None: serde = get_serde() if nested: - return _auth_policy_to_nested_bytes(self, serde).decode("utf-8") - return serde.encode(self).decode("utf-8") + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _auth_policy_to_nested_bytes(self, serde) @staticmethod - def from_json( - json_data: Union[str, bytes], serde: Serde | None = None - ) -> "AuthPolicy": + def from_json(json_data: str | bytes, serde: Serde | None = None) -> AuthPolicy: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. 
+ + Returns: + AuthPolicy instance + """ if isinstance(json_data, str): json_data = json_data.encode("utf-8") if serde is None: @@ -91,127 +298,392 @@ def from_json( return _auth_policy_from_nested_bytes(json_data, serde) -# --------------------------------------------------------------------------- -# Deferred field descriptor initialization -# --------------------------------------------------------------------------- -from pyatlan.model.fields.atlan_fields import ( - BooleanField, - KeywordField, - NumericField, - RelationField, - TextField, -) - -AuthPolicy.POLICY_FILTER_CRITERIA = TextField( - "policyFilterCriteria", "policyFilterCriteria" -) -AuthPolicy.POLICY_TYPE = KeywordField("policyType", "policyType") -AuthPolicy.POLICY_SERVICE_NAME = KeywordField("policyServiceName", "policyServiceName") -AuthPolicy.POLICY_CATEGORY = KeywordField("policyCategory", "policyCategory") -AuthPolicy.POLICY_SUB_CATEGORY = KeywordField("policySubCategory", "policySubCategory") -AuthPolicy.POLICY_USERS = KeywordField("policyUsers", "policyUsers") -AuthPolicy.POLICY_GROUPS = KeywordField("policyGroups", "policyGroups") -AuthPolicy.POLICY_ROLES = KeywordField("policyRoles", "policyRoles") -AuthPolicy.POLICY_ACTIONS = KeywordField("policyActions", "policyActions") -AuthPolicy.POLICY_RESOURCES = KeywordField("policyResources", "policyResources") -AuthPolicy.POLICY_RESOURCE_CATEGORY = KeywordField( - "policyResourceCategory", "policyResourceCategory" -) -AuthPolicy.POLICY_PRIORITY = NumericField("policyPriority", "policyPriority") -AuthPolicy.IS_POLICY_ENABLED = BooleanField("isPolicyEnabled", "isPolicyEnabled") -AuthPolicy.POLICY_MASK_TYPE = KeywordField("policyMaskType", "policyMaskType") -AuthPolicy.POLICY_VALIDITY_SCHEDULE = KeywordField( - "policyValiditySchedule", "policyValiditySchedule" -) -AuthPolicy.POLICY_RESOURCE_SIGNATURE = KeywordField( - "policyResourceSignature", "policyResourceSignature" -) -AuthPolicy.POLICY_DELEGATE_ADMIN = BooleanField( - "policyDelegateAdmin", 
"policyDelegateAdmin" -) -AuthPolicy.POLICY_CONDITIONS = KeywordField("policyConditions", "policyConditions") -AuthPolicy.ACCESS_CONTROL = RelationField("accessControl") - - # ============================================================================= # NESTED FORMAT CLASSES # ============================================================================= class AuthPolicyAttributes(AssetAttributes): - policy_filter_criteria: Union[str, None, UnsetType] = UNSET - policy_type: Union[str, None, UnsetType] = UNSET - policy_service_name: Union[str, None, UnsetType] = UNSET - policy_category: Union[str, None, UnsetType] = UNSET - policy_sub_category: Union[str, None, UnsetType] = UNSET - policy_users: Union[Set[str], None, UnsetType] = UNSET - policy_groups: Union[Set[str], None, UnsetType] = UNSET - policy_roles: Union[Set[str], None, UnsetType] = UNSET - policy_actions: Union[Set[str], None, UnsetType] = UNSET - policy_resources: Union[Set[str], None, UnsetType] = UNSET - policy_resource_category: Union[str, None, UnsetType] = UNSET - policy_priority: Union[int, None, UnsetType] = UNSET + """AuthPolicy-specific attributes for nested API format.""" + is_policy_enabled: Union[bool, None, UnsetType] = UNSET + """TBC""" + + policy_actions: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + policy_category: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_conditions: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """TBC""" + + policy_delegate_admin: Union[bool, None, UnsetType] = UNSET + """TBC""" + + policy_filter_criteria: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_groups: Union[List[str], None, UnsetType] = UNSET + """TBC""" + policy_mask_type: Union[str, None, UnsetType] = UNSET - policy_validity_schedule: Union[list[Any], None, UnsetType] = UNSET + """TBC""" + + policy_priority: Union[int, None, UnsetType] = UNSET + """TBC""" + + policy_resource_category: Union[str, None, UnsetType] = UNSET + """TBC""" + 
policy_resource_signature: Union[str, None, UnsetType] = UNSET - policy_delegate_admin: Union[bool, None, UnsetType] = UNSET - policy_conditions: Union[list[Any], None, UnsetType] = UNSET - connection_qualified_name: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_resources: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + policy_roles: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + policy_service_name: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_sub_category: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_type: Union[str, None, UnsetType] = UNSET + """TBC""" + + policy_users: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + policy_validity_schedule: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """TBC""" + + +class AuthPolicyRelationshipAttributes(AssetRelationshipAttributes): + """AuthPolicy-specific relationship attributes for nested API format.""" + + policies: Union[List[RelatedAuthPolicy], None, UnsetType] = UNSET + """Access control entity to which this policy applies.""" + + access_control: Union[RelatedAccessControl, None, UnsetType] = UNSET + """Policies assigned to this access control entity.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output 
port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" class AuthPolicyNested(AssetNested): + """AuthPolicy in nested API format for high-performance serialization.""" + attributes: Union[AuthPolicyAttributes, UnsetType] = UNSET + relationship_attributes: 
Union[AuthPolicyRelationshipAttributes, UnsetType] = UNSET + append_relationship_attributes: Union[ + AuthPolicyRelationshipAttributes, UnsetType + ] = UNSET + remove_relationship_attributes: Union[ + AuthPolicyRelationshipAttributes, UnsetType + ] = UNSET + + +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_AUTH_POLICY_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, + "policies", + "access_control", + "anomalo_checks", + "application", + "application_field", + "data_contract_latest", + "data_contract_latest_certified", + "output_port_data_products", + "input_port_data_products", + "metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + "mc_monitors", + "mc_incidents", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + "soda_checks", +] -def _auth_policy_to_nested(ap: AuthPolicy) -> AuthPolicyNested: - attrs_kwargs = build_attributes_kwargs(ap, AuthPolicyAttributes) - attrs = AuthPolicyAttributes(**attrs_kwargs) +def _populate_auth_policy_attrs(attrs: AuthPolicyAttributes, obj: AuthPolicy) -> None: + """Populate AuthPolicy-specific attributes on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.is_policy_enabled = obj.is_policy_enabled + attrs.policy_actions = obj.policy_actions + attrs.policy_category = obj.policy_category + attrs.policy_conditions = obj.policy_conditions + attrs.policy_delegate_admin = obj.policy_delegate_admin + attrs.policy_filter_criteria = obj.policy_filter_criteria + attrs.policy_groups = obj.policy_groups + attrs.policy_mask_type = obj.policy_mask_type + attrs.policy_priority = obj.policy_priority + attrs.policy_resource_category = obj.policy_resource_category + attrs.policy_resource_signature = obj.policy_resource_signature 
+ attrs.policy_resources = obj.policy_resources + attrs.policy_roles = obj.policy_roles + attrs.policy_service_name = obj.policy_service_name + attrs.policy_sub_category = obj.policy_sub_category + attrs.policy_type = obj.policy_type + attrs.policy_users = obj.policy_users + attrs.policy_validity_schedule = obj.policy_validity_schedule + + +def _extract_auth_policy_attrs(attrs: AuthPolicyAttributes) -> dict: + """Extract all AuthPolicy attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["is_policy_enabled"] = attrs.is_policy_enabled + result["policy_actions"] = attrs.policy_actions + result["policy_category"] = attrs.policy_category + result["policy_conditions"] = attrs.policy_conditions + result["policy_delegate_admin"] = attrs.policy_delegate_admin + result["policy_filter_criteria"] = attrs.policy_filter_criteria + result["policy_groups"] = attrs.policy_groups + result["policy_mask_type"] = attrs.policy_mask_type + result["policy_priority"] = attrs.policy_priority + result["policy_resource_category"] = attrs.policy_resource_category + result["policy_resource_signature"] = attrs.policy_resource_signature + result["policy_resources"] = attrs.policy_resources + result["policy_roles"] = attrs.policy_roles + result["policy_service_name"] = attrs.policy_service_name + result["policy_sub_category"] = attrs.policy_sub_category + result["policy_type"] = attrs.policy_type + result["policy_users"] = attrs.policy_users + result["policy_validity_schedule"] = attrs.policy_validity_schedule + return result + + +# ============================================================================= +# CONVERSION FUNCTIONS +# ============================================================================= + + +def _auth_policy_to_nested(auth_policy: AuthPolicy) -> AuthPolicyNested: + """Convert flat AuthPolicy to nested format.""" + attrs = AuthPolicyAttributes() + _populate_auth_policy_attrs(attrs, auth_policy) + # Categorize relationships 
by save semantic (REPLACE, APPEND, REMOVE) + replace_rels, append_rels, remove_rels = categorize_relationships( + auth_policy, _AUTH_POLICY_REL_FIELDS, AuthPolicyRelationshipAttributes + ) return AuthPolicyNested( - guid=ap.guid, - type_name=ap.type_name, - status=ap.status, - version=ap.version, - create_time=ap.create_time, - update_time=ap.update_time, - created_by=ap.created_by, - updated_by=ap.updated_by, - classifications=ap.classifications, - classification_names=ap.classification_names, - meanings=ap.meanings, - labels=ap.labels, - business_attributes=ap.business_attributes, - custom_attributes=ap.custom_attributes, - pending_tasks=ap.pending_tasks, - proxy=ap.proxy, - is_incomplete=ap.is_incomplete, - provenance_type=ap.provenance_type, - home_id=ap.home_id, + guid=auth_policy.guid, + type_name=auth_policy.type_name, + status=auth_policy.status, + version=auth_policy.version, + create_time=auth_policy.create_time, + update_time=auth_policy.update_time, + created_by=auth_policy.created_by, + updated_by=auth_policy.updated_by, + classifications=auth_policy.classifications, + classification_names=auth_policy.classification_names, + meanings=auth_policy.meanings, + labels=auth_policy.labels, + business_attributes=auth_policy.business_attributes, + custom_attributes=auth_policy.custom_attributes, + pending_tasks=auth_policy.pending_tasks, + proxy=auth_policy.proxy, + is_incomplete=auth_policy.is_incomplete, + provenance_type=auth_policy.provenance_type, + home_id=auth_policy.home_id, attributes=attrs, + relationship_attributes=replace_rels, + append_relationship_attributes=append_rels, + remove_relationship_attributes=remove_rels, ) def _auth_policy_from_nested(nested: AuthPolicyNested) -> AuthPolicy: + """Convert nested format to flat AuthPolicy.""" attrs = ( nested.attributes if nested.attributes is not UNSET else AuthPolicyAttributes() ) + # Merge relationships from all three buckets merged_rels = merge_relationships( nested.relationship_attributes, 
nested.append_relationship_attributes, nested.remove_relationship_attributes, - [], - object, + _AUTH_POLICY_REL_FIELDS, + AuthPolicyRelationshipAttributes, ) - kwargs = build_flat_kwargs( - nested, attrs, merged_rels, AssetNested, AuthPolicyAttributes + return AuthPolicy( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + created_by=nested.created_by, + updated_by=nested.updated_by, + classifications=nested.classifications, + classification_names=nested.classification_names, + meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + custom_attributes=nested.custom_attributes, + pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_auth_policy_attrs(attrs), + # Merged relationship attributes + **merged_rels, ) - return AuthPolicy(**kwargs) -def _auth_policy_to_nested_bytes(ap: AuthPolicy, serde: Serde) -> bytes: - return serde.encode(_auth_policy_to_nested(ap)) +def _auth_policy_to_nested_bytes(auth_policy: AuthPolicy, serde: Serde) -> bytes: + """Convert flat AuthPolicy to nested JSON bytes.""" + return serde.encode(_auth_policy_to_nested(auth_policy)) def _auth_policy_from_nested_bytes(data: bytes, serde: Serde) -> AuthPolicy: + """Convert nested JSON bytes to flat AuthPolicy.""" nested = serde.decode(data, AuthPolicyNested) return _auth_policy_from_nested(nested) + + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# --------------------------------------------------------------------------- +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, + KeywordField, + NumericField, + RelationField, +) + +AuthPolicy.IS_POLICY_ENABLED = BooleanField("isPolicyEnabled", "isPolicyEnabled") 
+AuthPolicy.POLICY_ACTIONS = KeywordField("policyActions", "policyActions") +AuthPolicy.POLICY_CATEGORY = KeywordField("policyCategory", "policyCategory") +AuthPolicy.POLICY_CONDITIONS = KeywordField("policyConditions", "policyConditions") +AuthPolicy.POLICY_DELEGATE_ADMIN = BooleanField( + "policyDelegateAdmin", "policyDelegateAdmin" +) +AuthPolicy.POLICY_FILTER_CRITERIA = KeywordField( + "policyFilterCriteria", "policyFilterCriteria" +) +AuthPolicy.POLICY_GROUPS = KeywordField("policyGroups", "policyGroups") +AuthPolicy.POLICY_MASK_TYPE = KeywordField("policyMaskType", "policyMaskType") +AuthPolicy.POLICY_PRIORITY = NumericField("policyPriority", "policyPriority") +AuthPolicy.POLICY_RESOURCE_CATEGORY = KeywordField( + "policyResourceCategory", "policyResourceCategory" +) +AuthPolicy.POLICY_RESOURCE_SIGNATURE = KeywordField( + "policyResourceSignature", "policyResourceSignature" +) +AuthPolicy.POLICY_RESOURCES = KeywordField("policyResources", "policyResources") +AuthPolicy.POLICY_ROLES = KeywordField("policyRoles", "policyRoles") +AuthPolicy.POLICY_SERVICE_NAME = KeywordField("policyServiceName", "policyServiceName") +AuthPolicy.POLICY_SUB_CATEGORY = KeywordField("policySubCategory", "policySubCategory") +AuthPolicy.POLICY_TYPE = KeywordField("policyType", "policyType") +AuthPolicy.POLICY_USERS = KeywordField("policyUsers", "policyUsers") +AuthPolicy.POLICY_VALIDITY_SCHEDULE = KeywordField( + "policyValiditySchedule", "policyValiditySchedule" +) +AuthPolicy.POLICIES = RelationField("policies") +AuthPolicy.ACCESS_CONTROL = RelationField("accessControl") +AuthPolicy.ANOMALO_CHECKS = RelationField("anomaloChecks") +AuthPolicy.APPLICATION = RelationField("application") +AuthPolicy.APPLICATION_FIELD = RelationField("applicationField") +AuthPolicy.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +AuthPolicy.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +AuthPolicy.OUTPUT_PORT_DATA_PRODUCTS = 
RelationField("outputPortDataProducts") +AuthPolicy.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +AuthPolicy.METRICS = RelationField("metrics") +AuthPolicy.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +AuthPolicy.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AuthPolicy.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) +AuthPolicy.MEANINGS = RelationField("meanings") +AuthPolicy.MC_MONITORS = RelationField("mcMonitors") +AuthPolicy.MC_INCIDENTS = RelationField("mcIncidents") +AuthPolicy.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +AuthPolicy.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +AuthPolicy.FILES = RelationField("files") +AuthPolicy.LINKS = RelationField("links") +AuthPolicy.README = RelationField("readme") +AuthPolicy.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +AuthPolicy.SODA_CHECKS = RelationField("sodaChecks") diff --git a/pyatlan_v9/model/assets/auth_service.py b/pyatlan_v9/model/assets/auth_service.py index 9c3aadd40..78dfbc695 100644 --- a/pyatlan_v9/model/assets/auth_service.py +++ b/pyatlan_v9/model/assets/auth_service.py @@ -25,7 +25,7 @@ from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset -from .access_control_related import RelatedAuthPolicy, RelatedAuthService +from .access_control_related import RelatedAuthPolicy from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField from .asset import ( @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, 
RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -75,6 +76,7 @@ class AuthService(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -86,6 +88,8 @@ class AuthService(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AuthService" + abac_service: Union[str, None, UnsetType] = UNSET """TBC""" @@ -139,6 +143,11 @@ class AuthService(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -176,66 +185,6 @@ class AuthService(Asset): def __post_init__(self) -> None: self.type_name = "AuthService" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AuthService instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AuthService validation failed: {errors}") - - def minimize(self) -> "AuthService": - """ - Return a minimal copy of this AuthService with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AuthService with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AuthService instance with only the minimum required fields. - """ - self.validate() - return AuthService(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAuthService": - """ - Create a :class:`RelatedAuthService` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAuthService reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAuthService(guid=self.guid) - return RelatedAuthService(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -348,6 +297,11 @@ class AuthServiceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -413,6 +367,7 @@ class AuthServiceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -484,9 +439,6 @@ def _auth_service_to_nested(auth_service: AuthService) -> AuthServiceNested: is_incomplete=auth_service.is_incomplete, provenance_type=auth_service.provenance_type, home_id=auth_service.home_id, - depth=auth_service.depth, - immediate_upstream=auth_service.immediate_upstream, - immediate_downstream=auth_service.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -518,6 +470,7 @@ def _auth_service_from_nested(nested: AuthServiceNested) -> AuthService: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -526,9 +479,6 @@ def _auth_service_from_nested(nested: AuthServiceNested) -> AuthService: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_auth_service_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -579,6 +529,9 @@ def _auth_service_from_nested_bytes(data: bytes, serde: Serde) -> AuthService: AuthService.METRICS = RelationField("metrics") AuthService.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AuthService.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AuthService.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AuthService.MEANINGS = RelationField("meanings") AuthService.MC_MONITORS = RelationField("mcMonitors") AuthService.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/aws.py b/pyatlan_v9/model/assets/aws.py index 33f2c4d5c..810dd5a99 100644 --- a/pyatlan_v9/model/assets/aws.py +++ b/pyatlan_v9/model/assets/aws.py @@ -36,10 +36,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cloud_related import RelatedAWS from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -78,6 +78,7 @@ class AWS(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -89,6 +90,8 @@ class AWS(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None 
+ type_name: Union[str, UnsetType] = "AWS" + aws_arn: Union[str, None, UnsetType] = UNSET """DEPRECATED: This legacy attribute must be unique across all AWS asset instances. This can create non-obvious edge cases for creating / updating assets, and we therefore recommended NOT using it. See and use cloudResourceName instead.""" @@ -151,6 +154,11 @@ class AWS(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -188,66 +196,6 @@ class AWS(Asset): def __post_init__(self) -> None: self.type_name = "AWS" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AWS instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AWS validation failed: {errors}") - - def minimize(self) -> "AWS": - """ - Return a minimal copy of this AWS with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AWS with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AWS instance with only the minimum required fields. - """ - self.validate() - return AWS(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAWS": - """ - Create a :class:`RelatedAWS` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAWS reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAWS(guid=self.guid) - return RelatedAWS(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -369,6 +317,11 @@ class AWSRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -429,6 +382,7 @@ class AWSNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -506,9 +460,6 @@ def _aws_to_nested(aws: AWS) -> AWSNested: is_incomplete=aws.is_incomplete, provenance_type=aws.provenance_type, home_id=aws.home_id, - depth=aws.depth, - immediate_upstream=aws.immediate_upstream, - immediate_downstream=aws.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -538,6 +489,7 @@ def _aws_from_nested(nested: AWSNested) -> AWS: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -546,9 +498,6 @@ def _aws_from_nested(nested: AWSNested) -> AWS: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_aws_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -599,6 +548,9 @@ def _aws_from_nested_bytes(data: bytes, serde: Serde) -> AWS: AWS.METRICS = RelationField("metrics") AWS.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AWS.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AWS.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AWS.MEANINGS = RelationField("meanings") AWS.MC_MONITORS = RelationField("mcMonitors") AWS.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/azure.py b/pyatlan_v9/model/assets/azure.py index ec0679b00..a2fa89ed2 100644 --- a/pyatlan_v9/model/assets/azure.py +++ b/pyatlan_v9/model/assets/azure.py @@ -36,10 +36,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cloud_related import RelatedAzure from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -73,6 +73,7 @@ class Azure(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -84,6 +85,8 @@ class Azure(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Azure" + azure_resource_id: Union[str, None, UnsetType] = UNSET """Resource identifier of this asset in Azure.""" @@ -131,6 +134,11 @@ class Azure(Asset): ) """Rules where this dataset is referenced.""" + 
gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -168,66 +176,6 @@ class Azure(Asset): def __post_init__(self) -> None: self.type_name = "Azure" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Azure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Azure validation failed: {errors}") - - def minimize(self) -> "Azure": - """ - Return a minimal copy of this Azure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Azure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Azure instance with only the minimum required fields. 
- """ - self.validate() - return Azure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAzure": - """ - Create a :class:`RelatedAzure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAzure reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAzure(guid=self.guid) - return RelatedAzure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -334,6 +282,11 @@ class AzureRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -398,6 +351,7 @@ class AzureNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -465,9 +419,6 @@ def _azure_to_nested(azure: Azure) -> AzureNested: is_incomplete=azure.is_incomplete, provenance_type=azure.provenance_type, home_id=azure.home_id, - depth=azure.depth, - immediate_upstream=azure.immediate_upstream, - immediate_downstream=azure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -497,6 +448,7 @@ def _azure_from_nested(nested: AzureNested) -> Azure: updated_by=nested.updated_by, classifications=nested.classifications, 
classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -505,9 +457,6 @@ def _azure_from_nested(nested: AzureNested) -> Azure: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -555,6 +504,9 @@ def _azure_from_nested_bytes(data: bytes, serde: Serde) -> Azure: Azure.METRICS = RelationField("metrics") Azure.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Azure.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Azure.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Azure.MEANINGS = RelationField("meanings") Azure.MC_MONITORS = RelationField("mcMonitors") Azure.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/azure_service_bus.py b/pyatlan_v9/model/assets/azure_service_bus.py index 1959ce421..50f111646 100644 --- a/pyatlan_v9/model/assets/azure_service_bus.py +++ b/pyatlan_v9/model/assets/azure_service_bus.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .azure_service_bus_related import RelatedAzureServiceBus from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -81,6 +81,7 @@ class AzureServiceBus(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: 
ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -98,6 +99,8 @@ class AzureServiceBus(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AzureServiceBus" + azure_service_bus_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AzureServiceBus Namespace in which this asset exists.""" @@ -156,6 +159,11 @@ class AzureServiceBus(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -211,66 +219,6 @@ class AzureServiceBus(Asset): def __post_init__(self) -> None: self.type_name = "AzureServiceBus" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AzureServiceBus instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AzureServiceBus validation failed: {errors}") - - def minimize(self) -> "AzureServiceBus": - """ - Return a minimal copy of this AzureServiceBus with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AzureServiceBus with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AzureServiceBus instance with only the minimum required fields. - """ - self.validate() - return AzureServiceBus(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAzureServiceBus": - """ - Create a :class:`RelatedAzureServiceBus` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAzureServiceBus reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAzureServiceBus(guid=self.guid) - return RelatedAzureServiceBus(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -390,6 +338,11 @@ class AzureServiceBusRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -478,6 +431,7 @@ class AzureServiceBusNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -563,9 +517,6 @@ def _azure_service_bus_to_nested( is_incomplete=azure_service_bus.is_incomplete, provenance_type=azure_service_bus.provenance_type, home_id=azure_service_bus.home_id, - depth=azure_service_bus.depth, - immediate_upstream=azure_service_bus.immediate_upstream, - immediate_downstream=azure_service_bus.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -599,6 +550,7 @@ def _azure_service_bus_from_nested(nested: AzureServiceBusNested) -> AzureServic updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -607,9 +559,6 @@ def _azure_service_bus_from_nested(nested: AzureServiceBusNested) -> AzureServic is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_service_bus_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -670,6 +619,9 @@ def _azure_service_bus_from_nested_bytes(data: bytes, serde: Serde) -> AzureServ AzureServiceBus.METRICS = RelationField("metrics") AzureServiceBus.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") AzureServiceBus.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +AzureServiceBus.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AzureServiceBus.MEANINGS = RelationField("meanings") AzureServiceBus.MC_MONITORS = RelationField("mcMonitors") AzureServiceBus.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/azure_service_bus_namespace.py b/pyatlan_v9/model/assets/azure_service_bus_namespace.py index fe27dfda3..e497e84a7 100644 --- a/pyatlan_v9/model/assets/azure_service_bus_namespace.py +++ b/pyatlan_v9/model/assets/azure_service_bus_namespace.py @@ -37,13 +37,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .azure_service_bus_related import ( - RelatedAzureServiceBusNamespace, - RelatedAzureServiceBusTopic, -) +from .azure_service_bus_related import RelatedAzureServiceBusTopic from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +83,7 @@ class AzureServiceBusNamespace(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: 
ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +101,8 @@ class AzureServiceBusNamespace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AzureServiceBusNamespace" + azure_service_bus_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AzureServiceBus Namespace in which this asset exists.""" @@ -165,6 +166,11 @@ class AzureServiceBusNamespace(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -220,68 +226,6 @@ class AzureServiceBusNamespace(Asset): def __post_init__(self) -> None: self.type_name = "AzureServiceBusNamespace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AzureServiceBusNamespace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AzureServiceBusNamespace validation failed: {errors}") - - def minimize(self) -> "AzureServiceBusNamespace": - """ - Return a minimal copy of this AzureServiceBusNamespace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AzureServiceBusNamespace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AzureServiceBusNamespace instance with only the minimum required fields. - """ - self.validate() - return AzureServiceBusNamespace( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedAzureServiceBusNamespace": - """ - Create a :class:`RelatedAzureServiceBusNamespace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAzureServiceBusNamespace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAzureServiceBusNamespace(guid=self.guid) - return RelatedAzureServiceBusNamespace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -406,6 +350,11 @@ class AzureServiceBusNamespaceRelationshipAttributes(AssetRelationshipAttributes ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -495,6 +444,7 @@ class AzureServiceBusNamespaceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -582,9 +532,6 @@ def _azure_service_bus_namespace_to_nested( is_incomplete=azure_service_bus_namespace.is_incomplete, provenance_type=azure_service_bus_namespace.provenance_type, home_id=azure_service_bus_namespace.home_id, - depth=azure_service_bus_namespace.depth, - immediate_upstream=azure_service_bus_namespace.immediate_upstream, - immediate_downstream=azure_service_bus_namespace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -620,6 +567,7 @@ def _azure_service_bus_namespace_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -628,9 +576,6 @@ def _azure_service_bus_namespace_from_nested( 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_service_bus_namespace_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -708,6 +653,9 @@ def _azure_service_bus_namespace_from_nested_bytes( AzureServiceBusNamespace.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +AzureServiceBusNamespace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AzureServiceBusNamespace.MEANINGS = RelationField("meanings") AzureServiceBusNamespace.MC_MONITORS = RelationField("mcMonitors") AzureServiceBusNamespace.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/azure_service_bus_schema.py b/pyatlan_v9/model/assets/azure_service_bus_schema.py index 03e007b74..bc982759c 100644 --- a/pyatlan_v9/model/assets/azure_service_bus_schema.py +++ b/pyatlan_v9/model/assets/azure_service_bus_schema.py @@ -37,13 +37,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .azure_service_bus_related import ( - RelatedAzureServiceBusSchema, - RelatedAzureServiceBusTopic, -) +from .azure_service_bus_related import RelatedAzureServiceBusTopic from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +83,7 @@ class AzureServiceBusSchema(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] 
= None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +101,8 @@ class AzureServiceBusSchema(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AzureServiceBusSchema" + azure_service_bus_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AzureServiceBus Namespace in which this asset exists.""" @@ -165,6 +166,11 @@ class AzureServiceBusSchema(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -220,66 +226,6 @@ class AzureServiceBusSchema(Asset): def __post_init__(self) -> None: self.type_name = "AzureServiceBusSchema" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AzureServiceBusSchema instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"AzureServiceBusSchema validation failed: {errors}") - - def minimize(self) -> "AzureServiceBusSchema": - """ - Return a minimal copy of this AzureServiceBusSchema with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AzureServiceBusSchema with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AzureServiceBusSchema instance with only the minimum required fields. - """ - self.validate() - return AzureServiceBusSchema(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAzureServiceBusSchema": - """ - Create a :class:`RelatedAzureServiceBusSchema` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedAzureServiceBusSchema reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedAzureServiceBusSchema(guid=self.guid) - return RelatedAzureServiceBusSchema(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -404,6 +350,11 @@ class AzureServiceBusSchemaRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -493,6 +444,7 @@ class AzureServiceBusSchemaNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -580,9 +532,6 @@ def _azure_service_bus_schema_to_nested( is_incomplete=azure_service_bus_schema.is_incomplete, provenance_type=azure_service_bus_schema.provenance_type, home_id=azure_service_bus_schema.home_id, - depth=azure_service_bus_schema.depth, - immediate_upstream=azure_service_bus_schema.immediate_upstream, - immediate_downstream=azure_service_bus_schema.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -618,6 +567,7 @@ def _azure_service_bus_schema_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -626,9 +576,6 @@ def _azure_service_bus_schema_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_service_bus_schema_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -700,6 +647,9 @@ def _azure_service_bus_schema_from_nested_bytes( AzureServiceBusSchema.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +AzureServiceBusSchema.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AzureServiceBusSchema.MEANINGS = RelationField("meanings") AzureServiceBusSchema.MC_MONITORS = RelationField("mcMonitors") AzureServiceBusSchema.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/azure_service_bus_topic.py b/pyatlan_v9/model/assets/azure_service_bus_topic.py index c00ac7f9b..37b67bec6 100644 --- a/pyatlan_v9/model/assets/azure_service_bus_topic.py +++ b/pyatlan_v9/model/assets/azure_service_bus_topic.py @@ -41,11 +41,11 @@ from .azure_service_bus_related import ( RelatedAzureServiceBusNamespace, RelatedAzureServiceBusSchema, - RelatedAzureServiceBusTopic, ) from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -88,6 +88,7 @@ class AzureServiceBusTopic(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +106,8 @@ class 
AzureServiceBusTopic(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "AzureServiceBusTopic" + azure_service_bus_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the AzureServiceBus Namespace in which this asset exists.""" @@ -173,6 +176,11 @@ class AzureServiceBusTopic(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -234,80 +242,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this AzureServiceBusTopic instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.azure_service_bus_namespace is UNSET: - errors.append("azure_service_bus_namespace is required for creation") - if self.azure_service_bus_namespace_name is UNSET: - errors.append( - "azure_service_bus_namespace_name is required for creation" - ) - if self.azure_service_bus_namespace_qualified_name is UNSET: - errors.append( - "azure_service_bus_namespace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"AzureServiceBusTopic validation failed: {errors}") - - def minimize(self) -> "AzureServiceBusTopic": - """ - Return a minimal copy of this AzureServiceBusTopic with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new AzureServiceBusTopic with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new AzureServiceBusTopic instance with only the minimum required fields. - """ - self.validate() - return AzureServiceBusTopic(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedAzureServiceBusTopic": - """ - Create a :class:`RelatedAzureServiceBusTopic` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedAzureServiceBusTopic reference to this asset. - """ - if self.guid is not UNSET: - return RelatedAzureServiceBusTopic(guid=self.guid) - return RelatedAzureServiceBusTopic(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -437,6 +371,11 @@ class AzureServiceBusTopicRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -527,6 +466,7 @@ class AzureServiceBusTopicNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -614,9 +554,6 @@ def _azure_service_bus_topic_to_nested( is_incomplete=azure_service_bus_topic.is_incomplete, provenance_type=azure_service_bus_topic.provenance_type, home_id=azure_service_bus_topic.home_id, - depth=azure_service_bus_topic.depth, - immediate_upstream=azure_service_bus_topic.immediate_upstream, - immediate_downstream=azure_service_bus_topic.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -652,6 +589,7 @@ def _azure_service_bus_topic_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -660,9 +598,6 @@ def 
_azure_service_bus_topic_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_azure_service_bus_topic_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -733,6 +668,9 @@ def _azure_service_bus_topic_from_nested_bytes( AzureServiceBusTopic.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +AzureServiceBusTopic.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) AzureServiceBusTopic.MEANINGS = RelationField("meanings") AzureServiceBusTopic.MC_MONITORS = RelationField("mcMonitors") AzureServiceBusTopic.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/bi.py b/pyatlan_v9/model/assets/bi.py index 8a7e1e5b8..5f12ee4c8 100644 --- a/pyatlan_v9/model/assets/bi.py +++ b/pyatlan_v9/model/assets/bi.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedBI from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +78,7 @@ class BI(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class BI(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + 
type_name: Union[str, UnsetType] = "BI" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class BI(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class BI(Asset): def __post_init__(self) -> None: self.type_name = "BI" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this BI instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"BI validation failed: {errors}") - - def minimize(self) -> "BI": - """ - Return a minimal copy of this BI with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new BI with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new BI instance with only the minimum required fields. - """ - self.validate() - return BI(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedBI": - """ - Create a :class:`RelatedBI` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedBI reference to this asset. - """ - if self.guid is not UNSET: - return RelatedBI(guid=self.guid) - return RelatedBI(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class BIRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -449,6 +402,7 @@ class BINested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -514,9 +468,6 @@ def _bi_to_nested(bi: BI) -> BINested: is_incomplete=bi.is_incomplete, provenance_type=bi.provenance_type, home_id=bi.home_id, - depth=bi.depth, - immediate_upstream=bi.immediate_upstream, - immediate_downstream=bi.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, 
append_relationship_attributes=append_rels, @@ -546,6 +497,7 @@ def _bi_from_nested(nested: BINested) -> BI: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -554,9 +506,6 @@ def _bi_from_nested(nested: BINested) -> BI: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_bi_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -594,6 +543,9 @@ def _bi_from_nested_bytes(data: bytes, serde: Serde) -> BI: BI.METRICS = RelationField("metrics") BI.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") BI.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +BI.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) BI.MEANINGS = RelationField("meanings") BI.MC_MONITORS = RelationField("mcMonitors") BI.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/bi_process.py b/pyatlan_v9/model/assets/bi_process.py index fbdb107b9..78a4430f5 100644 --- a/pyatlan_v9/model/assets/bi_process.py +++ b/pyatlan_v9/model/assets/bi_process.py @@ -46,11 +46,12 @@ from .fabric_related import RelatedFabricActivity from .fivetran_related import RelatedFivetranConnector from .flow_related import RelatedFlowControlOperation +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .matillion_related import RelatedMatillionComponent from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .power_bi_related import RelatedPowerBIDataflow -from .process_related import RelatedBIProcess, RelatedColumnProcess +from 
.process_related import RelatedColumnProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -75,6 +76,7 @@ class BIProcess(Asset): AST: ClassVar[Any] = None ADDITIONAL_ETL_CONTEXT: ClassVar[Any] = None AI_DATASET_TYPE: ClassVar[Any] = None + IS_PASS_THROUGH: ClassVar[Any] = None ADF_ACTIVITY: ClassVar[Any] = None AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None @@ -90,6 +92,7 @@ class BIProcess(Asset): FABRIC_ACTIVITIES: ClassVar[Any] = None FIVETRAN_CONNECTOR: ClassVar[Any] = None FLOW_ORCHESTRATED_BY: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_COMPONENT: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -109,6 +112,8 @@ class BIProcess(Asset): SODA_CHECKS: ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "BIProcess" + code: Union[str, None, UnsetType] = UNSET """Code that ran within the process.""" @@ -127,6 +132,9 @@ class BIProcess(Asset): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + adf_activity: Union[RelatedAdfActivity, None, UnsetType] = UNSET """ADF Activity that is associated with this lineage process.""" @@ -174,6 +182,11 @@ class BIProcess(Asset): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this 
Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -237,66 +250,6 @@ class BIProcess(Asset): def __post_init__(self) -> None: self.type_name = "BIProcess" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this BIProcess instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"BIProcess validation failed: {errors}") - - def minimize(self) -> "BIProcess": - """ - Return a minimal copy of this BIProcess with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new BIProcess with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new BIProcess instance with only the minimum required fields. 
- """ - self.validate() - return BIProcess(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedBIProcess": - """ - Create a :class:`RelatedBIProcess` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedBIProcess reference to this asset. - """ - if self.guid is not UNSET: - return RelatedBIProcess(guid=self.guid) - return RelatedBIProcess(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -370,6 +323,9 @@ class BIProcessAttributes(AssetAttributes): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + class BIProcessRelationshipAttributes(AssetRelationshipAttributes): """BIProcess-specific relationship attributes for nested API format.""" @@ -421,6 +377,11 @@ class BIProcessRelationshipAttributes(AssetRelationshipAttributes): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -516,6 +477,7 @@ class BIProcessNested(AssetNested): "fabric_activities", "fivetran_connector", 
"flow_orchestrated_by", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_component", "mc_monitors", @@ -548,6 +510,7 @@ def _populate_bi_process_attrs(attrs: BIProcessAttributes, obj: BIProcess) -> No attrs.ast = obj.ast attrs.additional_etl_context = obj.additional_etl_context attrs.ai_dataset_type = obj.ai_dataset_type + attrs.is_pass_through = obj.is_pass_through def _extract_bi_process_attrs(attrs: BIProcessAttributes) -> dict: @@ -561,6 +524,7 @@ def _extract_bi_process_attrs(attrs: BIProcessAttributes) -> dict: result["ast"] = attrs.ast result["additional_etl_context"] = attrs.additional_etl_context result["ai_dataset_type"] = attrs.ai_dataset_type + result["is_pass_through"] = attrs.is_pass_through return result @@ -597,9 +561,6 @@ def _bi_process_to_nested(bi_process: BIProcess) -> BIProcessNested: is_incomplete=bi_process.is_incomplete, provenance_type=bi_process.provenance_type, home_id=bi_process.home_id, - depth=bi_process.depth, - immediate_upstream=bi_process.immediate_upstream, - immediate_downstream=bi_process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -631,6 +592,7 @@ def _bi_process_from_nested(nested: BIProcessNested) -> BIProcess: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -639,9 +601,6 @@ def _bi_process_from_nested(nested: BIProcessNested) -> BIProcess: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_bi_process_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -662,7 +621,11 @@ def _bi_process_from_nested_bytes(data: bytes, 
serde: Serde) -> BIProcess: # --------------------------------------------------------------------------- # Deferred field descriptor initialization # --------------------------------------------------------------------------- -from pyatlan.model.fields.atlan_fields import KeywordField, RelationField # noqa: E402 +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, + KeywordField, + RelationField, +) BIProcess.CODE = KeywordField("code", "code") BIProcess.SQL = KeywordField("sql", "sql") @@ -674,6 +637,7 @@ def _bi_process_from_nested_bytes(data: bytes, serde: Serde) -> BIProcess: "additionalEtlContext", "additionalEtlContext" ) BIProcess.AI_DATASET_TYPE = KeywordField("aiDatasetType", "aiDatasetType") +BIProcess.IS_PASS_THROUGH = BooleanField("isPassThrough", "isPassThrough") BIProcess.ADF_ACTIVITY = RelationField("adfActivity") BIProcess.AIRFLOW_TASKS = RelationField("airflowTasks") BIProcess.ANOMALO_CHECKS = RelationField("anomaloChecks") @@ -689,6 +653,9 @@ def _bi_process_from_nested_bytes(data: bytes, serde: Serde) -> BIProcess: BIProcess.FABRIC_ACTIVITIES = RelationField("fabricActivities") BIProcess.FIVETRAN_CONNECTOR = RelationField("fivetranConnector") BIProcess.FLOW_ORCHESTRATED_BY = RelationField("flowOrchestratedBy") +BIProcess.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) BIProcess.MEANINGS = RelationField("meanings") BIProcess.MATILLION_COMPONENT = RelationField("matillionComponent") BIProcess.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/bigquery_related.py b/pyatlan_v9/model/assets/bigquery_related.py index e77fa73a3..c140f1815 100644 --- a/pyatlan_v9/model/assets/bigquery_related.py +++ b/pyatlan_v9/model/assets/bigquery_related.py @@ -59,19 +59,19 @@ class RelatedBigqueryRoutine(RelatedProcedure): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "BigqueryRoutine" so it serializes correctly - 
bigquery_routine_type: Union[str, None, UnsetType] = UNSET + bigquery_type: Union[str, None, UnsetType] = UNSET """Type of bigquery routine (sp, udf, or tvf).""" - bigquery_routine_arguments: Union[List[str], None, UnsetType] = UNSET + bigquery_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the routine.""" - bigquery_routine_return_type: Union[str, None, UnsetType] = UNSET + bigquery_return_type: Union[str, None, UnsetType] = UNSET """Return data type of the bigquery routine (null for stored procedures).""" - bigquery_routine_security_type: Union[str, None, UnsetType] = UNSET + bigquery_security_type: Union[str, None, UnsetType] = UNSET """Security type of the routine, always null.""" - bigquery_routine_ddl: Union[str, None, UnsetType] = UNSET + bigquery_ddl: Union[str, None, UnsetType] = UNSET """The ddl statement used to create the bigquery routine.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/bigquery_routine.py b/pyatlan_v9/model/assets/bigquery_routine.py index d3ad896db..12decec9d 100644 --- a/pyatlan_v9/model/assets/bigquery_routine.py +++ b/pyatlan_v9/model/assets/bigquery_routine.py @@ -38,7 +38,6 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .bigquery_related import RelatedBigqueryRoutine from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric @@ -48,6 +47,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -76,11 +76,11 @@ class BigqueryRoutine(Asset): Instance of a bigquery routine in atlan. Can be a stored procedure, udf, or tvf. 
""" - BIGQUERY_ROUTINE_TYPE: ClassVar[Any] = None - BIGQUERY_ROUTINE_ARGUMENTS: ClassVar[Any] = None - BIGQUERY_ROUTINE_RETURN_TYPE: ClassVar[Any] = None - BIGQUERY_ROUTINE_SECURITY_TYPE: ClassVar[Any] = None - BIGQUERY_ROUTINE_DDL: ClassVar[Any] = None + BIGQUERY_TYPE: ClassVar[Any] = None + BIGQUERY_ARGUMENTS: ClassVar[Any] = None + BIGQUERY_RETURN_TYPE: ClassVar[Any] = None + BIGQUERY_SECURITY_TYPE: ClassVar[Any] = None + BIGQUERY_DDL: ClassVar[Any] = None DEFINITION: ClassVar[Any] = None SQL_LANGUAGE: ClassVar[Any] = None SQL_RUNTIME_VERSION: ClassVar[Any] = None @@ -138,6 +138,7 @@ class BigqueryRoutine(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -161,19 +162,21 @@ class BigqueryRoutine(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - bigquery_routine_type: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "BigqueryRoutine" + + bigquery_type: Union[str, None, UnsetType] = UNSET """Type of bigquery routine (sp, udf, or tvf).""" - bigquery_routine_arguments: Union[List[str], None, UnsetType] = UNSET + bigquery_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the routine.""" - bigquery_routine_return_type: Union[str, None, UnsetType] = UNSET + bigquery_return_type: Union[str, None, UnsetType] = UNSET """Return data type of the bigquery routine (null for stored procedures).""" - bigquery_routine_security_type: Union[str, None, UnsetType] = UNSET + bigquery_security_type: Union[str, None, UnsetType] = UNSET """Security type of the routine, always null.""" - bigquery_routine_ddl: Union[str, None, UnsetType] = UNSET + bigquery_ddl: Union[str, None, UnsetType] = UNSET """The ddl statement used to create 
the bigquery routine.""" definition: Union[str, None, UnsetType] = UNSET @@ -355,6 +358,11 @@ class BigqueryRoutine(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -436,69 +444,6 @@ class BigqueryRoutine(Asset): def __post_init__(self) -> None: self.type_name = "BigqueryRoutine" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this BigqueryRoutine instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.definition is UNSET: - errors.append("definition is required for creation") - if errors: - raise ValueError(f"BigqueryRoutine validation failed: {errors}") - - def minimize(self) -> "BigqueryRoutine": - """ - Return a minimal copy of this BigqueryRoutine with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new BigqueryRoutine with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new BigqueryRoutine instance with only the minimum required fields. - """ - self.validate() - return BigqueryRoutine(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedBigqueryRoutine": - """ - Create a :class:`RelatedBigqueryRoutine` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedBigqueryRoutine reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedBigqueryRoutine(guid=self.guid) - return RelatedBigqueryRoutine(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -556,19 +501,19 @@ def from_json( class BigqueryRoutineAttributes(AssetAttributes): """BigqueryRoutine-specific attributes for nested API format.""" - bigquery_routine_type: Union[str, None, UnsetType] = UNSET + bigquery_type: Union[str, None, UnsetType] = UNSET """Type of bigquery routine (sp, udf, or tvf).""" - bigquery_routine_arguments: Union[List[str], None, UnsetType] = UNSET + bigquery_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the routine.""" - bigquery_routine_return_type: Union[str, None, UnsetType] = UNSET + bigquery_return_type: Union[str, None, UnsetType] = UNSET """Return data type of the bigquery routine (null for stored procedures).""" - bigquery_routine_security_type: Union[str, None, UnsetType] = UNSET + bigquery_security_type: Union[str, None, UnsetType] = UNSET """Security type of the routine, always null.""" - bigquery_routine_ddl: Union[str, None, UnsetType] = UNSET + bigquery_ddl: Union[str, None, UnsetType] = UNSET """The ddl statement used to create the bigquery routine.""" definition: Union[str, None, UnsetType] = UNSET @@ -754,6 +699,11 @@ class BigqueryRoutineRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked 
to this asset.""" @@ -874,6 +824,7 @@ class BigqueryRoutineNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -904,11 +855,11 @@ def _populate_bigquery_routine_attrs( ) -> None: """Populate BigqueryRoutine-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.bigquery_routine_type = obj.bigquery_routine_type - attrs.bigquery_routine_arguments = obj.bigquery_routine_arguments - attrs.bigquery_routine_return_type = obj.bigquery_routine_return_type - attrs.bigquery_routine_security_type = obj.bigquery_routine_security_type - attrs.bigquery_routine_ddl = obj.bigquery_routine_ddl + attrs.bigquery_type = obj.bigquery_type + attrs.bigquery_arguments = obj.bigquery_arguments + attrs.bigquery_return_type = obj.bigquery_return_type + attrs.bigquery_security_type = obj.bigquery_security_type + attrs.bigquery_ddl = obj.bigquery_ddl attrs.definition = obj.definition attrs.sql_language = obj.sql_language attrs.sql_runtime_version = obj.sql_runtime_version @@ -955,11 +906,11 @@ def _populate_bigquery_routine_attrs( def _extract_bigquery_routine_attrs(attrs: BigqueryRoutineAttributes) -> dict: """Extract all BigqueryRoutine attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["bigquery_routine_type"] = attrs.bigquery_routine_type - result["bigquery_routine_arguments"] = attrs.bigquery_routine_arguments - result["bigquery_routine_return_type"] = attrs.bigquery_routine_return_type - result["bigquery_routine_security_type"] = attrs.bigquery_routine_security_type - result["bigquery_routine_ddl"] = attrs.bigquery_routine_ddl + result["bigquery_type"] = attrs.bigquery_type + result["bigquery_arguments"] = attrs.bigquery_arguments + result["bigquery_return_type"] = attrs.bigquery_return_type + result["bigquery_security_type"] = attrs.bigquery_security_type + result["bigquery_ddl"] = 
attrs.bigquery_ddl result["definition"] = attrs.definition result["sql_language"] = attrs.sql_language result["sql_runtime_version"] = attrs.sql_runtime_version @@ -1047,9 +998,6 @@ def _bigquery_routine_to_nested( is_incomplete=bigquery_routine.is_incomplete, provenance_type=bigquery_routine.provenance_type, home_id=bigquery_routine.home_id, - depth=bigquery_routine.depth, - immediate_upstream=bigquery_routine.immediate_upstream, - immediate_downstream=bigquery_routine.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1083,6 +1031,7 @@ def _bigquery_routine_from_nested(nested: BigqueryRoutineNested) -> BigqueryRout updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1091,9 +1040,6 @@ def _bigquery_routine_from_nested(nested: BigqueryRoutineNested) -> BigqueryRout is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_bigquery_routine_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1124,21 +1070,17 @@ def _bigquery_routine_from_nested_bytes(data: bytes, serde: Serde) -> BigqueryRo RelationField, ) -BigqueryRoutine.BIGQUERY_ROUTINE_TYPE = KeywordField( - "bigqueryRoutineType", "bigqueryRoutineType" +BigqueryRoutine.BIGQUERY_TYPE = KeywordField("bigqueryType", "bigqueryType") +BigqueryRoutine.BIGQUERY_ARGUMENTS = KeywordField( + "bigqueryArguments", "bigqueryArguments" ) -BigqueryRoutine.BIGQUERY_ROUTINE_ARGUMENTS = KeywordField( - "bigqueryRoutineArguments", "bigqueryRoutineArguments" +BigqueryRoutine.BIGQUERY_RETURN_TYPE = KeywordField( + "bigqueryReturnType", "bigqueryReturnType" ) 
-BigqueryRoutine.BIGQUERY_ROUTINE_RETURN_TYPE = KeywordField( - "bigqueryRoutineReturnType", "bigqueryRoutineReturnType" -) -BigqueryRoutine.BIGQUERY_ROUTINE_SECURITY_TYPE = KeywordField( - "bigqueryRoutineSecurityType", "bigqueryRoutineSecurityType" -) -BigqueryRoutine.BIGQUERY_ROUTINE_DDL = KeywordField( - "bigqueryRoutineDdl", "bigqueryRoutineDdl" +BigqueryRoutine.BIGQUERY_SECURITY_TYPE = KeywordField( + "bigquerySecurityType", "bigquerySecurityType" ) +BigqueryRoutine.BIGQUERY_DDL = KeywordField("bigqueryDdl", "bigqueryDdl") BigqueryRoutine.DEFINITION = KeywordField("definition", "definition") BigqueryRoutine.SQL_LANGUAGE = KeywordTextField( "sqlLanguage", "sqlLanguage", "sqlLanguage.text" @@ -1243,6 +1185,9 @@ def _bigquery_routine_from_nested_bytes(data: bytes, serde: Serde) -> BigqueryRo BigqueryRoutine.DBT_SOURCES = RelationField("dbtSources") BigqueryRoutine.SQL_DBT_SOURCES = RelationField("sqlDBTSources") BigqueryRoutine.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +BigqueryRoutine.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) BigqueryRoutine.MEANINGS = RelationField("meanings") BigqueryRoutine.MC_MONITORS = RelationField("mcMonitors") BigqueryRoutine.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/business_policy.py b/pyatlan_v9/model/assets/business_policy.py index 56e8d958e..672661436 100644 --- a/pyatlan_v9/model/assets/business_policy.py +++ b/pyatlan_v9/model/assets/business_policy.py @@ -44,6 +44,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -84,6 +85,7 @@ class 
BusinessPolicy(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +97,8 @@ class BusinessPolicy(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "BusinessPolicy" + business_policy_type: Union[str, None, UnsetType] = UNSET """Type of business policy""" @@ -171,6 +175,11 @@ class BusinessPolicy(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -208,66 +217,6 @@ class BusinessPolicy(Asset): def __post_init__(self) -> None: self.type_name = "BusinessPolicy" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this BusinessPolicy instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"BusinessPolicy validation failed: {errors}") - - def minimize(self) -> "BusinessPolicy": - """ - Return a minimal copy of this BusinessPolicy with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new BusinessPolicy with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new BusinessPolicy instance with only the minimum required fields. - """ - self.validate() - return BusinessPolicy(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedBusinessPolicy": - """ - Create a :class:`RelatedBusinessPolicy` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedBusinessPolicy reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedBusinessPolicy(guid=self.guid) - return RelatedBusinessPolicy(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -403,6 +352,11 @@ class BusinessPolicyRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -471,6 +425,7 @@ class BusinessPolicyNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -556,9 +511,6 @@ def _business_policy_to_nested(business_policy: BusinessPolicy) -> BusinessPolic is_incomplete=business_policy.is_incomplete, provenance_type=business_policy.provenance_type, home_id=business_policy.home_id, - depth=business_policy.depth, - immediate_upstream=business_policy.immediate_upstream, - immediate_downstream=business_policy.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -592,6 +544,7 @@ def _business_policy_from_nested(nested: BusinessPolicyNested) -> BusinessPolicy updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -600,9 +553,6 @@ def _business_policy_from_nested(nested: BusinessPolicyNested) -> BusinessPolicy 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_business_policy_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -677,6 +627,9 @@ def _business_policy_from_nested_bytes(data: bytes, serde: Serde) -> BusinessPol BusinessPolicy.METRICS = RelationField("metrics") BusinessPolicy.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") BusinessPolicy.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +BusinessPolicy.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) BusinessPolicy.MEANINGS = RelationField("meanings") BusinessPolicy.MC_MONITORS = RelationField("mcMonitors") BusinessPolicy.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/calculation_view.py b/pyatlan_v9/model/assets/calculation_view.py index f64217eeb..f2b354e6b 100644 --- a/pyatlan_v9/model/assets/calculation_view.py +++ b/pyatlan_v9/model/assets/calculation_view.py @@ -48,6 +48,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -63,7 +64,7 @@ RelatedSqlInsightBusinessQuestion, RelatedSqlInsightJoin, ) -from .sql_related import RelatedCalculationView, RelatedColumn, RelatedSchema +from .sql_related import RelatedColumn, RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -77,10 +78,10 @@ class CalculationView(Asset): """ COLUMN_COUNT: ClassVar[Any] = None - CALCULATION_VIEW_VERSION_ID: ClassVar[Any] = None - CALCULATION_VIEW_ACTIVATED_BY: ClassVar[Any] = None - CALCULATION_VIEW_ACTIVATED_AT: 
ClassVar[Any] = None - CALCULATION_VIEW_PACKAGE_ID: ClassVar[Any] = None + SQL_VERSION_ID: ClassVar[Any] = None + SQL_ACTIVATED_BY: ClassVar[Any] = None + SQL_ACTIVATED_AT: ClassVar[Any] = None + SQL_PACKAGE_ID: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -126,6 +127,7 @@ class CalculationView(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -149,19 +151,21 @@ class CalculationView(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CalculationView" + column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this calculation view.""" - calculation_view_version_id: Union[int, None, UnsetType] = UNSET + sql_version_id: Union[int, None, UnsetType] = UNSET """The version ID of this calculation view.""" - calculation_view_activated_by: Union[str, None, UnsetType] = UNSET + sql_activated_by: Union[str, None, UnsetType] = UNSET """The owner who activated the calculation view""" - calculation_view_activated_at: Union[int, None, UnsetType] = UNSET + sql_activated_at: Union[int, None, UnsetType] = UNSET """Time at which this calculation view was activated at""" - calculation_view_package_id: Union[str, None, UnsetType] = UNSET + sql_package_id: Union[str, None, UnsetType] = UNSET """The full package id path to which a calculation view belongs/resides in the repository.""" query_count: Union[int, None, UnsetType] = UNSET @@ -307,6 +311,11 @@ class CalculationView(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + 
List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -396,80 +405,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CalculationView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") 
- if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"CalculationView validation failed: {errors}") - - def minimize(self) -> "CalculationView": - """ - Return a minimal copy of this CalculationView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CalculationView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CalculationView instance with only the minimum required fields. - """ - self.validate() - return CalculationView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCalculationView": - """ - Create a :class:`RelatedCalculationView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCalculationView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCalculationView(guid=self.guid) - return RelatedCalculationView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -530,16 +465,16 @@ class CalculationViewAttributes(AssetAttributes): column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this calculation view.""" - calculation_view_version_id: Union[int, None, UnsetType] = UNSET + sql_version_id: Union[int, None, UnsetType] = UNSET """The version ID of this calculation view.""" - calculation_view_activated_by: Union[str, None, UnsetType] = UNSET + sql_activated_by: Union[str, None, UnsetType] = UNSET """The owner who activated the calculation view""" - calculation_view_activated_at: Union[int, None, UnsetType] = UNSET + sql_activated_at: Union[int, None, UnsetType] = UNSET """Time at which this calculation view was activated at""" - calculation_view_package_id: Union[str, None, UnsetType] = UNSET + sql_package_id: Union[str, None, UnsetType] = UNSET """The full package id path to which a calculation view belongs/resides in the repository.""" query_count: Union[int, None, UnsetType] = UNSET @@ -689,6 +624,11 @@ class CalculationViewRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -809,6 +749,7 @@ class CalculationViewNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + 
"gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -840,10 +781,10 @@ def _populate_calculation_view_attrs( """Populate CalculationView-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.column_count = obj.column_count - attrs.calculation_view_version_id = obj.calculation_view_version_id - attrs.calculation_view_activated_by = obj.calculation_view_activated_by - attrs.calculation_view_activated_at = obj.calculation_view_activated_at - attrs.calculation_view_package_id = obj.calculation_view_package_id + attrs.sql_version_id = obj.sql_version_id + attrs.sql_activated_by = obj.sql_activated_by + attrs.sql_activated_at = obj.sql_activated_at + attrs.sql_package_id = obj.sql_package_id attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -879,10 +820,10 @@ def _extract_calculation_view_attrs(attrs: CalculationViewAttributes) -> dict: """Extract all CalculationView attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) result["column_count"] = attrs.column_count - result["calculation_view_version_id"] = attrs.calculation_view_version_id - result["calculation_view_activated_by"] = attrs.calculation_view_activated_by - result["calculation_view_activated_at"] = attrs.calculation_view_activated_at - result["calculation_view_package_id"] = attrs.calculation_view_package_id + result["sql_version_id"] = attrs.sql_version_id + result["sql_activated_by"] = attrs.sql_activated_by + result["sql_activated_at"] = attrs.sql_activated_at + result["sql_package_id"] = attrs.sql_package_id result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -958,9 +899,6 @@ def _calculation_view_to_nested( is_incomplete=calculation_view.is_incomplete, provenance_type=calculation_view.provenance_type, 
home_id=calculation_view.home_id, - depth=calculation_view.depth, - immediate_upstream=calculation_view.immediate_upstream, - immediate_downstream=calculation_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -994,6 +932,7 @@ def _calculation_view_from_nested(nested: CalculationViewNested) -> CalculationV updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1002,9 +941,6 @@ def _calculation_view_from_nested(nested: CalculationViewNested) -> CalculationV is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_calculation_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1035,18 +971,10 @@ def _calculation_view_from_nested_bytes(data: bytes, serde: Serde) -> Calculatio ) CalculationView.COLUMN_COUNT = NumericField("columnCount", "columnCount") -CalculationView.CALCULATION_VIEW_VERSION_ID = NumericField( - "calculationViewVersionId", "calculationViewVersionId" -) -CalculationView.CALCULATION_VIEW_ACTIVATED_BY = KeywordField( - "calculationViewActivatedBy", "calculationViewActivatedBy" -) -CalculationView.CALCULATION_VIEW_ACTIVATED_AT = NumericField( - "calculationViewActivatedAt", "calculationViewActivatedAt" -) -CalculationView.CALCULATION_VIEW_PACKAGE_ID = KeywordField( - "calculationViewPackageId", "calculationViewPackageId" -) +CalculationView.SQL_VERSION_ID = NumericField("sqlVersionId", "sqlVersionId") +CalculationView.SQL_ACTIVATED_BY = KeywordField("sqlActivatedBy", "sqlActivatedBy") +CalculationView.SQL_ACTIVATED_AT = NumericField("sqlActivatedAt", "sqlActivatedAt") 
+CalculationView.SQL_PACKAGE_ID = KeywordField("sqlPackageId", "sqlPackageId") CalculationView.QUERY_COUNT = NumericField("queryCount", "queryCount") CalculationView.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") CalculationView.QUERY_USER_MAP = KeywordField("queryUserMap", "queryUserMap") @@ -1127,6 +1055,9 @@ def _calculation_view_from_nested_bytes(data: bytes, serde: Serde) -> Calculatio CalculationView.DBT_SOURCES = RelationField("dbtSources") CalculationView.SQL_DBT_SOURCES = RelationField("sqlDBTSources") CalculationView.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +CalculationView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CalculationView.MEANINGS = RelationField("meanings") CalculationView.MC_MONITORS = RelationField("mcMonitors") CalculationView.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cassandra.py b/pyatlan_v9/model/assets/cassandra.py index 769fd2109..70fc86c65 100644 --- a/pyatlan_v9/model/assets/cassandra.py +++ b/pyatlan_v9/model/assets/cassandra.py @@ -38,10 +38,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cassandra_related import RelatedCassandra from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +85,7 @@ class Cassandra(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +103,8 
@@ class Cassandra(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cassandra" + cassandra_keyspace_name: Union[str, None, UnsetType] = UNSET """Name of the keyspace for the Cassandra asset.""" @@ -171,6 +174,11 @@ class Cassandra(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -226,66 +234,6 @@ class Cassandra(Asset): def __post_init__(self) -> None: self.type_name = "Cassandra" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cassandra instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cassandra validation failed: {errors}") - - def minimize(self) -> "Cassandra": - """ - Return a minimal copy of this Cassandra with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cassandra with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cassandra instance with only the minimum required fields. - """ - self.validate() - return Cassandra(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandra": - """ - Create a :class:`RelatedCassandra` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCassandra reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCassandra(guid=self.guid) - return RelatedCassandra(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -414,6 +362,11 @@ class CassandraRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -500,6 +453,7 @@ class CassandraNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -577,9 +531,6 @@ def _cassandra_to_nested(cassandra: Cassandra) -> CassandraNested: is_incomplete=cassandra.is_incomplete, provenance_type=cassandra.provenance_type, home_id=cassandra.home_id, - depth=cassandra.depth, - immediate_upstream=cassandra.immediate_upstream, - immediate_downstream=cassandra.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -611,6 +562,7 @@ def _cassandra_from_nested(nested: CassandraNested) -> Cassandra: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -619,9 +571,6 @@ def _cassandra_from_nested(nested: CassandraNested) -> Cassandra: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -677,6 +626,9 @@ def _cassandra_from_nested_bytes(data: bytes, serde: Serde) -> Cassandra: Cassandra.METRICS = RelationField("metrics") Cassandra.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Cassandra.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Cassandra.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Cassandra.MEANINGS = RelationField("meanings") Cassandra.MC_MONITORS = RelationField("mcMonitors") Cassandra.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cassandra_column.py b/pyatlan_v9/model/assets/cassandra_column.py index 59a07e4a2..9fce9a90d 100644 --- a/pyatlan_v9/model/assets/cassandra_column.py +++ b/pyatlan_v9/model/assets/cassandra_column.py @@ -39,14 +39,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cassandra_related import ( - RelatedCassandraColumn, - RelatedCassandraTable, - RelatedCassandraView, -) +from .cassandra_related import RelatedCassandraTable, RelatedCassandraView from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -99,6 +96,7 @@ class CassandraColumn(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: 
ClassVar[Any] = None @@ -116,6 +114,8 @@ class CassandraColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraColumn" + cassandra_column_clustering_order: Union[str, None, UnsetType] = UNSET """Clustering order of the CassandraColumn.""" @@ -212,6 +212,11 @@ class CassandraColumn(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -275,78 +280,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cassandra_table is UNSET: - errors.append("cassandra_table is required for creation") - if self.cassandra_table_name is UNSET: - errors.append("cassandra_table_name is required for creation") - if self.cassandra_table_qualified_name is UNSET: - errors.append("cassandra_table_qualified_name is required for creation") - if self.cassandra_keyspace_name is UNSET: - errors.append("cassandra_keyspace_name is required for creation") - if errors: - raise ValueError(f"CassandraColumn validation failed: {errors}") - - def minimize(self) -> "CassandraColumn": - """ - Return a minimal copy of this CassandraColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraColumn instance with only the minimum required fields. - """ - self.validate() - return CassandraColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraColumn": - """ - Create a :class:`RelatedCassandraColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedCassandraColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCassandraColumn(guid=self.guid) - return RelatedCassandraColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -504,6 +437,11 @@ class CassandraColumnRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -594,6 +532,7 @@ class CassandraColumnNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -697,9 +636,6 @@ def _cassandra_column_to_nested( is_incomplete=cassandra_column.is_incomplete, provenance_type=cassandra_column.provenance_type, home_id=cassandra_column.home_id, - depth=cassandra_column.depth, - immediate_upstream=cassandra_column.immediate_upstream, - immediate_downstream=cassandra_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -733,6 +669,7 @@ def _cassandra_column_from_nested(nested: CassandraColumnNested) -> CassandraCol updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -741,9 +678,6 @@ def _cassandra_column_from_nested(nested: 
CassandraColumnNested) -> CassandraCol is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -836,6 +770,9 @@ def _cassandra_column_from_nested_bytes(data: bytes, serde: Serde) -> CassandraC CassandraColumn.METRICS = RelationField("metrics") CassandraColumn.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CassandraColumn.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CassandraColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CassandraColumn.MEANINGS = RelationField("meanings") CassandraColumn.MC_MONITORS = RelationField("mcMonitors") CassandraColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cassandra_index.py b/pyatlan_v9/model/assets/cassandra_index.py index eabd966fd..8e01e241a 100644 --- a/pyatlan_v9/model/assets/cassandra_index.py +++ b/pyatlan_v9/model/assets/cassandra_index.py @@ -39,10 +39,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cassandra_related import RelatedCassandraIndex, RelatedCassandraTable +from .cassandra_related import RelatedCassandraTable from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -90,6 +91,7 @@ class CassandraIndex(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -107,6 +109,8 @@ class CassandraIndex(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraIndex" + cassandra_index_kind: Union[str, None, UnsetType] = UNSET """Kind of index (e.g. COMPOSITES).""" @@ -188,6 +192,11 @@ class CassandraIndex(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -251,78 +260,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraIndex instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cassandra_table is UNSET: - errors.append("cassandra_table is required for creation") - if self.cassandra_table_name is UNSET: - errors.append("cassandra_table_name is required for creation") - if self.cassandra_table_qualified_name is UNSET: - errors.append("cassandra_table_qualified_name is required for creation") - if self.cassandra_keyspace_name is UNSET: - errors.append("cassandra_keyspace_name is required for creation") - if errors: - raise ValueError(f"CassandraIndex validation failed: {errors}") - - def minimize(self) -> "CassandraIndex": - """ - Return a minimal copy of this CassandraIndex with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraIndex with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraIndex instance with only the minimum required fields. - """ - self.validate() - return CassandraIndex(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraIndex": - """ - Create a :class:`RelatedCassandraIndex` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedCassandraIndex reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCassandraIndex(guid=self.guid) - return RelatedCassandraIndex(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -463,6 +400,11 @@ class CassandraIndexRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -552,6 +494,7 @@ class CassandraIndexNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -639,9 +582,6 @@ def _cassandra_index_to_nested(cassandra_index: CassandraIndex) -> CassandraInde is_incomplete=cassandra_index.is_incomplete, provenance_type=cassandra_index.provenance_type, home_id=cassandra_index.home_id, - depth=cassandra_index.depth, - immediate_upstream=cassandra_index.immediate_upstream, - immediate_downstream=cassandra_index.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -675,6 +615,7 @@ def _cassandra_index_from_nested(nested: CassandraIndexNested) -> CassandraIndex updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -683,9 +624,6 @@ def 
_cassandra_index_from_nested(nested: CassandraIndexNested) -> CassandraIndex is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_index_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -759,6 +697,9 @@ def _cassandra_index_from_nested_bytes(data: bytes, serde: Serde) -> CassandraIn CassandraIndex.METRICS = RelationField("metrics") CassandraIndex.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CassandraIndex.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CassandraIndex.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CassandraIndex.MEANINGS = RelationField("meanings") CassandraIndex.MC_MONITORS = RelationField("mcMonitors") CassandraIndex.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cassandra_keyspace.py b/pyatlan_v9/model/assets/cassandra_keyspace.py index 67c4cee3a..421a63010 100644 --- a/pyatlan_v9/model/assets/cassandra_keyspace.py +++ b/pyatlan_v9/model/assets/cassandra_keyspace.py @@ -38,14 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cassandra_related import ( - RelatedCassandraKeyspace, - RelatedCassandraTable, - RelatedCassandraView, -) +from .cassandra_related import RelatedCassandraTable, RelatedCassandraView from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -95,6 +92,7 @@ class CassandraKeyspace(Asset): METRICS: ClassVar[Any] = None 
DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +110,8 @@ class CassandraKeyspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraKeyspace" + cassandra_keyspace_durable_writes: Union[bool, None, UnsetType] = UNSET """Indicates whether durable writes are enabled for the CassandraKeyspace.""" @@ -199,6 +199,11 @@ class CassandraKeyspace(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -254,66 +259,6 @@ class CassandraKeyspace(Asset): def __post_init__(self) -> None: self.type_name = "CassandraKeyspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraKeyspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CassandraKeyspace validation failed: {errors}") - - def minimize(self) -> "CassandraKeyspace": - """ - Return a minimal copy of this CassandraKeyspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraKeyspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraKeyspace instance with only the minimum required fields. - """ - self.validate() - return CassandraKeyspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraKeyspace": - """ - Create a :class:`RelatedCassandraKeyspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCassandraKeyspace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCassandraKeyspace(guid=self.guid) - return RelatedCassandraKeyspace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -462,6 +407,11 @@ class CassandraKeyspaceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -552,6 +502,7 @@ class CassandraKeyspaceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -645,9 +596,6 @@ def _cassandra_keyspace_to_nested( is_incomplete=cassandra_keyspace.is_incomplete, provenance_type=cassandra_keyspace.provenance_type, home_id=cassandra_keyspace.home_id, - depth=cassandra_keyspace.depth, - immediate_upstream=cassandra_keyspace.immediate_upstream, - immediate_downstream=cassandra_keyspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -683,6 +631,7 @@ def _cassandra_keyspace_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -691,9 +640,6 @@ def _cassandra_keyspace_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_keyspace_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -777,6 +723,9 @@ def _cassandra_keyspace_from_nested_bytes( CassandraKeyspace.METRICS = RelationField("metrics") CassandraKeyspace.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CassandraKeyspace.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CassandraKeyspace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CassandraKeyspace.MEANINGS = RelationField("meanings") CassandraKeyspace.MC_MONITORS = RelationField("mcMonitors") CassandraKeyspace.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cassandra_table.py b/pyatlan_v9/model/assets/cassandra_table.py index a89122b5c..8520847b5 100644 --- a/pyatlan_v9/model/assets/cassandra_table.py +++ b/pyatlan_v9/model/assets/cassandra_table.py @@ -43,11 +43,11 @@ RelatedCassandraColumn, RelatedCassandraIndex, RelatedCassandraKeyspace, - RelatedCassandraTable, ) from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -112,6 +112,7 @@ class CassandraTable(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -129,6 +130,8 @@ class CassandraTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] 
= None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraTable" + cassandra_table_bloom_filter_fp_chance: Union[float, None, UnsetType] = ( msgspec.field(default=UNSET, name="cassandraTableBloomFilterFPChance") ) @@ -271,6 +274,11 @@ class CassandraTable(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -332,74 +340,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cassandra_keyspace is UNSET: - errors.append("cassandra_keyspace is required for creation") - if self.cassandra_keyspace_name is UNSET: - errors.append("cassandra_keyspace_name is required for creation") - if errors: - raise ValueError(f"CassandraTable validation failed: {errors}") - - def minimize(self) -> "CassandraTable": - """ - Return a minimal copy of this CassandraTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraTable instance with only the minimum required fields. - """ - self.validate() - return CassandraTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraTable": - """ - Create a :class:`RelatedCassandraTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCassandraTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCassandraTable(guid=self.guid) - return RelatedCassandraTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -601,6 +541,11 @@ class CassandraTableRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -692,6 +637,7 @@ class CassandraTableNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -829,9 +775,6 @@ def _cassandra_table_to_nested(cassandra_table: CassandraTable) -> CassandraTabl is_incomplete=cassandra_table.is_incomplete, provenance_type=cassandra_table.provenance_type, home_id=cassandra_table.home_id, - depth=cassandra_table.depth, - immediate_upstream=cassandra_table.immediate_upstream, - immediate_downstream=cassandra_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -865,6 +808,7 @@ def _cassandra_table_from_nested(nested: CassandraTableNested) -> CassandraTable updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -873,9 +817,6 @@ def _cassandra_table_from_nested(nested: CassandraTableNested) -> CassandraTable 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -999,6 +940,9 @@ def _cassandra_table_from_nested_bytes(data: bytes, serde: Serde) -> CassandraTa CassandraTable.METRICS = RelationField("metrics") CassandraTable.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CassandraTable.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CassandraTable.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CassandraTable.MEANINGS = RelationField("meanings") CassandraTable.MC_MONITORS = RelationField("mcMonitors") CassandraTable.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cassandra_view.py b/pyatlan_v9/model/assets/cassandra_view.py index 04ad312ee..cec0c200f 100644 --- a/pyatlan_v9/model/assets/cassandra_view.py +++ b/pyatlan_v9/model/assets/cassandra_view.py @@ -39,14 +39,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cassandra_related import ( - RelatedCassandraColumn, - RelatedCassandraKeyspace, - RelatedCassandraView, -) +from .cassandra_related import RelatedCassandraColumn, RelatedCassandraKeyspace from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -109,6 +106,7 @@ class CassandraView(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = 
None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -126,6 +124,8 @@ class CassandraView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CassandraView" + cassandra_view_table_id: Union[str, None, UnsetType] = UNSET """ID of the base table in the CassandraView.""" @@ -264,6 +264,11 @@ class CassandraView(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -325,74 +330,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CassandraView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cassandra_keyspace is UNSET: - errors.append("cassandra_keyspace is required for creation") - if self.cassandra_keyspace_name is UNSET: - errors.append("cassandra_keyspace_name is required for creation") - if errors: - raise ValueError(f"CassandraView validation failed: {errors}") - - def minimize(self) -> "CassandraView": - """ - Return a minimal copy of this CassandraView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CassandraView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CassandraView instance with only the minimum required fields. - """ - self.validate() - return CassandraView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCassandraView": - """ - Create a :class:`RelatedCassandraView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCassandraView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCassandraView(guid=self.guid) - return RelatedCassandraView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -590,6 +527,11 @@ class CassandraViewRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -680,6 +622,7 @@ class CassandraViewNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -813,9 +756,6 @@ def _cassandra_view_to_nested(cassandra_view: CassandraView) -> CassandraViewNes is_incomplete=cassandra_view.is_incomplete, provenance_type=cassandra_view.provenance_type, home_id=cassandra_view.home_id, - depth=cassandra_view.depth, - immediate_upstream=cassandra_view.immediate_upstream, - immediate_downstream=cassandra_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -849,6 +789,7 @@ def _cassandra_view_from_nested(nested: CassandraViewNested) -> CassandraView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -857,9 +798,6 @@ def _cassandra_view_from_nested(nested: CassandraViewNested) -> CassandraView: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cassandra_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -979,6 +917,9 @@ def _cassandra_view_from_nested_bytes(data: bytes, serde: Serde) -> CassandraVie CassandraView.METRICS = RelationField("metrics") CassandraView.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CassandraView.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CassandraView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CassandraView.MEANINGS = RelationField("meanings") CassandraView.MC_MONITORS = RelationField("mcMonitors") CassandraView.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/catalog.py b/pyatlan_v9/model/assets/catalog.py index 091c82a33..3a8c96cb2 100644 --- a/pyatlan_v9/model/assets/catalog.py +++ b/pyatlan_v9/model/assets/catalog.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedCatalog from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +78,7 @@ class Catalog(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class 
Catalog(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Catalog" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class Catalog(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class Catalog(Asset): def __post_init__(self) -> None: self.type_name = "Catalog" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Catalog instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Catalog validation failed: {errors}") - - def minimize(self) -> "Catalog": - """ - Return a minimal copy of this Catalog with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Catalog with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Catalog instance with only the minimum required fields. - """ - self.validate() - return Catalog(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCatalog": - """ - Create a :class:`RelatedCatalog` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCatalog reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCatalog(guid=self.guid) - return RelatedCatalog(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class CatalogRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class CatalogNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _catalog_to_nested(catalog: Catalog) -> CatalogNested: is_incomplete=catalog.is_incomplete, provenance_type=catalog.provenance_type, home_id=catalog.home_id, - depth=catalog.depth, - immediate_upstream=catalog.immediate_upstream, - immediate_downstream=catalog.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -550,6 +501,7 @@ def _catalog_from_nested(nested: CatalogNested) -> Catalog: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -558,9 +510,6 @@ def _catalog_from_nested(nested: CatalogNested) -> Catalog: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_catalog_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -598,6 +547,9 @@ def _catalog_from_nested_bytes(data: bytes, serde: Serde) -> Catalog: Catalog.METRICS = RelationField("metrics") Catalog.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Catalog.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Catalog.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Catalog.MEANINGS = RelationField("meanings") Catalog.MC_MONITORS = RelationField("mcMonitors") Catalog.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cloud.py b/pyatlan_v9/model/assets/cloud.py index 359471074..4b1da7448 100644 --- a/pyatlan_v9/model/assets/cloud.py +++ b/pyatlan_v9/model/assets/cloud.py @@ -36,10 +36,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cloud_related import RelatedCloud from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -69,6 +69,7 @@ class Cloud(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -80,6 +81,8 @@ class Cloud(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cloud" + cloud_uniform_resource_name: Union[str, None, UnsetType] = UNSET 
"""Uniform resource name (URN) for the asset: AWS ARN, Google Cloud URI, Azure resource ID, Oracle OCID, and so on.""" @@ -115,6 +118,11 @@ class Cloud(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -152,66 +160,6 @@ class Cloud(Asset): def __post_init__(self) -> None: self.type_name = "Cloud" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cloud instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cloud validation failed: {errors}") - - def minimize(self) -> "Cloud": - """ - Return a minimal copy of this Cloud with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cloud with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cloud instance with only the minimum required fields. - """ - self.validate() - return Cloud(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCloud": - """ - Create a :class:`RelatedCloud` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCloud reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCloud(guid=self.guid) - return RelatedCloud(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -306,6 +254,11 @@ class CloudRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -370,6 +323,7 @@ class CloudNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -429,9 +383,6 @@ def _cloud_to_nested(cloud: Cloud) -> CloudNested: is_incomplete=cloud.is_incomplete, provenance_type=cloud.provenance_type, home_id=cloud.home_id, - depth=cloud.depth, - immediate_upstream=cloud.immediate_upstream, - immediate_downstream=cloud.immediate_downstream, 
attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -461,6 +412,7 @@ def _cloud_from_nested(nested: CloudNested) -> Cloud: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -469,9 +421,6 @@ def _cloud_from_nested(nested: CloudNested) -> Cloud: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cloud_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -507,6 +456,9 @@ def _cloud_from_nested_bytes(data: bytes, serde: Serde) -> Cloud: Cloud.METRICS = RelationField("metrics") Cloud.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Cloud.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Cloud.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Cloud.MEANINGS = RelationField("meanings") Cloud.MC_MONITORS = RelationField("mcMonitors") Cloud.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognite.py 
b/pyatlan_v9/model/assets/cognite.py index 9e1aa6a85..f6cc4f1b1 100644 --- a/pyatlan_v9/model/assets/cognite.py +++ b/pyatlan_v9/model/assets/cognite.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCognite from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +78,7 @@ class Cognite(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class Cognite(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cognite" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class Cognite(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class Cognite(Asset): def __post_init__(self) -> None: self.type_name = "Cognite" - # ========================================================================= - # SDK Methods - # 
========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cognite instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cognite validation failed: {errors}") - - def minimize(self) -> "Cognite": - """ - Return a minimal copy of this Cognite with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cognite with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cognite instance with only the minimum required fields. - """ - self.validate() - return Cognite(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognite": - """ - Create a :class:`RelatedCognite` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognite reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognite(guid=self.guid) - return RelatedCognite(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class CogniteRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class CogniteNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _cognite_to_nested(cognite: Cognite) -> CogniteNested: is_incomplete=cognite.is_incomplete, provenance_type=cognite.provenance_type, home_id=cognite.home_id, - depth=cognite.depth, - immediate_upstream=cognite.immediate_upstream, - immediate_downstream=cognite.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -550,6 +501,7 @@ def _cognite_from_nested(nested: CogniteNested) -> Cognite: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -558,9 +510,6 @@ def _cognite_from_nested(nested: CogniteNested) -> Cognite: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -598,6 +547,9 @@ def _cognite_from_nested_bytes(data: bytes, serde: Serde) -> Cognite: Cognite.METRICS = RelationField("metrics") Cognite.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Cognite.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Cognite.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Cognite.MEANINGS = RelationField("meanings") Cognite.MC_MONITORS = RelationField("mcMonitors") Cognite.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognite3d_model.py b/pyatlan_v9/model/assets/cognite3d_model.py index da64e4e43..6a7cdf341 100644 --- a/pyatlan_v9/model/assets/cognite3d_model.py +++ b/pyatlan_v9/model/assets/cognite3d_model.py @@ -38,10 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCognite3DModel, RelatedCogniteAsset +from .cognite_related import RelatedCogniteAsset from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +81,7 @@ class Cognite3DModel(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +99,8 @@ class Cognite3DModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cognite3DModel" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -149,6 +153,11 @@ class Cognite3DModel(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -210,72 +219,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cognite3DModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"Cognite3DModel validation failed: {errors}") - - def minimize(self) -> "Cognite3DModel": - """ - Return a minimal copy of this Cognite3DModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cognite3DModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cognite3DModel instance with only the minimum required fields. - """ - self.validate() - return Cognite3DModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognite3DModel": - """ - Create a :class:`RelatedCognite3DModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognite3DModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognite3DModel(guid=self.guid) - return RelatedCognite3DModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -387,6 +330,11 @@ class Cognite3DModelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -476,6 +424,7 @@ class Cognite3DModelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -545,9 +494,6 @@ def _cognite3d_model_to_nested(cognite3d_model: Cognite3DModel) -> Cognite3DMode is_incomplete=cognite3d_model.is_incomplete, provenance_type=cognite3d_model.provenance_type, home_id=cognite3d_model.home_id, - depth=cognite3d_model.depth, - immediate_upstream=cognite3d_model.immediate_upstream, - immediate_downstream=cognite3d_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -581,6 +527,7 @@ def _cognite3d_model_from_nested(nested: Cognite3DModelNested) -> Cognite3DModel updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -589,9 +536,6 @@ def _cognite3d_model_from_nested(nested: Cognite3DModelNested) -> Cognite3DModel 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite3d_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -638,6 +582,9 @@ def _cognite3d_model_from_nested_bytes(data: bytes, serde: Serde) -> Cognite3DMo Cognite3DModel.METRICS = RelationField("metrics") Cognite3DModel.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Cognite3DModel.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Cognite3DModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Cognite3DModel.MEANINGS = RelationField("meanings") Cognite3DModel.MC_MONITORS = RelationField("mcMonitors") Cognite3DModel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognite_asset.py b/pyatlan_v9/model/assets/cognite_asset.py index 447eeb0de..ea6fe9503 100644 --- a/pyatlan_v9/model/assets/cognite_asset.py +++ b/pyatlan_v9/model/assets/cognite_asset.py @@ -39,7 +39,6 @@ ) from .cognite_related import ( RelatedCognite3DModel, - RelatedCogniteAsset, RelatedCogniteEvent, RelatedCogniteFile, RelatedCogniteSequence, @@ -48,6 +47,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -90,6 +90,7 @@ class CogniteAsset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -107,6 +108,8 @@ class CogniteAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteAsset" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -171,6 +174,11 @@ class CogniteAsset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -226,66 +234,6 @@ class CogniteAsset(Asset): def __post_init__(self) -> None: self.type_name = "CogniteAsset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CogniteAsset validation failed: {errors}") - - def minimize(self) -> "CogniteAsset": - """ - Return a minimal copy of this CogniteAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CogniteAsset instance with only the minimum required fields. - """ - self.validate() - return CogniteAsset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteAsset": - """ - Create a :class:`RelatedCogniteAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteAsset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCogniteAsset(guid=self.guid) - return RelatedCogniteAsset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -409,6 +357,11 @@ class CogniteAssetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -502,6 +455,7 @@ class CogniteAssetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -569,9 +523,6 @@ def _cognite_asset_to_nested(cognite_asset: CogniteAsset) -> CogniteAssetNested: is_incomplete=cognite_asset.is_incomplete, provenance_type=cognite_asset.provenance_type, home_id=cognite_asset.home_id, - depth=cognite_asset.depth, - immediate_upstream=cognite_asset.immediate_upstream, - immediate_downstream=cognite_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -605,6 +556,7 @@ def _cognite_asset_from_nested(nested: CogniteAssetNested) -> CogniteAsset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -613,9 +565,6 @@ def _cognite_asset_from_nested(nested: CogniteAssetNested) -> CogniteAsset: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_asset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -662,6 +611,9 @@ def _cognite_asset_from_nested_bytes(data: bytes, serde: Serde) -> CogniteAsset: CogniteAsset.METRICS = RelationField("metrics") CogniteAsset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CogniteAsset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CogniteAsset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CogniteAsset.MEANINGS = RelationField("meanings") CogniteAsset.MC_MONITORS = RelationField("mcMonitors") CogniteAsset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognite_event.py b/pyatlan_v9/model/assets/cognite_event.py index ec57f4a49..23432940f 100644 --- a/pyatlan_v9/model/assets/cognite_event.py +++ b/pyatlan_v9/model/assets/cognite_event.py @@ -38,10 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCogniteAsset, RelatedCogniteEvent +from .cognite_related import RelatedCogniteAsset from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +81,7 @@ class CogniteEvent(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = 
None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +99,8 @@ class CogniteEvent(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteEvent" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -149,6 +153,11 @@ class CogniteEvent(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -210,72 +219,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteEvent instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"CogniteEvent validation failed: {errors}") - - def minimize(self) -> "CogniteEvent": - """ - Return a minimal copy of this CogniteEvent with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteEvent with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CogniteEvent instance with only the minimum required fields. - """ - self.validate() - return CogniteEvent(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteEvent": - """ - Create a :class:`RelatedCogniteEvent` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteEvent reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCogniteEvent(guid=self.guid) - return RelatedCogniteEvent(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -387,6 +330,11 @@ class CogniteEventRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -476,6 +424,7 @@ class CogniteEventNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -543,9 +492,6 @@ def _cognite_event_to_nested(cognite_event: CogniteEvent) -> CogniteEventNested: is_incomplete=cognite_event.is_incomplete, provenance_type=cognite_event.provenance_type, home_id=cognite_event.home_id, - depth=cognite_event.depth, - immediate_upstream=cognite_event.immediate_upstream, - immediate_downstream=cognite_event.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -579,6 +525,7 @@ def _cognite_event_from_nested(nested: CogniteEventNested) -> CogniteEvent: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -587,9 +534,6 @@ def _cognite_event_from_nested(nested: CogniteEventNested) -> CogniteEvent: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_event_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -632,6 +576,9 @@ def _cognite_event_from_nested_bytes(data: bytes, serde: Serde) -> CogniteEvent: CogniteEvent.METRICS = RelationField("metrics") CogniteEvent.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CogniteEvent.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CogniteEvent.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CogniteEvent.MEANINGS = RelationField("meanings") CogniteEvent.MC_MONITORS = RelationField("mcMonitors") CogniteEvent.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognite_file.py b/pyatlan_v9/model/assets/cognite_file.py index 5eaa21101..6803f7c32 100644 --- a/pyatlan_v9/model/assets/cognite_file.py +++ b/pyatlan_v9/model/assets/cognite_file.py @@ -38,10 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCogniteAsset, RelatedCogniteFile +from .cognite_related import RelatedCogniteAsset from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +81,7 @@ class CogniteFile(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None 
MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +99,8 @@ class CogniteFile(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteFile" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -149,6 +153,11 @@ class CogniteFile(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -210,72 +219,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteFile instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"CogniteFile validation failed: {errors}") - - def minimize(self) -> "CogniteFile": - """ - Return a minimal copy of this CogniteFile with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteFile with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CogniteFile instance with only the minimum required fields. - """ - self.validate() - return CogniteFile(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteFile": - """ - Create a :class:`RelatedCogniteFile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteFile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCogniteFile(guid=self.guid) - return RelatedCogniteFile(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -387,6 +330,11 @@ class CogniteFileRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -474,6 +422,7 @@ class CogniteFileNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -541,9 +490,6 @@ def _cognite_file_to_nested(cognite_file: CogniteFile) -> CogniteFileNested: is_incomplete=cognite_file.is_incomplete, provenance_type=cognite_file.provenance_type, home_id=cognite_file.home_id, - depth=cognite_file.depth, - immediate_upstream=cognite_file.immediate_upstream, - immediate_downstream=cognite_file.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -575,6 +521,7 @@ def _cognite_file_from_nested(nested: CogniteFileNested) -> CogniteFile: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -583,9 +530,6 @@ def _cognite_file_from_nested(nested: CogniteFileNested) -> CogniteFile: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_file_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -628,6 +572,9 @@ def _cognite_file_from_nested_bytes(data: bytes, serde: Serde) -> CogniteFile: CogniteFile.METRICS = RelationField("metrics") CogniteFile.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CogniteFile.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CogniteFile.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CogniteFile.MEANINGS = RelationField("meanings") CogniteFile.MC_MONITORS = RelationField("mcMonitors") CogniteFile.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognite_sequence.py b/pyatlan_v9/model/assets/cognite_sequence.py index b120ae5e7..16603661e 100644 --- a/pyatlan_v9/model/assets/cognite_sequence.py +++ b/pyatlan_v9/model/assets/cognite_sequence.py @@ -38,10 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCogniteAsset, RelatedCogniteSequence +from .cognite_related import RelatedCogniteAsset from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +81,7 @@ class CogniteSequence(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: 
ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +99,8 @@ class CogniteSequence(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteSequence" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -149,6 +153,11 @@ class CogniteSequence(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -210,72 +219,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteSequence instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"CogniteSequence validation failed: {errors}") - - def minimize(self) -> "CogniteSequence": - """ - Return a minimal copy of this CogniteSequence with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteSequence with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CogniteSequence instance with only the minimum required fields. - """ - self.validate() - return CogniteSequence(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteSequence": - """ - Create a :class:`RelatedCogniteSequence` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteSequence reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCogniteSequence(guid=self.guid) - return RelatedCogniteSequence(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -389,6 +332,11 @@ class CogniteSequenceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -478,6 +426,7 @@ class CogniteSequenceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -549,9 +498,6 @@ def _cognite_sequence_to_nested( is_incomplete=cognite_sequence.is_incomplete, provenance_type=cognite_sequence.provenance_type, home_id=cognite_sequence.home_id, - depth=cognite_sequence.depth, - immediate_upstream=cognite_sequence.immediate_upstream, - immediate_downstream=cognite_sequence.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -585,6 +531,7 @@ def _cognite_sequence_from_nested(nested: CogniteSequenceNested) -> CogniteSeque updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -593,9 +540,6 @@ def _cognite_sequence_from_nested(nested: CogniteSequenceNested) -> CogniteSeque is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_sequence_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -642,6 +586,9 @@ def _cognite_sequence_from_nested_bytes(data: bytes, serde: Serde) -> CogniteSeq CogniteSequence.METRICS = RelationField("metrics") CogniteSequence.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CogniteSequence.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CogniteSequence.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CogniteSequence.MEANINGS = RelationField("meanings") CogniteSequence.MC_MONITORS = RelationField("mcMonitors") CogniteSequence.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognite_time_series.py b/pyatlan_v9/model/assets/cognite_time_series.py index 7c01c4f7f..543bcb6f7 100644 --- a/pyatlan_v9/model/assets/cognite_time_series.py +++ b/pyatlan_v9/model/assets/cognite_time_series.py @@ -38,10 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognite_related import RelatedCogniteAsset, RelatedCogniteTimeSeries +from .cognite_related import RelatedCogniteAsset from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +81,7 @@ class CogniteTimeSeries(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None 
MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +99,8 @@ class CogniteTimeSeries(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CogniteTimeSeries" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -149,6 +153,11 @@ class CogniteTimeSeries(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -210,72 +219,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CogniteTimeSeries instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognite_asset is UNSET: - errors.append("cognite_asset is required for creation") - if errors: - raise ValueError(f"CogniteTimeSeries validation failed: {errors}") - - def minimize(self) -> "CogniteTimeSeries": - """ - Return a minimal copy of this CogniteTimeSeries with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CogniteTimeSeries with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CogniteTimeSeries instance with only the minimum required fields. - """ - self.validate() - return CogniteTimeSeries(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCogniteTimeSeries": - """ - Create a :class:`RelatedCogniteTimeSeries` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCogniteTimeSeries reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCogniteTimeSeries(guid=self.guid) - return RelatedCogniteTimeSeries(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -389,6 +332,11 @@ class CogniteTimeSeriesRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -478,6 +426,7 @@ class CogniteTimeSeriesNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -549,9 +498,6 @@ def _cognite_time_series_to_nested( is_incomplete=cognite_time_series.is_incomplete, provenance_type=cognite_time_series.provenance_type, home_id=cognite_time_series.home_id, - depth=cognite_time_series.depth, - immediate_upstream=cognite_time_series.immediate_upstream, - immediate_downstream=cognite_time_series.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -587,6 +533,7 @@ def _cognite_time_series_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -595,9 +542,6 @@ def _cognite_time_series_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, 
- depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognite_time_series_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -646,6 +590,9 @@ def _cognite_time_series_from_nested_bytes( CogniteTimeSeries.METRICS = RelationField("metrics") CogniteTimeSeries.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CogniteTimeSeries.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CogniteTimeSeries.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CogniteTimeSeries.MEANINGS = RelationField("meanings") CogniteTimeSeries.MC_MONITORS = RelationField("mcMonitors") CogniteTimeSeries.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos.py b/pyatlan_v9/model/assets/cognos.py index 83a786252..09b282e26 100644 --- a/pyatlan_v9/model/assets/cognos.py +++ b/pyatlan_v9/model/assets/cognos.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import RelatedCognos from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -87,6 +87,7 @@ class Cognos(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +105,8 @@ class Cognos(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + 
type_name: Union[str, UnsetType] = "Cognos" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -180,6 +183,11 @@ class Cognos(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -235,66 +243,6 @@ class Cognos(Asset): def __post_init__(self) -> None: self.type_name = "Cognos" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cognos instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cognos validation failed: {errors}") - - def minimize(self) -> "Cognos": - """ - Return a minimal copy of this Cognos with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cognos with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cognos instance with only the minimum required fields. - """ - self.validate() - return Cognos(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognos": - """ - Create a :class:`RelatedCognos` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognos reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCognos(guid=self.guid) - return RelatedCognos(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -430,6 +378,11 @@ class CognosRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -516,6 +469,7 @@ class CognosNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -599,9 +553,6 @@ def _cognos_to_nested(cognos: Cognos) -> CognosNested: is_incomplete=cognos.is_incomplete, provenance_type=cognos.provenance_type, home_id=cognos.home_id, - depth=cognos.depth, - immediate_upstream=cognos.immediate_upstream, - 
immediate_downstream=cognos.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -631,6 +582,7 @@ def _cognos_from_nested(nested: CognosNested) -> Cognos: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -639,9 +591,6 @@ def _cognos_from_nested(nested: CognosNested) -> Cognos: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -699,6 +648,9 @@ def _cognos_from_nested_bytes(data: bytes, serde: Serde) -> Cognos: Cognos.METRICS = RelationField("metrics") Cognos.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Cognos.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Cognos.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Cognos.MEANINGS = RelationField("meanings") Cognos.MC_MONITORS = RelationField("mcMonitors") Cognos.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_column.py b/pyatlan_v9/model/assets/cognos_column.py index 416edcc36..a8d31d394 100644 --- a/pyatlan_v9/model/assets/cognos_column.py +++ b/pyatlan_v9/model/assets/cognos_column.py @@ -39,7 +39,6 @@ _populate_asset_attrs, ) from .cognos_related import ( - RelatedCognosColumn, RelatedCognosDashboard, RelatedCognosDataset, RelatedCognosExploration, @@ -50,6 +49,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from 
.gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -72,9 +72,9 @@ class CognosColumn(Asset): Instance of a Cognos column in Atlan. """ - COGNOS_COLUMN_DATATYPE: ClassVar[Any] = None - COGNOS_COLUMN_NULLABLE: ClassVar[Any] = None - COGNOS_COLUMN_REGULAR_AGGREGATE: ClassVar[Any] = None + COGNOS_DATATYPE: ClassVar[Any] = None + COGNOS_NULLABLE: ClassVar[Any] = None + COGNOS_REGULAR_AGGREGATE: ClassVar[Any] = None COGNOS_ID: ClassVar[Any] = None COGNOS_PATH: ClassVar[Any] = None COGNOS_PARENT_NAME: ClassVar[Any] = None @@ -105,6 +105,7 @@ class CognosColumn(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -122,13 +123,15 @@ class CognosColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - cognos_column_datatype: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "CognosColumn" + + cognos_datatype: Union[str, None, UnsetType] = UNSET """Data type of the CognosColumn.""" - cognos_column_nullable: Union[str, None, UnsetType] = UNSET + cognos_nullable: Union[str, None, UnsetType] = UNSET """Whether the CognosColumn is nullable.""" - cognos_column_regular_aggregate: Union[str, None, UnsetType] = UNSET + cognos_regular_aggregate: Union[str, None, UnsetType] = UNSET """How data should be summarized when aggregated across different dimensions or groupings.""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -225,6 +228,11 @@ class CognosColumn(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + 
List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -288,72 +296,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_dataset is UNSET: - errors.append("cognos_dataset is required for creation") - if errors: - raise ValueError(f"CognosColumn validation failed: {errors}") - - def minimize(self) -> "CognosColumn": - """ - Return a minimal copy of this CognosColumn with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosColumn instance with only the minimum required fields. - """ - self.validate() - return CognosColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosColumn": - """ - Create a :class:`RelatedCognosColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCognosColumn(guid=self.guid) - return RelatedCognosColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -409,13 +351,13 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> CognosColum class CognosColumnAttributes(AssetAttributes): """CognosColumn-specific attributes for nested API format.""" - cognos_column_datatype: Union[str, None, UnsetType] = UNSET + cognos_datatype: Union[str, None, UnsetType] = UNSET """Data type of the CognosColumn.""" - cognos_column_nullable: Union[str, None, UnsetType] = UNSET + cognos_nullable: Union[str, None, UnsetType] = UNSET """Whether the CognosColumn is nullable.""" - cognos_column_regular_aggregate: Union[str, None, UnsetType] = UNSET + cognos_regular_aggregate: Union[str, None, UnsetType] = UNSET """How data should be summarized when aggregated across different dimensions or groupings.""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -516,6 +458,11 @@ class CognosColumnRelationshipAttributes(AssetRelationshipAttributes): ) """Rules 
where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -610,6 +557,7 @@ class CognosColumnNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -634,9 +582,9 @@ def _populate_cognos_column_attrs( ) -> None: """Populate CognosColumn-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.cognos_column_datatype = obj.cognos_column_datatype - attrs.cognos_column_nullable = obj.cognos_column_nullable - attrs.cognos_column_regular_aggregate = obj.cognos_column_regular_aggregate + attrs.cognos_datatype = obj.cognos_datatype + attrs.cognos_nullable = obj.cognos_nullable + attrs.cognos_regular_aggregate = obj.cognos_regular_aggregate attrs.cognos_id = obj.cognos_id attrs.cognos_path = obj.cognos_path attrs.cognos_parent_name = obj.cognos_parent_name @@ -652,9 +600,9 @@ def _populate_cognos_column_attrs( def _extract_cognos_column_attrs(attrs: CognosColumnAttributes) -> dict: """Extract all CognosColumn attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["cognos_column_datatype"] = attrs.cognos_column_datatype - result["cognos_column_nullable"] = attrs.cognos_column_nullable - result["cognos_column_regular_aggregate"] = attrs.cognos_column_regular_aggregate + result["cognos_datatype"] = attrs.cognos_datatype + result["cognos_nullable"] = attrs.cognos_nullable + result["cognos_regular_aggregate"] = attrs.cognos_regular_aggregate result["cognos_id"] = attrs.cognos_id result["cognos_path"] = attrs.cognos_path result["cognos_parent_name"] = attrs.cognos_parent_name @@ 
-701,9 +649,6 @@ def _cognos_column_to_nested(cognos_column: CognosColumn) -> CognosColumnNested: is_incomplete=cognos_column.is_incomplete, provenance_type=cognos_column.provenance_type, home_id=cognos_column.home_id, - depth=cognos_column.depth, - immediate_upstream=cognos_column.immediate_upstream, - immediate_downstream=cognos_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -737,6 +682,7 @@ def _cognos_column_from_nested(nested: CognosColumnNested) -> CognosColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -745,9 +691,6 @@ def _cognos_column_from_nested(nested: CognosColumnNested) -> CognosColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -775,14 +718,10 @@ def _cognos_column_from_nested_bytes(data: bytes, serde: Serde) -> CognosColumn: RelationField, ) -CognosColumn.COGNOS_COLUMN_DATATYPE = KeywordField( - "cognosColumnDatatype", "cognosColumnDatatype" -) -CognosColumn.COGNOS_COLUMN_NULLABLE = KeywordField( - "cognosColumnNullable", "cognosColumnNullable" -) -CognosColumn.COGNOS_COLUMN_REGULAR_AGGREGATE = KeywordField( - "cognosColumnRegularAggregate", "cognosColumnRegularAggregate" +CognosColumn.COGNOS_DATATYPE = KeywordField("cognosDatatype", "cognosDatatype") +CognosColumn.COGNOS_NULLABLE = KeywordField("cognosNullable", "cognosNullable") +CognosColumn.COGNOS_REGULAR_AGGREGATE = KeywordField( + "cognosRegularAggregate", "cognosRegularAggregate" ) CognosColumn.COGNOS_ID = 
KeywordField("cognosId", "cognosId") CognosColumn.COGNOS_PATH = KeywordField("cognosPath", "cognosPath") @@ -824,6 +763,9 @@ def _cognos_column_from_nested_bytes(data: bytes, serde: Serde) -> CognosColumn: CognosColumn.METRICS = RelationField("metrics") CognosColumn.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosColumn.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosColumn.MEANINGS = RelationField("meanings") CognosColumn.MC_MONITORS = RelationField("mcMonitors") CognosColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_dashboard.py b/pyatlan_v9/model/assets/cognos_dashboard.py index 1a9a61959..424cc7bd0 100644 --- a/pyatlan_v9/model/assets/cognos_dashboard.py +++ b/pyatlan_v9/model/assets/cognos_dashboard.py @@ -38,14 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, - RelatedCognosDashboard, - RelatedCognosFolder, -) +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 +91,7 @@ class CognosDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +109,8 @@ class CognosDashboard(Asset): 
INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosDashboard" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -193,6 +193,11 @@ class CognosDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,72 +261,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosDashboard validation failed: {errors}") - - def minimize(self) -> "CognosDashboard": - """ - Return a minimal copy of this CognosDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosDashboard instance with only the minimum required fields. - """ - self.validate() - return CognosDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosDashboard": - """ - Create a :class:`RelatedCognosDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosDashboard(guid=self.guid) - return RelatedCognosDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -465,6 +404,11 @@ class CognosDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -555,6 +499,7 @@ class CognosDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -644,9 +589,6 @@ def _cognos_dashboard_to_nested( is_incomplete=cognos_dashboard.is_incomplete, provenance_type=cognos_dashboard.provenance_type, home_id=cognos_dashboard.home_id, - depth=cognos_dashboard.depth, - immediate_upstream=cognos_dashboard.immediate_upstream, - immediate_downstream=cognos_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -680,6 +622,7 @@ def _cognos_dashboard_from_nested(nested: CognosDashboardNested) -> CognosDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -688,9 +631,6 @@ def _cognos_dashboard_from_nested(nested: CognosDashboardNested) -> CognosDashbo is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -760,6 +700,9 @@ def _cognos_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> CognosDash CognosDashboard.METRICS = RelationField("metrics") CognosDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosDashboard.MEANINGS = RelationField("meanings") CognosDashboard.MC_MONITORS = RelationField("mcMonitors") CognosDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_dataset.py b/pyatlan_v9/model/assets/cognos_dataset.py index 5c5d0f15f..1e1666f00 100644 --- a/pyatlan_v9/model/assets/cognos_dataset.py +++ b/pyatlan_v9/model/assets/cognos_dataset.py @@ -38,14 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, - RelatedCognosDataset, - RelatedCognosFolder, -) +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 +91,7 @@ class CognosDataset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: 
ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +109,8 @@ class CognosDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosDataset" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -193,6 +193,11 @@ class CognosDataset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,72 +261,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosDataset validation failed: {errors}") - - def minimize(self) -> "CognosDataset": - """ - Return a minimal copy of this CognosDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosDataset instance with only the minimum required fields. - """ - self.validate() - return CognosDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosDataset": - """ - Create a :class:`RelatedCognosDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosDataset(guid=self.guid) - return RelatedCognosDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -463,6 +402,11 @@ class CognosDatasetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -553,6 +497,7 @@ class CognosDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -638,9 +583,6 @@ def _cognos_dataset_to_nested(cognos_dataset: CognosDataset) -> CognosDatasetNes is_incomplete=cognos_dataset.is_incomplete, provenance_type=cognos_dataset.provenance_type, home_id=cognos_dataset.home_id, - depth=cognos_dataset.depth, - immediate_upstream=cognos_dataset.immediate_upstream, - immediate_downstream=cognos_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -674,6 +616,7 @@ def _cognos_dataset_from_nested(nested: CognosDatasetNested) -> CognosDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -682,9 +625,6 @@ def _cognos_dataset_from_nested(nested: CognosDatasetNested) -> CognosDataset: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -750,6 +690,9 @@ def _cognos_dataset_from_nested_bytes(data: bytes, serde: Serde) -> CognosDatase CognosDataset.METRICS = RelationField("metrics") CognosDataset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosDataset.MEANINGS = RelationField("meanings") CognosDataset.MC_MONITORS = RelationField("mcMonitors") CognosDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_datasource.py b/pyatlan_v9/model/assets/cognos_datasource.py index c515f9f0f..97889ac8b 100644 --- a/pyatlan_v9/model/assets/cognos_datasource.py +++ b/pyatlan_v9/model/assets/cognos_datasource.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import RelatedCognosDatasource from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -63,7 +63,7 @@ class CognosDatasource(Asset): Instance of a Cognos datasource in Atlan. 
""" - COGNOS_DATASOURCE_CONNECTION_STRING: ClassVar[Any] = None + COGNOS_CONNECTION_STRING: ClassVar[Any] = None COGNOS_ID: ClassVar[Any] = None COGNOS_PATH: ClassVar[Any] = None COGNOS_PARENT_NAME: ClassVar[Any] = None @@ -88,6 +88,7 @@ class CognosDatasource(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,7 +106,9 @@ class CognosDatasource(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - cognos_datasource_connection_string: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "CognosDatasource" + + cognos_connection_string: Union[str, None, UnsetType] = UNSET """Connection string of a Cognos datasource.""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -184,6 +187,11 @@ class CognosDatasource(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -239,66 +247,6 @@ class CognosDatasource(Asset): def __post_init__(self) -> None: self.type_name = "CognosDatasource" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosDatasource instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CognosDatasource validation failed: {errors}") - - def minimize(self) -> "CognosDatasource": - """ - Return a minimal copy of this CognosDatasource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosDatasource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosDatasource instance with only the minimum required fields. - """ - self.validate() - return CognosDatasource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosDatasource": - """ - Create a :class:`RelatedCognosDatasource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosDatasource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosDatasource(guid=self.guid) - return RelatedCognosDatasource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -356,7 +304,7 @@ def from_json( class CognosDatasourceAttributes(AssetAttributes): """CognosDatasource-specific attributes for nested API format.""" - cognos_datasource_connection_string: Union[str, None, UnsetType] = UNSET + cognos_connection_string: Union[str, None, UnsetType] = UNSET """Connection string of a Cognos datasource.""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -439,6 +387,11 @@ class CognosDatasourceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -527,6 +480,7 @@ class CognosDatasourceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -551,7 +505,7 @@ def _populate_cognos_datasource_attrs( ) -> None: """Populate CognosDatasource-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.cognos_datasource_connection_string = obj.cognos_datasource_connection_string + attrs.cognos_connection_string = obj.cognos_connection_string attrs.cognos_id = obj.cognos_id attrs.cognos_path = obj.cognos_path attrs.cognos_parent_name = obj.cognos_parent_name @@ -567,9 +521,7 @@ def _populate_cognos_datasource_attrs( def _extract_cognos_datasource_attrs(attrs: 
CognosDatasourceAttributes) -> dict: """Extract all CognosDatasource attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["cognos_datasource_connection_string"] = ( - attrs.cognos_datasource_connection_string - ) + result["cognos_connection_string"] = attrs.cognos_connection_string result["cognos_id"] = attrs.cognos_id result["cognos_path"] = attrs.cognos_path result["cognos_parent_name"] = attrs.cognos_parent_name @@ -620,9 +572,6 @@ def _cognos_datasource_to_nested( is_incomplete=cognos_datasource.is_incomplete, provenance_type=cognos_datasource.provenance_type, home_id=cognos_datasource.home_id, - depth=cognos_datasource.depth, - immediate_upstream=cognos_datasource.immediate_upstream, - immediate_downstream=cognos_datasource.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -656,6 +605,7 @@ def _cognos_datasource_from_nested(nested: CognosDatasourceNested) -> CognosData updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -664,9 +614,6 @@ def _cognos_datasource_from_nested(nested: CognosDatasourceNested) -> CognosData is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_datasource_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -696,8 +643,8 @@ def _cognos_datasource_from_nested_bytes(data: bytes, serde: Serde) -> CognosDat RelationField, ) -CognosDatasource.COGNOS_DATASOURCE_CONNECTION_STRING = KeywordField( - "cognosDatasourceConnectionString", "cognosDatasourceConnectionString" +CognosDatasource.COGNOS_CONNECTION_STRING = 
KeywordField( + "cognosConnectionString", "cognosConnectionString" ) CognosDatasource.COGNOS_ID = KeywordField("cognosId", "cognosId") CognosDatasource.COGNOS_PATH = KeywordField("cognosPath", "cognosPath") @@ -737,6 +684,9 @@ def _cognos_datasource_from_nested_bytes(data: bytes, serde: Serde) -> CognosDat CognosDatasource.METRICS = RelationField("metrics") CognosDatasource.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosDatasource.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosDatasource.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosDatasource.MEANINGS = RelationField("meanings") CognosDatasource.MC_MONITORS = RelationField("mcMonitors") CognosDatasource.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_exploration.py b/pyatlan_v9/model/assets/cognos_exploration.py index 10b9dc4f3..60a04c5e8 100644 --- a/pyatlan_v9/model/assets/cognos_exploration.py +++ b/pyatlan_v9/model/assets/cognos_exploration.py @@ -38,14 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, - RelatedCognosExploration, - RelatedCognosFolder, -) +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 +91,7 @@ class CognosExploration(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +109,8 @@ class CognosExploration(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosExploration" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -193,6 +193,11 @@ class CognosExploration(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,72 +261,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosExploration instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosExploration validation failed: {errors}") - - def minimize(self) -> "CognosExploration": - """ - Return a minimal copy of this CognosExploration with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosExploration with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosExploration instance with only the minimum required fields. - """ - self.validate() - return CognosExploration(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosExploration": - """ - Create a :class:`RelatedCognosExploration` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosExploration reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosExploration(guid=self.guid) - return RelatedCognosExploration(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -465,6 +404,11 @@ class CognosExplorationRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -555,6 +499,7 @@ class CognosExplorationNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -644,9 +589,6 @@ def _cognos_exploration_to_nested( is_incomplete=cognos_exploration.is_incomplete, provenance_type=cognos_exploration.provenance_type, home_id=cognos_exploration.home_id, - depth=cognos_exploration.depth, - immediate_upstream=cognos_exploration.immediate_upstream, - immediate_downstream=cognos_exploration.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -682,6 +624,7 @@ def _cognos_exploration_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -690,9 +633,6 @@ def _cognos_exploration_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_exploration_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -764,6 +704,9 @@ def _cognos_exploration_from_nested_bytes( CognosExploration.METRICS = RelationField("metrics") CognosExploration.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosExploration.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosExploration.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosExploration.MEANINGS = RelationField("meanings") CognosExploration.MC_MONITORS = RelationField("mcMonitors") CognosExploration.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_file.py b/pyatlan_v9/model/assets/cognos_file.py index 81ab3c4d6..2d3f5fa95 100644 --- a/pyatlan_v9/model/assets/cognos_file.py +++ b/pyatlan_v9/model/assets/cognos_file.py @@ -38,10 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import RelatedCognosColumn, RelatedCognosFile, RelatedCognosFolder +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -90,6 +91,7 @@ class CognosFile(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None 
@@ -107,6 +109,8 @@ class CognosFile(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosFile" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -189,6 +193,11 @@ class CognosFile(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -252,72 +261,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosFile instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosFile validation failed: {errors}") - - def minimize(self) -> "CognosFile": - """ - Return a minimal copy of this CognosFile with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosFile with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosFile instance with only the minimum required fields. - """ - self.validate() - return CognosFile(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosFile": - """ - Create a :class:`RelatedCognosFile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosFile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosFile(guid=self.guid) - return RelatedCognosFile(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -459,6 +402,11 @@ class CognosFileRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -547,6 +495,7 @@ class CognosFileNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -630,9 +579,6 @@ def _cognos_file_to_nested(cognos_file: CognosFile) -> CognosFileNested: is_incomplete=cognos_file.is_incomplete, provenance_type=cognos_file.provenance_type, home_id=cognos_file.home_id, - depth=cognos_file.depth, - immediate_upstream=cognos_file.immediate_upstream, - immediate_downstream=cognos_file.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -664,6 +610,7 @@ def _cognos_file_from_nested(nested: CognosFileNested) -> CognosFile: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -672,9 +619,6 @@ def _cognos_file_from_nested(nested: CognosFileNested) -> CognosFile: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_file_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -736,6 +680,9 @@ def _cognos_file_from_nested_bytes(data: bytes, serde: Serde) -> CognosFile: CognosFile.METRICS = RelationField("metrics") CognosFile.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosFile.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosFile.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosFile.MEANINGS = RelationField("meanings") CognosFile.MC_MONITORS = RelationField("mcMonitors") CognosFile.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_folder.py b/pyatlan_v9/model/assets/cognos_folder.py index c628f6f09..55e4b8452 100644 --- a/pyatlan_v9/model/assets/cognos_folder.py +++ b/pyatlan_v9/model/assets/cognos_folder.py @@ -51,6 +51,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -73,8 +74,8 @@ class CognosFolder(Asset): Instance of a Cognos folder in Atlan. 
""" - COGNOS_FOLDER_SUB_FOLDER_COUNT: ClassVar[Any] = None - COGNOS_FOLDER_CHILD_OBJECTS_COUNT: ClassVar[Any] = None + COGNOS_SUB_FOLDER_COUNT: ClassVar[Any] = None + COGNOS_CHILD_OBJECTS_COUNT: ClassVar[Any] = None COGNOS_ID: ClassVar[Any] = None COGNOS_PATH: ClassVar[Any] = None COGNOS_PARENT_NAME: ClassVar[Any] = None @@ -108,6 +109,7 @@ class CognosFolder(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -125,10 +127,12 @@ class CognosFolder(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - cognos_folder_sub_folder_count: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "CognosFolder" + + cognos_sub_folder_count: Union[int, None, UnsetType] = UNSET """Number of sub-folders in the folder.""" - cognos_folder_child_objects_count: Union[int, None, UnsetType] = UNSET + cognos_child_objects_count: Union[int, None, UnsetType] = UNSET """Number of children in the folder (excluding subfolders).""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -234,6 +238,11 @@ class CognosFolder(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -295,70 +304,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosFolder instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"CognosFolder validation failed: {errors}") - - def minimize(self) -> "CognosFolder": - """ - Return a minimal copy of this CognosFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosFolder instance with only the minimum required fields. - """ - self.validate() - return CognosFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosFolder": - """ - Create a :class:`RelatedCognosFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosFolder reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCognosFolder(guid=self.guid) - return RelatedCognosFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -414,10 +359,10 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> CognosFolde class CognosFolderAttributes(AssetAttributes): """CognosFolder-specific attributes for nested API format.""" - cognos_folder_sub_folder_count: Union[int, None, UnsetType] = UNSET + cognos_sub_folder_count: Union[int, None, UnsetType] = UNSET """Number of sub-folders in the folder.""" - cognos_folder_child_objects_count: Union[int, None, UnsetType] = UNSET + cognos_child_objects_count: Union[int, None, UnsetType] = UNSET """Number of children in the folder (excluding subfolders).""" cognos_id: Union[str, None, UnsetType] = UNSET @@ -527,6 +472,11 @@ class CognosFolderRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -624,6 +574,7 @@ class CognosFolderNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -648,8 +599,8 @@ def _populate_cognos_folder_attrs( ) -> None: """Populate CognosFolder-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - 
attrs.cognos_folder_sub_folder_count = obj.cognos_folder_sub_folder_count - attrs.cognos_folder_child_objects_count = obj.cognos_folder_child_objects_count + attrs.cognos_sub_folder_count = obj.cognos_sub_folder_count + attrs.cognos_child_objects_count = obj.cognos_child_objects_count attrs.cognos_id = obj.cognos_id attrs.cognos_path = obj.cognos_path attrs.cognos_parent_name = obj.cognos_parent_name @@ -665,10 +616,8 @@ def _populate_cognos_folder_attrs( def _extract_cognos_folder_attrs(attrs: CognosFolderAttributes) -> dict: """Extract all CognosFolder attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["cognos_folder_sub_folder_count"] = attrs.cognos_folder_sub_folder_count - result["cognos_folder_child_objects_count"] = ( - attrs.cognos_folder_child_objects_count - ) + result["cognos_sub_folder_count"] = attrs.cognos_sub_folder_count + result["cognos_child_objects_count"] = attrs.cognos_child_objects_count result["cognos_id"] = attrs.cognos_id result["cognos_path"] = attrs.cognos_path result["cognos_parent_name"] = attrs.cognos_parent_name @@ -715,9 +664,6 @@ def _cognos_folder_to_nested(cognos_folder: CognosFolder) -> CognosFolderNested: is_incomplete=cognos_folder.is_incomplete, provenance_type=cognos_folder.provenance_type, home_id=cognos_folder.home_id, - depth=cognos_folder.depth, - immediate_upstream=cognos_folder.immediate_upstream, - immediate_downstream=cognos_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -751,6 +697,7 @@ def _cognos_folder_from_nested(nested: CognosFolderNested) -> CognosFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -759,9 +706,6 @@ def _cognos_folder_from_nested(nested: 
CognosFolderNested) -> CognosFolder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_folder_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -790,11 +734,11 @@ def _cognos_folder_from_nested_bytes(data: bytes, serde: Serde) -> CognosFolder: RelationField, ) -CognosFolder.COGNOS_FOLDER_SUB_FOLDER_COUNT = NumericField( - "cognosFolderSubFolderCount", "cognosFolderSubFolderCount" +CognosFolder.COGNOS_SUB_FOLDER_COUNT = NumericField( + "cognosSubFolderCount", "cognosSubFolderCount" ) -CognosFolder.COGNOS_FOLDER_CHILD_OBJECTS_COUNT = NumericField( - "cognosFolderChildObjectsCount", "cognosFolderChildObjectsCount" +CognosFolder.COGNOS_CHILD_OBJECTS_COUNT = NumericField( + "cognosChildObjectsCount", "cognosChildObjectsCount" ) CognosFolder.COGNOS_ID = KeywordField("cognosId", "cognosId") CognosFolder.COGNOS_PATH = KeywordField("cognosPath", "cognosPath") @@ -839,6 +783,9 @@ def _cognos_folder_from_nested_bytes(data: bytes, serde: Serde) -> CognosFolder: CognosFolder.METRICS = RelationField("metrics") CognosFolder.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosFolder.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosFolder.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosFolder.MEANINGS = RelationField("meanings") CognosFolder.MC_MONITORS = RelationField("mcMonitors") CognosFolder.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_module.py b/pyatlan_v9/model/assets/cognos_module.py index 8eb4b249e..5f1514e07 100644 --- a/pyatlan_v9/model/assets/cognos_module.py +++ b/pyatlan_v9/model/assets/cognos_module.py @@ -38,14 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, 
- RelatedCognosFolder, - RelatedCognosModule, -) +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 +91,7 @@ class CognosModule(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +109,8 @@ class CognosModule(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosModule" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -193,6 +193,11 @@ class CognosModule(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,72 +261,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosModule instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosModule validation failed: {errors}") - - def minimize(self) -> "CognosModule": - """ - Return a minimal copy of this CognosModule with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosModule with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosModule instance with only the minimum required fields. - """ - self.validate() - return CognosModule(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosModule": - """ - Create a :class:`RelatedCognosModule` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosModule reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosModule(guid=self.guid) - return RelatedCognosModule(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -463,6 +402,11 @@ class CognosModuleRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -553,6 +497,7 @@ class CognosModuleNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -638,9 +583,6 @@ def _cognos_module_to_nested(cognos_module: CognosModule) -> CognosModuleNested: is_incomplete=cognos_module.is_incomplete, provenance_type=cognos_module.provenance_type, home_id=cognos_module.home_id, - depth=cognos_module.depth, - immediate_upstream=cognos_module.immediate_upstream, - immediate_downstream=cognos_module.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -674,6 +616,7 @@ def _cognos_module_from_nested(nested: CognosModuleNested) -> CognosModule: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -682,9 +625,6 @@ def _cognos_module_from_nested(nested: CognosModuleNested) -> CognosModule: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_module_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -748,6 +688,9 @@ def _cognos_module_from_nested_bytes(data: bytes, serde: Serde) -> CognosModule: CognosModule.METRICS = RelationField("metrics") CognosModule.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosModule.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosModule.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosModule.MEANINGS = RelationField("meanings") CognosModule.MC_MONITORS = RelationField("mcMonitors") CognosModule.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_package.py b/pyatlan_v9/model/assets/cognos_package.py index 66afbdb5e..538d372e4 100644 --- a/pyatlan_v9/model/assets/cognos_package.py +++ b/pyatlan_v9/model/assets/cognos_package.py @@ -38,14 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import ( - RelatedCognosColumn, - RelatedCognosFolder, - RelatedCognosPackage, -) +from .cognos_related import RelatedCognosColumn, RelatedCognosFolder from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 +91,7 @@ class CognosPackage(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None 
MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +109,8 @@ class CognosPackage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosPackage" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -193,6 +193,11 @@ class CognosPackage(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,72 +261,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosPackage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosPackage validation failed: {errors}") - - def minimize(self) -> "CognosPackage": - """ - Return a minimal copy of this CognosPackage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosPackage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosPackage instance with only the minimum required fields. - """ - self.validate() - return CognosPackage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosPackage": - """ - Create a :class:`RelatedCognosPackage` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosPackage reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosPackage(guid=self.guid) - return RelatedCognosPackage(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -463,6 +402,11 @@ class CognosPackageRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -553,6 +497,7 @@ class CognosPackageNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -638,9 +583,6 @@ def _cognos_package_to_nested(cognos_package: CognosPackage) -> CognosPackageNes is_incomplete=cognos_package.is_incomplete, provenance_type=cognos_package.provenance_type, home_id=cognos_package.home_id, - depth=cognos_package.depth, - immediate_upstream=cognos_package.immediate_upstream, - immediate_downstream=cognos_package.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -674,6 +616,7 @@ def _cognos_package_from_nested(nested: CognosPackageNested) -> CognosPackage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -682,9 +625,6 @@ def _cognos_package_from_nested(nested: CognosPackageNested) -> CognosPackage: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_package_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -750,6 +690,9 @@ def _cognos_package_from_nested_bytes(data: bytes, serde: Serde) -> CognosPackag CognosPackage.METRICS = RelationField("metrics") CognosPackage.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosPackage.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosPackage.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosPackage.MEANINGS = RelationField("meanings") CognosPackage.MC_MONITORS = RelationField("mcMonitors") CognosPackage.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cognos_related.py b/pyatlan_v9/model/assets/cognos_related.py index d3e3d5d2d..1f3936cea 100644 --- a/pyatlan_v9/model/assets/cognos_related.py +++ b/pyatlan_v9/model/assets/cognos_related.py @@ -100,7 +100,7 @@ class RelatedCognosDatasource(RelatedCognos): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "CognosDatasource" so it serializes correctly - cognos_datasource_connection_string: Union[str, None, UnsetType] = UNSET + cognos_connection_string: Union[str, None, UnsetType] = UNSET """Connection string of a Cognos datasource.""" def __post_init__(self) -> None: @@ -148,10 +148,10 @@ class RelatedCognosFolder(RelatedCognos): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "CognosFolder" so it serializes correctly - cognos_folder_sub_folder_count: Union[int, None, UnsetType] = UNSET + cognos_sub_folder_count: Union[int, None, UnsetType] = UNSET """Number of sub-folders in the folder.""" - cognos_folder_child_objects_count: Union[int, None, UnsetType] = UNSET + 
cognos_child_objects_count: Union[int, None, UnsetType] = UNSET """Number of children in the folder (excluding subfolders).""" def __post_init__(self) -> None: @@ -214,13 +214,13 @@ class RelatedCognosColumn(RelatedCognos): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "CognosColumn" so it serializes correctly - cognos_column_datatype: Union[str, None, UnsetType] = UNSET + cognos_datatype: Union[str, None, UnsetType] = UNSET """Data type of the CognosColumn.""" - cognos_column_nullable: Union[str, None, UnsetType] = UNSET + cognos_nullable: Union[str, None, UnsetType] = UNSET """Whether the CognosColumn is nullable.""" - cognos_column_regular_aggregate: Union[str, None, UnsetType] = UNSET + cognos_regular_aggregate: Union[str, None, UnsetType] = UNSET """How data should be summarized when aggregated across different dimensions or groupings.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/cognos_report.py b/pyatlan_v9/model/assets/cognos_report.py index 4dd8a545b..9f23715e1 100644 --- a/pyatlan_v9/model/assets/cognos_report.py +++ b/pyatlan_v9/model/assets/cognos_report.py @@ -38,10 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cognos_related import RelatedCognosFolder, RelatedCognosReport +from .cognos_related import RelatedCognosFolder from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -89,6 +90,7 @@ class CognosReport(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None 
MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +108,8 @@ class CognosReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CognosReport" + cognos_id: Union[str, None, UnsetType] = UNSET """ID of the asset in Cognos.""" @@ -185,6 +189,11 @@ class CognosReport(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -248,72 +257,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CognosReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cognos_folder is UNSET: - errors.append("cognos_folder is required for creation") - if errors: - raise ValueError(f"CognosReport validation failed: {errors}") - - def minimize(self) -> "CognosReport": - """ - Return a minimal copy of this CognosReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CognosReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CognosReport instance with only the minimum required fields. - """ - self.validate() - return CognosReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCognosReport": - """ - Create a :class:`RelatedCognosReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCognosReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCognosReport(guid=self.guid) - return RelatedCognosReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -452,6 +395,11 @@ class CognosReportRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -541,6 +489,7 @@ class CognosReportNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -626,9 +575,6 @@ def _cognos_report_to_nested(cognos_report: CognosReport) -> CognosReportNested: is_incomplete=cognos_report.is_incomplete, provenance_type=cognos_report.provenance_type, home_id=cognos_report.home_id, - depth=cognos_report.depth, - immediate_upstream=cognos_report.immediate_upstream, - immediate_downstream=cognos_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -662,6 +608,7 @@ def _cognos_report_from_nested(nested: CognosReportNested) -> CognosReport: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -670,9 +617,6 @@ def _cognos_report_from_nested(nested: CognosReportNested) -> CognosReport: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cognos_report_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -735,6 +679,9 @@ def _cognos_report_from_nested_bytes(data: bytes, serde: Serde) -> CognosReport: CognosReport.METRICS = RelationField("metrics") CognosReport.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CognosReport.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CognosReport.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CognosReport.MEANINGS = RelationField("meanings") CognosReport.MC_MONITORS = RelationField("mcMonitors") CognosReport.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/collection.py b/pyatlan_v9/model/assets/collection.py index bce2930ec..654c6bf58 100644 --- a/pyatlan_v9/model/assets/collection.py +++ b/pyatlan_v9/model/assets/collection.py @@ -42,9 +42,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor -from .namespace_related import RelatedCollection, RelatedFolder +from .namespace_related import RelatedFolder from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -74,6 +75,7 @@ class Collection(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -87,6 +89,8 @@ class Collection(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Collection" + icon: Union[str, None, UnsetType] = UNSET """Image used to represent this collection.""" @@ -125,6 +129,11 @@ class Collection(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -168,66 +177,6 @@ class Collection(Asset): def __post_init__(self) -> None: self.type_name = "Collection" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Collection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Collection validation failed: {errors}") - - def minimize(self) -> "Collection": - """ - Return a minimal copy of this Collection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Collection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Collection instance with only the minimum required fields. - """ - self.validate() - return Collection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCollection": - """ - Create a :class:`RelatedCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCollection reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCollection(guid=self.guid) - return RelatedCollection(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, client: "AtlanClient", name: str) -> "Collection": @@ -345,6 +294,11 @@ class CollectionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -415,6 +369,7 @@ class CollectionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -478,9 +433,6 @@ def _collection_to_nested(collection: Collection) -> CollectionNested: is_incomplete=collection.is_incomplete, provenance_type=collection.provenance_type, home_id=collection.home_id, - depth=collection.depth, - immediate_upstream=collection.immediate_upstream, - immediate_downstream=collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -512,6 +464,7 @@ def _collection_from_nested(nested: CollectionNested) -> Collection: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -520,9 +473,6 @@ def _collection_from_nested(nested: CollectionNested) -> Collection: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_collection_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -557,6 +507,9 @@ def _collection_from_nested_bytes(data: bytes, serde: Serde) -> Collection: Collection.METRICS = RelationField("metrics") Collection.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Collection.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Collection.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Collection.MEANINGS = RelationField("meanings") Collection.MC_MONITORS = RelationField("mcMonitors") Collection.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/column.py b/pyatlan_v9/model/assets/column.py index fc2a78c9f..0b4360837 100644 --- a/pyatlan_v9/model/assets/column.py +++ b/pyatlan_v9/model/assets/column.py @@ -55,6 +55,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import RelatedMongoDBCollection @@ -98,8 +99,8 @@ class Column(Asset): DATA_TYPE: ClassVar[Any] = None SUB_DATA_TYPE: ClassVar[Any] = None - COLUMN_COMPRESSION: ClassVar[Any] = None - COLUMN_ENCODING: ClassVar[Any] = None + SQL_COMPRESSION: ClassVar[Any] = None + SQL_ENCODING: ClassVar[Any] = None RAW_DATA_TYPE_DEFINITION: ClassVar[Any] = None ORDER: ClassVar[Any] = None NESTED_COLUMN_ORDER: ClassVar[Any] = None @@ -124,51 +125,52 @@ class Column(Asset): VALIDATIONS: ClassVar[Any] = None PARENT_COLUMN_QUALIFIED_NAME: ClassVar[Any] = None PARENT_COLUMN_NAME: ClassVar[Any] = None - COLUMN_DISTINCT_VALUES_COUNT: ClassVar[Any] = None - COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[Any] = None - COLUMN_HISTOGRAM: ClassVar[Any] = None - COLUMN_MAX: ClassVar[Any] = None - COLUMN_MIN: ClassVar[Any] = None - COLUMN_MEAN: ClassVar[Any] = 
None - COLUMN_SUM: ClassVar[Any] = None - COLUMN_MEDIAN: ClassVar[Any] = None - COLUMN_STANDARD_DEVIATION: ClassVar[Any] = None - COLUMN_UNIQUE_VALUES_COUNT: ClassVar[Any] = None - COLUMN_UNIQUE_VALUES_COUNT_LONG: ClassVar[Any] = None - COLUMN_AVERAGE: ClassVar[Any] = None - COLUMN_AVERAGE_LENGTH: ClassVar[Any] = None - COLUMN_DUPLICATE_VALUES_COUNT: ClassVar[Any] = None - COLUMN_DUPLICATE_VALUES_COUNT_LONG: ClassVar[Any] = None - COLUMN_MAXIMUM_STRING_LENGTH: ClassVar[Any] = None + SQL_DISTINCT_VALUES_COUNT: ClassVar[Any] = None + SQL_DISTINCT_VALUES_COUNT_LONG: ClassVar[Any] = None + SQL_DISTINCT_VALUES_PERCENTAGE: ClassVar[Any] = None + SQL_HISTOGRAM: ClassVar[Any] = None + SQL_MAX: ClassVar[Any] = None + SQL_MIN: ClassVar[Any] = None + SQL_MEAN: ClassVar[Any] = None + SQL_SUM: ClassVar[Any] = None + SQL_MEDIAN: ClassVar[Any] = None + SQL_STANDARD_DEVIATION: ClassVar[Any] = None + SQL_UNIQUE_VALUES_COUNT: ClassVar[Any] = None + SQL_UNIQUE_VALUES_COUNT_LONG: ClassVar[Any] = None + SQL_AVERAGE: ClassVar[Any] = None + SQL_AVERAGE_LENGTH: ClassVar[Any] = None + SQL_DUPLICATE_VALUES_COUNT: ClassVar[Any] = None + SQL_DUPLICATE_VALUES_COUNT_LONG: ClassVar[Any] = None + SQL_MAXIMUM_STRING_LENGTH: ClassVar[Any] = None COLUMN_MAXS: ClassVar[Any] = None - COLUMN_MINIMUM_STRING_LENGTH: ClassVar[Any] = None + SQL_MINIMUM_STRING_LENGTH: ClassVar[Any] = None COLUMN_MINS: ClassVar[Any] = None - COLUMN_MISSING_VALUES_COUNT: ClassVar[Any] = None - COLUMN_MISSING_VALUES_COUNT_LONG: ClassVar[Any] = None - COLUMN_MISSING_VALUES_PERCENTAGE: ClassVar[Any] = None - COLUMN_UNIQUENESS_PERCENTAGE: ClassVar[Any] = None - COLUMN_VARIANCE: ClassVar[Any] = None + SQL_MISSING_VALUES_COUNT: ClassVar[Any] = None + SQL_MISSING_VALUES_COUNT_LONG: ClassVar[Any] = None + SQL_MISSING_VALUES_PERCENTAGE: ClassVar[Any] = None + SQL_UNIQUENESS_PERCENTAGE: ClassVar[Any] = None + SQL_VARIANCE: ClassVar[Any] = None COLUMN_TOP_VALUES: ClassVar[Any] = None - COLUMN_MAX_VALUE: ClassVar[Any] = None - 
COLUMN_MIN_VALUE: ClassVar[Any] = None - COLUMN_MEAN_VALUE: ClassVar[Any] = None - COLUMN_SUM_VALUE: ClassVar[Any] = None - COLUMN_MEDIAN_VALUE: ClassVar[Any] = None - COLUMN_STANDARD_DEVIATION_VALUE: ClassVar[Any] = None - COLUMN_AVERAGE_VALUE: ClassVar[Any] = None - COLUMN_VARIANCE_VALUE: ClassVar[Any] = None - COLUMN_AVERAGE_LENGTH_VALUE: ClassVar[Any] = None - COLUMN_DISTRIBUTION_HISTOGRAM: ClassVar[Any] = None - COLUMN_DEPTH_LEVEL: ClassVar[Any] = None + SQL_MAX_VALUE: ClassVar[Any] = None + SQL_MIN_VALUE: ClassVar[Any] = None + SQL_MEAN_VALUE: ClassVar[Any] = None + SQL_SUM_VALUE: ClassVar[Any] = None + SQL_MEDIAN_VALUE: ClassVar[Any] = None + SQL_STANDARD_DEVIATION_VALUE: ClassVar[Any] = None + SQL_AVERAGE_VALUE: ClassVar[Any] = None + SQL_VARIANCE_VALUE: ClassVar[Any] = None + SQL_AVERAGE_LENGTH_VALUE: ClassVar[Any] = None + SQL_DISTRIBUTION_HISTOGRAM: ClassVar[Any] = None + SQL_DEPTH_LEVEL: ClassVar[Any] = None NOSQL_COLLECTION_NAME: ClassVar[Any] = None NOSQL_COLLECTION_QUALIFIED_NAME: ClassVar[Any] = None - COLUMN_IS_MEASURE: ClassVar[Any] = None - COLUMN_MEASURE_TYPE: ClassVar[Any] = None - COLUMN_AI_INSIGHTS_IS_MEASURE: ClassVar[Any] = None - COLUMN_AI_INSIGHTS_MEASURE_TYPE: ClassVar[Any] = None - COLUMN_AI_INSIGHTS_IS_DIMENSION: ClassVar[Any] = None - COLUMN_AI_INSIGHTS_DIMENSION_TYPE: ClassVar[Any] = None - COLUMN_AI_INSIGHTS_FOREIGN_KEY_COLUMN_QUALIFIED_NAME: ClassVar[Any] = None + SQL_IS_MEASURE: ClassVar[Any] = None + SQL_MEASURE_TYPE: ClassVar[Any] = None + SQL_AI_INSIGHTS_IS_MEASURE: ClassVar[Any] = None + SQL_AI_INSIGHTS_MEASURE_TYPE: ClassVar[Any] = None + SQL_AI_INSIGHTS_IS_DIMENSION: ClassVar[Any] = None + SQL_AI_INSIGHTS_DIMENSION_TYPE: ClassVar[Any] = None + SQL_AI_INSIGHTS_FOREIGN_KEY_COLUMN_QUALIFIED_NAME: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -222,6 +224,7 @@ class Column(Asset): DBT_MODEL_COLUMNS: ClassVar[Any] = None 
COLUMN_DBT_MODEL_COLUMNS: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MONGO_DB_COLLECTION: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -256,16 +259,18 @@ class Column(Asset): SQL_INSIGHT_FILTERS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Column" + data_type: Union[str, None, UnsetType] = UNSET """Data type of values in this column.""" sub_data_type: Union[str, None, UnsetType] = UNSET """Sub-data type of this column.""" - column_compression: Union[str, None, UnsetType] = UNSET + sql_compression: Union[str, None, UnsetType] = UNSET """Compression type of this column.""" - column_encoding: Union[str, None, UnsetType] = UNSET + sql_encoding: Union[str, None, UnsetType] = UNSET """Encoding type of this column.""" raw_data_type_definition: Union[str, None, UnsetType] = UNSET @@ -340,112 +345,115 @@ class Column(Asset): parent_column_name: Union[str, None, UnsetType] = UNSET """Simple name of the column this column is nested within, for STRUCT and NESTED columns.""" - column_distinct_values_count: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" - column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" - column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + + sql_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" - column_max: Union[float, None, UnsetType] = UNSET + sql_max: Union[float, None, UnsetType] = 
UNSET """Greatest value in a numeric column.""" - column_min: Union[float, None, UnsetType] = UNSET + sql_min: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean: Union[float, None, UnsetType] = UNSET + sql_mean: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum: Union[float, None, UnsetType] = UNSET + sql_sum: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median: Union[float, None, UnsetType] = UNSET + sql_median: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation: Union[float, None, UnsetType] = UNSET + sql_standard_deviation: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_unique_values_count: Union[int, None, UnsetType] = UNSET + sql_unique_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_unique_values_count_long: Union[int, None, UnsetType] = UNSET + sql_unique_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_average: Union[float, None, UnsetType] = UNSET + sql_average: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_average_length: Union[float, None, UnsetType] = UNSET + sql_average_length: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_duplicate_values_count: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - 
column_maximum_string_length: Union[int, None, UnsetType] = UNSET + sql_maximum_string_length: Union[int, None, UnsetType] = UNSET """Length of the longest value in a string column.""" column_maxs: Union[List[str], None, UnsetType] = UNSET """List of the greatest values in a column.""" - column_minimum_string_length: Union[int, None, UnsetType] = UNSET + sql_minimum_string_length: Union[int, None, UnsetType] = UNSET """Length of the shortest value in a string column.""" column_mins: Union[List[str], None, UnsetType] = UNSET """List of the least values in a column.""" - column_missing_values_count: Union[int, None, UnsetType] = UNSET + sql_missing_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_count_long: Union[int, None, UnsetType] = UNSET + sql_missing_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_percentage: Union[float, None, UnsetType] = UNSET + sql_missing_values_percentage: Union[float, None, UnsetType] = UNSET """Percentage of rows in a column that do not contain content.""" - column_uniqueness_percentage: Union[float, None, UnsetType] = UNSET + sql_uniqueness_percentage: Union[float, None, UnsetType] = UNSET """Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique.""" - column_variance: Union[float, None, UnsetType] = UNSET + sql_variance: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" column_top_values: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """List of top values in this column.""" - column_max_value: Union[float, None, UnsetType] = UNSET + sql_max_value: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min_value: Union[float, None, UnsetType] = UNSET + sql_min_value: 
Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean_value: Union[float, None, UnsetType] = UNSET + sql_mean_value: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum_value: Union[float, None, UnsetType] = UNSET + sql_sum_value: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median_value: Union[float, None, UnsetType] = UNSET + sql_median_value: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation_value: Union[float, None, UnsetType] = UNSET + sql_standard_deviation_value: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_average_value: Union[float, None, UnsetType] = UNSET + sql_average_value: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_variance_value: Union[float, None, UnsetType] = UNSET + sql_variance_value: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" - column_average_length_value: Union[float, None, UnsetType] = UNSET + sql_average_length_value: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """Detailed information representing a histogram of values for a column.""" - column_depth_level: Union[int, None, UnsetType] = UNSET + sql_depth_level: Union[int, None, UnsetType] = UNSET """Level of nesting of this column, used for STRUCT and NESTED columns.""" nosql_collection_name: Union[str, None, UnsetType] = UNSET @@ -454,27 +462,27 @@ class Column(Asset): nosql_collection_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the cosmos/mongo collection in which this SQL asset 
(column) exists, or empty if it does not exist within a cosmos/mongo collection.""" - column_is_measure: Union[bool, None, UnsetType] = UNSET + sql_is_measure: Union[bool, None, UnsetType] = UNSET """When true, this column is of type measure/calculated.""" - column_measure_type: Union[str, None, UnsetType] = UNSET + sql_measure_type: Union[str, None, UnsetType] = UNSET """The type of measure/calculated column this is, eg: base, calculated, derived.""" - column_ai_insights_is_measure: Union[bool, None, UnsetType] = UNSET + sql_ai_insights_is_measure: Union[bool, None, UnsetType] = UNSET """When true, this column is identified as a measure/calculated column by AI analysis of query patterns.""" - column_ai_insights_measure_type: Union[str, None, UnsetType] = UNSET + sql_ai_insights_measure_type: Union[str, None, UnsetType] = UNSET """Type of measure/calculated column as classified by AI analysis, for example: base, calculated, derived.""" - column_ai_insights_is_dimension: Union[bool, None, UnsetType] = UNSET + sql_ai_insights_is_dimension: Union[bool, None, UnsetType] = UNSET """When true, this column is identified as a dimension by AI analysis of query patterns.""" - column_ai_insights_dimension_type: Union[str, None, UnsetType] = UNSET + sql_ai_insights_dimension_type: Union[str, None, UnsetType] = UNSET """Type of dimension as classified by AI analysis, for example: time, categorical, geographic.""" - column_ai_insights_foreign_key_column_qualified_name: Union[ - str, None, UnsetType - ] = UNSET + sql_ai_insights_foreign_key_column_qualified_name: Union[str, None, UnsetType] = ( + UNSET + ) """Qualified name of the column in another table that this column likely references as a foreign key, inferred by AI analysis of query patterns.""" query_count: Union[int, None, UnsetType] = UNSET @@ -650,6 +658,11 @@ class Column(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + 
gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -776,99 +789,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Column instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if self.order is UNSET: - errors.append("order is required for creation") - if ( - self.table is UNSET - and self.table_partition is UNSET - and self.view is UNSET - and self.materialised_view is UNSET - ): - errors.append( - "one of table, table_partition, view, materialised_view is required for creation" - ) - if self.table is not UNSET or self.table_partition is not UNSET: - if self.table_name is UNSET: - errors.append("table_name is required for creation") - if self.table_qualified_name is UNSET: - errors.append("table_qualified_name is required for creation") - if self.view is not UNSET or self.materialised_view is not UNSET: - if self.view_name is UNSET: - errors.append("view_name is required for creation") - if self.view_qualified_name is UNSET: - errors.append("view_qualified_name is required for creation") - if errors: - raise ValueError(f"Column validation failed: {errors}") - - def minimize(self) -> "Column": - """ - Return a minimal copy of this 
Column with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Column with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Column instance with only the minimum required fields. - """ - self.validate() - return Column(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedColumn": - """ - Create a :class:`RelatedColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedColumn(guid=self.guid) - return RelatedColumn(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -1126,10 +1046,10 @@ class ColumnAttributes(AssetAttributes): sub_data_type: Union[str, None, UnsetType] = UNSET """Sub-data type of this column.""" - column_compression: Union[str, None, UnsetType] = UNSET + sql_compression: Union[str, None, UnsetType] = UNSET """Compression type of this column.""" - column_encoding: Union[str, None, UnsetType] = UNSET + sql_encoding: Union[str, None, UnsetType] = UNSET """Encoding type of this column.""" raw_data_type_definition: Union[str, None, UnsetType] = UNSET @@ -1204,112 +1124,115 @@ class ColumnAttributes(AssetAttributes): parent_column_name: Union[str, None, UnsetType] = UNSET """Simple name of the column this column is nested within, for STRUCT and NESTED columns.""" - column_distinct_values_count: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" - column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct 
values.""" - column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + + sql_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" - column_max: Union[float, None, UnsetType] = UNSET + sql_max: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min: Union[float, None, UnsetType] = UNSET + sql_min: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean: Union[float, None, UnsetType] = UNSET + sql_mean: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum: Union[float, None, UnsetType] = UNSET + sql_sum: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median: Union[float, None, UnsetType] = UNSET + sql_median: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation: Union[float, None, UnsetType] = UNSET + sql_standard_deviation: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_unique_values_count: Union[int, None, UnsetType] = UNSET + sql_unique_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_unique_values_count_long: Union[int, None, UnsetType] = UNSET + sql_unique_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_average: Union[float, None, UnsetType] = UNSET + sql_average: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_average_length: Union[float, None, UnsetType] = UNSET + sql_average_length: Union[float, None, 
UnsetType] = UNSET """Average length of values in a string column.""" - column_duplicate_values_count: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_maximum_string_length: Union[int, None, UnsetType] = UNSET + sql_maximum_string_length: Union[int, None, UnsetType] = UNSET """Length of the longest value in a string column.""" column_maxs: Union[List[str], None, UnsetType] = UNSET """List of the greatest values in a column.""" - column_minimum_string_length: Union[int, None, UnsetType] = UNSET + sql_minimum_string_length: Union[int, None, UnsetType] = UNSET """Length of the shortest value in a string column.""" column_mins: Union[List[str], None, UnsetType] = UNSET """List of the least values in a column.""" - column_missing_values_count: Union[int, None, UnsetType] = UNSET + sql_missing_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_count_long: Union[int, None, UnsetType] = UNSET + sql_missing_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_percentage: Union[float, None, UnsetType] = UNSET + sql_missing_values_percentage: Union[float, None, UnsetType] = UNSET """Percentage of rows in a column that do not contain content.""" - column_uniqueness_percentage: Union[float, None, UnsetType] = UNSET + sql_uniqueness_percentage: Union[float, None, UnsetType] = UNSET """Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique.""" - column_variance: Union[float, None, UnsetType] = UNSET + 
sql_variance: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" column_top_values: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """List of top values in this column.""" - column_max_value: Union[float, None, UnsetType] = UNSET + sql_max_value: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min_value: Union[float, None, UnsetType] = UNSET + sql_min_value: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean_value: Union[float, None, UnsetType] = UNSET + sql_mean_value: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum_value: Union[float, None, UnsetType] = UNSET + sql_sum_value: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median_value: Union[float, None, UnsetType] = UNSET + sql_median_value: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation_value: Union[float, None, UnsetType] = UNSET + sql_standard_deviation_value: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_average_value: Union[float, None, UnsetType] = UNSET + sql_average_value: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_variance_value: Union[float, None, UnsetType] = UNSET + sql_variance_value: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" - column_average_length_value: Union[float, None, UnsetType] = UNSET + sql_average_length_value: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """Detailed information representing a histogram 
of values for a column.""" - column_depth_level: Union[int, None, UnsetType] = UNSET + sql_depth_level: Union[int, None, UnsetType] = UNSET """Level of nesting of this column, used for STRUCT and NESTED columns.""" nosql_collection_name: Union[str, None, UnsetType] = UNSET @@ -1318,27 +1241,27 @@ class ColumnAttributes(AssetAttributes): nosql_collection_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the cosmos/mongo collection in which this SQL asset (column) exists, or empty if it does not exist within a cosmos/mongo collection.""" - column_is_measure: Union[bool, None, UnsetType] = UNSET + sql_is_measure: Union[bool, None, UnsetType] = UNSET """When true, this column is of type measure/calculated.""" - column_measure_type: Union[str, None, UnsetType] = UNSET + sql_measure_type: Union[str, None, UnsetType] = UNSET """The type of measure/calculated column this is, eg: base, calculated, derived.""" - column_ai_insights_is_measure: Union[bool, None, UnsetType] = UNSET + sql_ai_insights_is_measure: Union[bool, None, UnsetType] = UNSET """When true, this column is identified as a measure/calculated column by AI analysis of query patterns.""" - column_ai_insights_measure_type: Union[str, None, UnsetType] = UNSET + sql_ai_insights_measure_type: Union[str, None, UnsetType] = UNSET """Type of measure/calculated column as classified by AI analysis, for example: base, calculated, derived.""" - column_ai_insights_is_dimension: Union[bool, None, UnsetType] = UNSET + sql_ai_insights_is_dimension: Union[bool, None, UnsetType] = UNSET """When true, this column is identified as a dimension by AI analysis of query patterns.""" - column_ai_insights_dimension_type: Union[str, None, UnsetType] = UNSET + sql_ai_insights_dimension_type: Union[str, None, UnsetType] = UNSET """Type of dimension as classified by AI analysis, for example: time, categorical, geographic.""" - column_ai_insights_foreign_key_column_qualified_name: Union[ - str, None, UnsetType - ] = UNSET 
+ sql_ai_insights_foreign_key_column_qualified_name: Union[str, None, UnsetType] = ( + UNSET + ) """Qualified name of the column in another table that this column likely references as a foreign key, inferred by AI analysis of query patterns.""" query_count: Union[int, None, UnsetType] = UNSET @@ -1518,6 +1441,11 @@ class ColumnRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1681,6 +1609,7 @@ class ColumnNested(AssetNested): "dbt_model_columns", "column_dbt_model_columns", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mongo_db_collection", "mc_monitors", @@ -1722,8 +1651,8 @@ def _populate_column_attrs(attrs: ColumnAttributes, obj: Column) -> None: _populate_asset_attrs(attrs, obj) attrs.data_type = obj.data_type attrs.sub_data_type = obj.sub_data_type - attrs.column_compression = obj.column_compression - attrs.column_encoding = obj.column_encoding + attrs.sql_compression = obj.sql_compression + attrs.sql_encoding = obj.sql_encoding attrs.raw_data_type_definition = obj.raw_data_type_definition attrs.order = obj.order attrs.nested_column_order = obj.nested_column_order @@ -1748,52 +1677,53 @@ def _populate_column_attrs(attrs: ColumnAttributes, obj: Column) -> None: attrs.validations = obj.validations attrs.parent_column_qualified_name = obj.parent_column_qualified_name attrs.parent_column_name = obj.parent_column_name - attrs.column_distinct_values_count = obj.column_distinct_values_count - attrs.column_distinct_values_count_long = obj.column_distinct_values_count_long - 
attrs.column_histogram = obj.column_histogram - attrs.column_max = obj.column_max - attrs.column_min = obj.column_min - attrs.column_mean = obj.column_mean - attrs.column_sum = obj.column_sum - attrs.column_median = obj.column_median - attrs.column_standard_deviation = obj.column_standard_deviation - attrs.column_unique_values_count = obj.column_unique_values_count - attrs.column_unique_values_count_long = obj.column_unique_values_count_long - attrs.column_average = obj.column_average - attrs.column_average_length = obj.column_average_length - attrs.column_duplicate_values_count = obj.column_duplicate_values_count - attrs.column_duplicate_values_count_long = obj.column_duplicate_values_count_long - attrs.column_maximum_string_length = obj.column_maximum_string_length + attrs.sql_distinct_values_count = obj.sql_distinct_values_count + attrs.sql_distinct_values_count_long = obj.sql_distinct_values_count_long + attrs.sql_distinct_values_percentage = obj.sql_distinct_values_percentage + attrs.sql_histogram = obj.sql_histogram + attrs.sql_max = obj.sql_max + attrs.sql_min = obj.sql_min + attrs.sql_mean = obj.sql_mean + attrs.sql_sum = obj.sql_sum + attrs.sql_median = obj.sql_median + attrs.sql_standard_deviation = obj.sql_standard_deviation + attrs.sql_unique_values_count = obj.sql_unique_values_count + attrs.sql_unique_values_count_long = obj.sql_unique_values_count_long + attrs.sql_average = obj.sql_average + attrs.sql_average_length = obj.sql_average_length + attrs.sql_duplicate_values_count = obj.sql_duplicate_values_count + attrs.sql_duplicate_values_count_long = obj.sql_duplicate_values_count_long + attrs.sql_maximum_string_length = obj.sql_maximum_string_length attrs.column_maxs = obj.column_maxs - attrs.column_minimum_string_length = obj.column_minimum_string_length + attrs.sql_minimum_string_length = obj.sql_minimum_string_length attrs.column_mins = obj.column_mins - attrs.column_missing_values_count = obj.column_missing_values_count - 
attrs.column_missing_values_count_long = obj.column_missing_values_count_long - attrs.column_missing_values_percentage = obj.column_missing_values_percentage - attrs.column_uniqueness_percentage = obj.column_uniqueness_percentage - attrs.column_variance = obj.column_variance + attrs.sql_missing_values_count = obj.sql_missing_values_count + attrs.sql_missing_values_count_long = obj.sql_missing_values_count_long + attrs.sql_missing_values_percentage = obj.sql_missing_values_percentage + attrs.sql_uniqueness_percentage = obj.sql_uniqueness_percentage + attrs.sql_variance = obj.sql_variance attrs.column_top_values = obj.column_top_values - attrs.column_max_value = obj.column_max_value - attrs.column_min_value = obj.column_min_value - attrs.column_mean_value = obj.column_mean_value - attrs.column_sum_value = obj.column_sum_value - attrs.column_median_value = obj.column_median_value - attrs.column_standard_deviation_value = obj.column_standard_deviation_value - attrs.column_average_value = obj.column_average_value - attrs.column_variance_value = obj.column_variance_value - attrs.column_average_length_value = obj.column_average_length_value - attrs.column_distribution_histogram = obj.column_distribution_histogram - attrs.column_depth_level = obj.column_depth_level + attrs.sql_max_value = obj.sql_max_value + attrs.sql_min_value = obj.sql_min_value + attrs.sql_mean_value = obj.sql_mean_value + attrs.sql_sum_value = obj.sql_sum_value + attrs.sql_median_value = obj.sql_median_value + attrs.sql_standard_deviation_value = obj.sql_standard_deviation_value + attrs.sql_average_value = obj.sql_average_value + attrs.sql_variance_value = obj.sql_variance_value + attrs.sql_average_length_value = obj.sql_average_length_value + attrs.sql_distribution_histogram = obj.sql_distribution_histogram + attrs.sql_depth_level = obj.sql_depth_level attrs.nosql_collection_name = obj.nosql_collection_name attrs.nosql_collection_qualified_name = obj.nosql_collection_qualified_name - 
attrs.column_is_measure = obj.column_is_measure - attrs.column_measure_type = obj.column_measure_type - attrs.column_ai_insights_is_measure = obj.column_ai_insights_is_measure - attrs.column_ai_insights_measure_type = obj.column_ai_insights_measure_type - attrs.column_ai_insights_is_dimension = obj.column_ai_insights_is_dimension - attrs.column_ai_insights_dimension_type = obj.column_ai_insights_dimension_type - attrs.column_ai_insights_foreign_key_column_qualified_name = ( - obj.column_ai_insights_foreign_key_column_qualified_name + attrs.sql_is_measure = obj.sql_is_measure + attrs.sql_measure_type = obj.sql_measure_type + attrs.sql_ai_insights_is_measure = obj.sql_ai_insights_is_measure + attrs.sql_ai_insights_measure_type = obj.sql_ai_insights_measure_type + attrs.sql_ai_insights_is_dimension = obj.sql_ai_insights_is_dimension + attrs.sql_ai_insights_dimension_type = obj.sql_ai_insights_dimension_type + attrs.sql_ai_insights_foreign_key_column_qualified_name = ( + obj.sql_ai_insights_foreign_key_column_qualified_name ) attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count @@ -1831,8 +1761,8 @@ def _extract_column_attrs(attrs: ColumnAttributes) -> dict: result = _extract_asset_attrs(attrs) result["data_type"] = attrs.data_type result["sub_data_type"] = attrs.sub_data_type - result["column_compression"] = attrs.column_compression - result["column_encoding"] = attrs.column_encoding + result["sql_compression"] = attrs.sql_compression + result["sql_encoding"] = attrs.sql_encoding result["raw_data_type_definition"] = attrs.raw_data_type_definition result["order"] = attrs.order result["nested_column_order"] = attrs.nested_column_order @@ -1857,58 +1787,53 @@ def _extract_column_attrs(attrs: ColumnAttributes) -> dict: result["validations"] = attrs.validations result["parent_column_qualified_name"] = attrs.parent_column_qualified_name result["parent_column_name"] = attrs.parent_column_name - result["column_distinct_values_count"] = 
attrs.column_distinct_values_count - result["column_distinct_values_count_long"] = ( - attrs.column_distinct_values_count_long - ) - result["column_histogram"] = attrs.column_histogram - result["column_max"] = attrs.column_max - result["column_min"] = attrs.column_min - result["column_mean"] = attrs.column_mean - result["column_sum"] = attrs.column_sum - result["column_median"] = attrs.column_median - result["column_standard_deviation"] = attrs.column_standard_deviation - result["column_unique_values_count"] = attrs.column_unique_values_count - result["column_unique_values_count_long"] = attrs.column_unique_values_count_long - result["column_average"] = attrs.column_average - result["column_average_length"] = attrs.column_average_length - result["column_duplicate_values_count"] = attrs.column_duplicate_values_count - result["column_duplicate_values_count_long"] = ( - attrs.column_duplicate_values_count_long - ) - result["column_maximum_string_length"] = attrs.column_maximum_string_length + result["sql_distinct_values_count"] = attrs.sql_distinct_values_count + result["sql_distinct_values_count_long"] = attrs.sql_distinct_values_count_long + result["sql_distinct_values_percentage"] = attrs.sql_distinct_values_percentage + result["sql_histogram"] = attrs.sql_histogram + result["sql_max"] = attrs.sql_max + result["sql_min"] = attrs.sql_min + result["sql_mean"] = attrs.sql_mean + result["sql_sum"] = attrs.sql_sum + result["sql_median"] = attrs.sql_median + result["sql_standard_deviation"] = attrs.sql_standard_deviation + result["sql_unique_values_count"] = attrs.sql_unique_values_count + result["sql_unique_values_count_long"] = attrs.sql_unique_values_count_long + result["sql_average"] = attrs.sql_average + result["sql_average_length"] = attrs.sql_average_length + result["sql_duplicate_values_count"] = attrs.sql_duplicate_values_count + result["sql_duplicate_values_count_long"] = attrs.sql_duplicate_values_count_long + result["sql_maximum_string_length"] = 
attrs.sql_maximum_string_length result["column_maxs"] = attrs.column_maxs - result["column_minimum_string_length"] = attrs.column_minimum_string_length + result["sql_minimum_string_length"] = attrs.sql_minimum_string_length result["column_mins"] = attrs.column_mins - result["column_missing_values_count"] = attrs.column_missing_values_count - result["column_missing_values_count_long"] = attrs.column_missing_values_count_long - result["column_missing_values_percentage"] = attrs.column_missing_values_percentage - result["column_uniqueness_percentage"] = attrs.column_uniqueness_percentage - result["column_variance"] = attrs.column_variance + result["sql_missing_values_count"] = attrs.sql_missing_values_count + result["sql_missing_values_count_long"] = attrs.sql_missing_values_count_long + result["sql_missing_values_percentage"] = attrs.sql_missing_values_percentage + result["sql_uniqueness_percentage"] = attrs.sql_uniqueness_percentage + result["sql_variance"] = attrs.sql_variance result["column_top_values"] = attrs.column_top_values - result["column_max_value"] = attrs.column_max_value - result["column_min_value"] = attrs.column_min_value - result["column_mean_value"] = attrs.column_mean_value - result["column_sum_value"] = attrs.column_sum_value - result["column_median_value"] = attrs.column_median_value - result["column_standard_deviation_value"] = attrs.column_standard_deviation_value - result["column_average_value"] = attrs.column_average_value - result["column_variance_value"] = attrs.column_variance_value - result["column_average_length_value"] = attrs.column_average_length_value - result["column_distribution_histogram"] = attrs.column_distribution_histogram - result["column_depth_level"] = attrs.column_depth_level + result["sql_max_value"] = attrs.sql_max_value + result["sql_min_value"] = attrs.sql_min_value + result["sql_mean_value"] = attrs.sql_mean_value + result["sql_sum_value"] = attrs.sql_sum_value + result["sql_median_value"] = attrs.sql_median_value + 
result["sql_standard_deviation_value"] = attrs.sql_standard_deviation_value + result["sql_average_value"] = attrs.sql_average_value + result["sql_variance_value"] = attrs.sql_variance_value + result["sql_average_length_value"] = attrs.sql_average_length_value + result["sql_distribution_histogram"] = attrs.sql_distribution_histogram + result["sql_depth_level"] = attrs.sql_depth_level result["nosql_collection_name"] = attrs.nosql_collection_name result["nosql_collection_qualified_name"] = attrs.nosql_collection_qualified_name - result["column_is_measure"] = attrs.column_is_measure - result["column_measure_type"] = attrs.column_measure_type - result["column_ai_insights_is_measure"] = attrs.column_ai_insights_is_measure - result["column_ai_insights_measure_type"] = attrs.column_ai_insights_measure_type - result["column_ai_insights_is_dimension"] = attrs.column_ai_insights_is_dimension - result["column_ai_insights_dimension_type"] = ( - attrs.column_ai_insights_dimension_type - ) - result["column_ai_insights_foreign_key_column_qualified_name"] = ( - attrs.column_ai_insights_foreign_key_column_qualified_name + result["sql_is_measure"] = attrs.sql_is_measure + result["sql_measure_type"] = attrs.sql_measure_type + result["sql_ai_insights_is_measure"] = attrs.sql_ai_insights_is_measure + result["sql_ai_insights_measure_type"] = attrs.sql_ai_insights_measure_type + result["sql_ai_insights_is_dimension"] = attrs.sql_ai_insights_is_dimension + result["sql_ai_insights_dimension_type"] = attrs.sql_ai_insights_dimension_type + result["sql_ai_insights_foreign_key_column_qualified_name"] = ( + attrs.sql_ai_insights_foreign_key_column_qualified_name ) result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count @@ -1981,9 +1906,6 @@ def _column_to_nested(column: Column) -> ColumnNested: is_incomplete=column.is_incomplete, provenance_type=column.provenance_type, home_id=column.home_id, - depth=column.depth, - 
immediate_upstream=column.immediate_upstream, - immediate_downstream=column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2013,6 +1935,7 @@ def _column_from_nested(nested: ColumnNested) -> Column: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -2021,9 +1944,6 @@ def _column_from_nested(nested: ColumnNested) -> Column: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2054,8 +1974,8 @@ def _column_from_nested_bytes(data: bytes, serde: Serde) -> Column: Column.DATA_TYPE = KeywordTextField("dataType", "dataType", "dataType.text") Column.SUB_DATA_TYPE = KeywordField("subDataType", "subDataType") -Column.COLUMN_COMPRESSION = KeywordField("columnCompression", "columnCompression") -Column.COLUMN_ENCODING = KeywordField("columnEncoding", "columnEncoding") +Column.SQL_COMPRESSION = KeywordField("sqlCompression", "sqlCompression") +Column.SQL_ENCODING = KeywordField("sqlEncoding", "sqlEncoding") Column.RAW_DATA_TYPE_DEFINITION = KeywordField( "rawDataTypeDefinition", "rawDataTypeDefinition" ) @@ -2088,101 +2008,100 @@ def _column_from_nested_bytes(data: bytes, serde: Serde) -> Column: "parentColumnQualifiedName.text", ) Column.PARENT_COLUMN_NAME = KeywordField("parentColumnName", "parentColumnName") -Column.COLUMN_DISTINCT_VALUES_COUNT = NumericField( - "columnDistinctValuesCount", "columnDistinctValuesCount" +Column.SQL_DISTINCT_VALUES_COUNT = NumericField( + "sqlDistinctValuesCount", "sqlDistinctValuesCount" ) 
-Column.COLUMN_DISTINCT_VALUES_COUNT_LONG = NumericField( - "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" +Column.SQL_DISTINCT_VALUES_COUNT_LONG = NumericField( + "sqlDistinctValuesCountLong", "sqlDistinctValuesCountLong" ) -Column.COLUMN_HISTOGRAM = KeywordField("columnHistogram", "columnHistogram") -Column.COLUMN_MAX = NumericField("columnMax", "columnMax") -Column.COLUMN_MIN = NumericField("columnMin", "columnMin") -Column.COLUMN_MEAN = NumericField("columnMean", "columnMean") -Column.COLUMN_SUM = NumericField("columnSum", "columnSum") -Column.COLUMN_MEDIAN = NumericField("columnMedian", "columnMedian") -Column.COLUMN_STANDARD_DEVIATION = NumericField( - "columnStandardDeviation", "columnStandardDeviation" +Column.SQL_DISTINCT_VALUES_PERCENTAGE = NumericField( + "sqlDistinctValuesPercentage", "sqlDistinctValuesPercentage" ) -Column.COLUMN_UNIQUE_VALUES_COUNT = NumericField( - "columnUniqueValuesCount", "columnUniqueValuesCount" +Column.SQL_HISTOGRAM = KeywordField("sqlHistogram", "sqlHistogram") +Column.SQL_MAX = NumericField("sqlMax", "sqlMax") +Column.SQL_MIN = NumericField("sqlMin", "sqlMin") +Column.SQL_MEAN = NumericField("sqlMean", "sqlMean") +Column.SQL_SUM = NumericField("sqlSum", "sqlSum") +Column.SQL_MEDIAN = NumericField("sqlMedian", "sqlMedian") +Column.SQL_STANDARD_DEVIATION = NumericField( + "sqlStandardDeviation", "sqlStandardDeviation" ) -Column.COLUMN_UNIQUE_VALUES_COUNT_LONG = NumericField( - "columnUniqueValuesCountLong", "columnUniqueValuesCountLong" +Column.SQL_UNIQUE_VALUES_COUNT = NumericField( + "sqlUniqueValuesCount", "sqlUniqueValuesCount" ) -Column.COLUMN_AVERAGE = NumericField("columnAverage", "columnAverage") -Column.COLUMN_AVERAGE_LENGTH = NumericField( - "columnAverageLength", "columnAverageLength" +Column.SQL_UNIQUE_VALUES_COUNT_LONG = NumericField( + "sqlUniqueValuesCountLong", "sqlUniqueValuesCountLong" ) -Column.COLUMN_DUPLICATE_VALUES_COUNT = NumericField( - "columnDuplicateValuesCount", 
"columnDuplicateValuesCount" +Column.SQL_AVERAGE = NumericField("sqlAverage", "sqlAverage") +Column.SQL_AVERAGE_LENGTH = NumericField("sqlAverageLength", "sqlAverageLength") +Column.SQL_DUPLICATE_VALUES_COUNT = NumericField( + "sqlDuplicateValuesCount", "sqlDuplicateValuesCount" ) -Column.COLUMN_DUPLICATE_VALUES_COUNT_LONG = NumericField( - "columnDuplicateValuesCountLong", "columnDuplicateValuesCountLong" +Column.SQL_DUPLICATE_VALUES_COUNT_LONG = NumericField( + "sqlDuplicateValuesCountLong", "sqlDuplicateValuesCountLong" ) -Column.COLUMN_MAXIMUM_STRING_LENGTH = NumericField( - "columnMaximumStringLength", "columnMaximumStringLength" +Column.SQL_MAXIMUM_STRING_LENGTH = NumericField( + "sqlMaximumStringLength", "sqlMaximumStringLength" ) Column.COLUMN_MAXS = KeywordField("columnMaxs", "columnMaxs") -Column.COLUMN_MINIMUM_STRING_LENGTH = NumericField( - "columnMinimumStringLength", "columnMinimumStringLength" +Column.SQL_MINIMUM_STRING_LENGTH = NumericField( + "sqlMinimumStringLength", "sqlMinimumStringLength" ) Column.COLUMN_MINS = KeywordField("columnMins", "columnMins") -Column.COLUMN_MISSING_VALUES_COUNT = NumericField( - "columnMissingValuesCount", "columnMissingValuesCount" +Column.SQL_MISSING_VALUES_COUNT = NumericField( + "sqlMissingValuesCount", "sqlMissingValuesCount" ) -Column.COLUMN_MISSING_VALUES_COUNT_LONG = NumericField( - "columnMissingValuesCountLong", "columnMissingValuesCountLong" +Column.SQL_MISSING_VALUES_COUNT_LONG = NumericField( + "sqlMissingValuesCountLong", "sqlMissingValuesCountLong" ) -Column.COLUMN_MISSING_VALUES_PERCENTAGE = NumericField( - "columnMissingValuesPercentage", "columnMissingValuesPercentage" +Column.SQL_MISSING_VALUES_PERCENTAGE = NumericField( + "sqlMissingValuesPercentage", "sqlMissingValuesPercentage" ) -Column.COLUMN_UNIQUENESS_PERCENTAGE = NumericField( - "columnUniquenessPercentage", "columnUniquenessPercentage" +Column.SQL_UNIQUENESS_PERCENTAGE = NumericField( + "sqlUniquenessPercentage", "sqlUniquenessPercentage" ) 
-Column.COLUMN_VARIANCE = NumericField("columnVariance", "columnVariance") +Column.SQL_VARIANCE = NumericField("sqlVariance", "sqlVariance") Column.COLUMN_TOP_VALUES = KeywordField("columnTopValues", "columnTopValues") -Column.COLUMN_MAX_VALUE = NumericField("columnMaxValue", "columnMaxValue") -Column.COLUMN_MIN_VALUE = NumericField("columnMinValue", "columnMinValue") -Column.COLUMN_MEAN_VALUE = NumericField("columnMeanValue", "columnMeanValue") -Column.COLUMN_SUM_VALUE = NumericField("columnSumValue", "columnSumValue") -Column.COLUMN_MEDIAN_VALUE = NumericField("columnMedianValue", "columnMedianValue") -Column.COLUMN_STANDARD_DEVIATION_VALUE = NumericField( - "columnStandardDeviationValue", "columnStandardDeviationValue" +Column.SQL_MAX_VALUE = NumericField("sqlMaxValue", "sqlMaxValue") +Column.SQL_MIN_VALUE = NumericField("sqlMinValue", "sqlMinValue") +Column.SQL_MEAN_VALUE = NumericField("sqlMeanValue", "sqlMeanValue") +Column.SQL_SUM_VALUE = NumericField("sqlSumValue", "sqlSumValue") +Column.SQL_MEDIAN_VALUE = NumericField("sqlMedianValue", "sqlMedianValue") +Column.SQL_STANDARD_DEVIATION_VALUE = NumericField( + "sqlStandardDeviationValue", "sqlStandardDeviationValue" ) -Column.COLUMN_AVERAGE_VALUE = NumericField("columnAverageValue", "columnAverageValue") -Column.COLUMN_VARIANCE_VALUE = NumericField( - "columnVarianceValue", "columnVarianceValue" +Column.SQL_AVERAGE_VALUE = NumericField("sqlAverageValue", "sqlAverageValue") +Column.SQL_VARIANCE_VALUE = NumericField("sqlVarianceValue", "sqlVarianceValue") +Column.SQL_AVERAGE_LENGTH_VALUE = NumericField( + "sqlAverageLengthValue", "sqlAverageLengthValue" ) -Column.COLUMN_AVERAGE_LENGTH_VALUE = NumericField( - "columnAverageLengthValue", "columnAverageLengthValue" +Column.SQL_DISTRIBUTION_HISTOGRAM = KeywordField( + "sqlDistributionHistogram", "sqlDistributionHistogram" ) -Column.COLUMN_DISTRIBUTION_HISTOGRAM = KeywordField( - "columnDistributionHistogram", "columnDistributionHistogram" -) 
-Column.COLUMN_DEPTH_LEVEL = NumericField("columnDepthLevel", "columnDepthLevel") +Column.SQL_DEPTH_LEVEL = NumericField("sqlDepthLevel", "sqlDepthLevel") Column.NOSQL_COLLECTION_NAME = KeywordField( "nosqlCollectionName", "nosqlCollectionName" ) Column.NOSQL_COLLECTION_QUALIFIED_NAME = KeywordField( "nosqlCollectionQualifiedName", "nosqlCollectionQualifiedName" ) -Column.COLUMN_IS_MEASURE = BooleanField("columnIsMeasure", "columnIsMeasure") -Column.COLUMN_MEASURE_TYPE = KeywordField("columnMeasureType", "columnMeasureType") -Column.COLUMN_AI_INSIGHTS_IS_MEASURE = BooleanField( - "columnAiInsightsIsMeasure", "columnAiInsightsIsMeasure" +Column.SQL_IS_MEASURE = BooleanField("sqlIsMeasure", "sqlIsMeasure") +Column.SQL_MEASURE_TYPE = KeywordField("sqlMeasureType", "sqlMeasureType") +Column.SQL_AI_INSIGHTS_IS_MEASURE = BooleanField( + "sqlAiInsightsIsMeasure", "sqlAiInsightsIsMeasure" ) -Column.COLUMN_AI_INSIGHTS_MEASURE_TYPE = KeywordField( - "columnAiInsightsMeasureType", "columnAiInsightsMeasureType" +Column.SQL_AI_INSIGHTS_MEASURE_TYPE = KeywordField( + "sqlAiInsightsMeasureType", "sqlAiInsightsMeasureType" ) -Column.COLUMN_AI_INSIGHTS_IS_DIMENSION = BooleanField( - "columnAiInsightsIsDimension", "columnAiInsightsIsDimension" +Column.SQL_AI_INSIGHTS_IS_DIMENSION = BooleanField( + "sqlAiInsightsIsDimension", "sqlAiInsightsIsDimension" ) -Column.COLUMN_AI_INSIGHTS_DIMENSION_TYPE = KeywordField( - "columnAiInsightsDimensionType", "columnAiInsightsDimensionType" +Column.SQL_AI_INSIGHTS_DIMENSION_TYPE = KeywordField( + "sqlAiInsightsDimensionType", "sqlAiInsightsDimensionType" ) -Column.COLUMN_AI_INSIGHTS_FOREIGN_KEY_COLUMN_QUALIFIED_NAME = KeywordField( - "columnAiInsightsForeignKeyColumnQualifiedName", - "columnAiInsightsForeignKeyColumnQualifiedName", +Column.SQL_AI_INSIGHTS_FOREIGN_KEY_COLUMN_QUALIFIED_NAME = KeywordField( + "sqlAiInsightsForeignKeyColumnQualifiedName", + "sqlAiInsightsForeignKeyColumnQualifiedName", ) Column.QUERY_COUNT = NumericField("queryCount", 
"queryCount") Column.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") @@ -2260,6 +2179,9 @@ def _column_from_nested_bytes(data: bytes, serde: Serde) -> Column: Column.DBT_MODEL_COLUMNS = RelationField("dbtModelColumns") Column.COLUMN_DBT_MODEL_COLUMNS = RelationField("columnDbtModelColumns") Column.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Column.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Column.MEANINGS = RelationField("meanings") Column.MONGO_DB_COLLECTION = RelationField("mongoDBCollection") Column.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/column_process.py b/pyatlan_v9/model/assets/column_process.py index 4b38552f0..348212591 100644 --- a/pyatlan_v9/model/assets/column_process.py +++ b/pyatlan_v9/model/assets/column_process.py @@ -50,6 +50,7 @@ from .fabric_related import RelatedFabricActivity from .fivetran_related import RelatedFivetranConnector from .flow_related import RelatedFlowControlOperation +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .matillion_related import RelatedMatillionComponent from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -79,6 +80,7 @@ class ColumnProcess(Asset): AST: ClassVar[Any] = None ADDITIONAL_ETL_CONTEXT: ClassVar[Any] = None AI_DATASET_TYPE: ClassVar[Any] = None + IS_PASS_THROUGH: ClassVar[Any] = None ADF_ACTIVITY: ClassVar[Any] = None AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None @@ -94,6 +96,7 @@ class ColumnProcess(Asset): FABRIC_ACTIVITIES: ClassVar[Any] = None FIVETRAN_CONNECTOR: ClassVar[Any] = None FLOW_ORCHESTRATED_BY: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_COMPONENT: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -114,6 +117,8 @@ class ColumnProcess(Asset): SODA_CHECKS: 
ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ColumnProcess" + code: Union[str, None, UnsetType] = UNSET """Code that ran within the process.""" @@ -132,6 +137,9 @@ class ColumnProcess(Asset): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + adf_activity: Union[RelatedAdfActivity, None, UnsetType] = UNSET """ADF Activity that is associated with this lineage process.""" @@ -179,6 +187,11 @@ class ColumnProcess(Asset): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -251,72 +264,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ColumnProcess instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.process is UNSET: - errors.append("process is required for creation") - if errors: - raise ValueError(f"ColumnProcess validation failed: {errors}") - - def minimize(self) -> "ColumnProcess": - """ - Return a minimal copy of this ColumnProcess with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ColumnProcess with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ColumnProcess instance with only the minimum required fields. - """ - self.validate() - return ColumnProcess(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedColumnProcess": - """ - Create a :class:`RelatedColumnProcess` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedColumnProcess reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedColumnProcess(guid=self.guid) - return RelatedColumnProcess(qualified_name=self.qualified_name) - @staticmethod def _extract_guid(relationship: Any) -> Union[str, None]: """Extract guid from a relationship-like object.""" @@ -524,6 +471,9 @@ class ColumnProcessAttributes(AssetAttributes): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + class ColumnProcessRelationshipAttributes(AssetRelationshipAttributes): """ColumnProcess-specific relationship attributes for nested API format.""" @@ -575,6 +525,11 @@ class ColumnProcessRelationshipAttributes(AssetRelationshipAttributes): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -675,6 +630,7 @@ class ColumnProcessNested(AssetNested): "fabric_activities", "fivetran_connector", "flow_orchestrated_by", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_component", "mc_monitors", @@ -710,6 +666,7 @@ def _populate_column_process_attrs( attrs.ast = obj.ast attrs.additional_etl_context = obj.additional_etl_context attrs.ai_dataset_type = obj.ai_dataset_type + attrs.is_pass_through = obj.is_pass_through def _extract_column_process_attrs(attrs: ColumnProcessAttributes) -> dict: @@ -723,6 +680,7 @@ def _extract_column_process_attrs(attrs: 
ColumnProcessAttributes) -> dict: result["ast"] = attrs.ast result["additional_etl_context"] = attrs.additional_etl_context result["ai_dataset_type"] = attrs.ai_dataset_type + result["is_pass_through"] = attrs.is_pass_through return result @@ -759,9 +717,6 @@ def _column_process_to_nested(column_process: ColumnProcess) -> ColumnProcessNes is_incomplete=column_process.is_incomplete, provenance_type=column_process.provenance_type, home_id=column_process.home_id, - depth=column_process.depth, - immediate_upstream=column_process.immediate_upstream, - immediate_downstream=column_process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -795,6 +750,7 @@ def _column_process_from_nested(nested: ColumnProcessNested) -> ColumnProcess: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -803,9 +759,6 @@ def _column_process_from_nested(nested: ColumnProcessNested) -> ColumnProcess: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_column_process_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -828,7 +781,11 @@ def _column_process_from_nested_bytes(data: bytes, serde: Serde) -> ColumnProces # --------------------------------------------------------------------------- # Deferred field descriptor initialization # --------------------------------------------------------------------------- -from pyatlan.model.fields.atlan_fields import KeywordField, RelationField # noqa: E402 +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, + KeywordField, + RelationField, +) 
ColumnProcess.CODE = KeywordField("code", "code") ColumnProcess.SQL = KeywordField("sql", "sql") @@ -840,6 +797,7 @@ def _column_process_from_nested_bytes(data: bytes, serde: Serde) -> ColumnProces "additionalEtlContext", "additionalEtlContext" ) ColumnProcess.AI_DATASET_TYPE = KeywordField("aiDatasetType", "aiDatasetType") +ColumnProcess.IS_PASS_THROUGH = BooleanField("isPassThrough", "isPassThrough") ColumnProcess.ADF_ACTIVITY = RelationField("adfActivity") ColumnProcess.AIRFLOW_TASKS = RelationField("airflowTasks") ColumnProcess.ANOMALO_CHECKS = RelationField("anomaloChecks") @@ -857,6 +815,9 @@ def _column_process_from_nested_bytes(data: bytes, serde: Serde) -> ColumnProces ColumnProcess.FABRIC_ACTIVITIES = RelationField("fabricActivities") ColumnProcess.FIVETRAN_CONNECTOR = RelationField("fivetranConnector") ColumnProcess.FLOW_ORCHESTRATED_BY = RelationField("flowOrchestratedBy") +ColumnProcess.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ColumnProcess.MEANINGS = RelationField("meanings") ColumnProcess.MATILLION_COMPONENT = RelationField("matillionComponent") ColumnProcess.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/connection.py b/pyatlan_v9/model/assets/connection.py index 04554d40e..e50dee587 100644 --- a/pyatlan_v9/model/assets/connection.py +++ b/pyatlan_v9/model/assets/connection.py @@ -40,10 +40,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .connection_related import RelatedConnection from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .process_related import RelatedConnectionProcess @@ -76,6 +76,7 @@ class Connection(Asset): 
PREVIEW_CREDENTIAL_STRATEGY: ClassVar[Any] = None POLICY_STRATEGY: ClassVar[Any] = None POLICY_STRATEGY_FOR_SAMPLE_PREVIEW: ClassVar[Any] = None + CONNECTION_REVERSE_SYNC_STRATEGY: ClassVar[Any] = None QUERY_USERNAME_STRATEGY: ClassVar[Any] = None ROW_LIMIT: ClassVar[Any] = None QUERY_TIMEOUT: ClassVar[Any] = None @@ -109,6 +110,7 @@ class Connection(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -122,6 +124,8 @@ class Connection(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Connection" + category: Union[str, None, UnsetType] = UNSET """Type of connection, for example WAREHOUSE, RDBMS, etc.""" @@ -161,6 +165,9 @@ class Connection(Asset): policy_strategy_for_sample_preview: Union[str, None, UnsetType] = UNSET """Policy strategy is a configuration that determines whether the Atlan policy will be applied to the results of insight queries and whether the query will be rewritten. policyStrategyForSamplePreview config is applicable for sample preview call from assets screen""" + connection_reverse_sync_strategy: Union[str, None, UnsetType] = UNSET + """Strategy configuration for reverse-sync operations on this connection, stored as a stringified JSON array. Each element specifies a source entity type and whether reverse-sync is enabled for it, e.g. 
[{"source_entity": "Aspects", "enabled": true}].""" + query_username_strategy: Union[str, None, UnsetType] = UNSET """Username strategy to use for this connection for queries.""" @@ -278,6 +285,11 @@ class Connection(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -331,67 +343,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^default/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Connection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if errors: - raise ValueError(f"Connection validation failed: {errors}") - - def minimize(self) -> "Connection": - """ - Return a minimal copy of this Connection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Connection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Connection instance with only the minimum required fields. - """ - self.validate() - return Connection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedConnection": - """ - Create a :class:`RelatedConnection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedConnection reference to this asset. - """ - if self.guid is not UNSET: - return RelatedConnection(guid=self.guid) - return RelatedConnection(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -636,6 +587,9 @@ class ConnectionAttributes(AssetAttributes): policy_strategy_for_sample_preview: Union[str, None, UnsetType] = UNSET """Policy strategy is a configuration that determines whether the Atlan policy will be applied to the results of insight queries and whether the query will be rewritten. 
policyStrategyForSamplePreview config is applicable for sample preview call from assets screen""" + connection_reverse_sync_strategy: Union[str, None, UnsetType] = UNSET + """Strategy configuration for reverse-sync operations on this connection, stored as a stringified JSON array. Each element specifies a source entity type and whether reverse-sync is enabled for it, e.g. [{"source_entity": "Aspects", "enabled": true}].""" + query_username_strategy: Union[str, None, UnsetType] = UNSET """Username strategy to use for this connection for queries.""" @@ -757,6 +711,11 @@ class ConnectionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -831,6 +790,7 @@ class ConnectionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -862,6 +822,7 @@ def _populate_connection_attrs(attrs: ConnectionAttributes, obj: Connection) -> attrs.preview_credential_strategy = obj.preview_credential_strategy attrs.policy_strategy = obj.policy_strategy attrs.policy_strategy_for_sample_preview = obj.policy_strategy_for_sample_preview + attrs.connection_reverse_sync_strategy = obj.connection_reverse_sync_strategy attrs.query_username_strategy = obj.query_username_strategy attrs.row_limit = obj.row_limit attrs.query_timeout = obj.query_timeout @@ -917,6 +878,7 @@ def _extract_connection_attrs(attrs: ConnectionAttributes) -> dict: result["policy_strategy_for_sample_preview"] = ( attrs.policy_strategy_for_sample_preview ) + result["connection_reverse_sync_strategy"] = 
attrs.connection_reverse_sync_strategy result["query_username_strategy"] = attrs.query_username_strategy result["row_limit"] = attrs.row_limit result["query_timeout"] = attrs.query_timeout @@ -986,9 +948,6 @@ def _connection_to_nested(connection: Connection) -> ConnectionNested: is_incomplete=connection.is_incomplete, provenance_type=connection.provenance_type, home_id=connection.home_id, - depth=connection.depth, - immediate_upstream=connection.immediate_upstream, - immediate_downstream=connection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1020,6 +979,7 @@ def _connection_from_nested(nested: ConnectionNested) -> Connection: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1028,9 +988,6 @@ def _connection_from_nested(nested: ConnectionNested) -> Connection: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_connection_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1082,6 +1039,9 @@ def _connection_from_nested_bytes(data: bytes, serde: Serde) -> Connection: Connection.POLICY_STRATEGY_FOR_SAMPLE_PREVIEW = KeywordField( "policyStrategyForSamplePreview", "policyStrategyForSamplePreview" ) +Connection.CONNECTION_REVERSE_SYNC_STRATEGY = KeywordField( + "connectionReverseSyncStrategy", "connectionReverseSyncStrategy" +) Connection.QUERY_USERNAME_STRATEGY = KeywordField( "queryUsernameStrategy", "queryUsernameStrategy" ) @@ -1152,6 +1112,9 @@ def _connection_from_nested_bytes(data: bytes, serde: Serde) -> Connection: Connection.METRICS = RelationField("metrics") 
Connection.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Connection.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Connection.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Connection.MEANINGS = RelationField("meanings") Connection.MC_MONITORS = RelationField("mcMonitors") Connection.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/connection_related.py b/pyatlan_v9/model/assets/connection_related.py index f46574ea8..483ec770e 100644 --- a/pyatlan_v9/model/assets/connection_related.py +++ b/pyatlan_v9/model/assets/connection_related.py @@ -73,6 +73,9 @@ class RelatedConnection(RelatedAsset): policy_strategy_for_sample_preview: Union[str, None, UnsetType] = UNSET """Policy strategy is a configuration that determines whether the Atlan policy will be applied to the results of insight queries and whether the query will be rewritten. policyStrategyForSamplePreview config is applicable for sample preview call from assets screen""" + connection_reverse_sync_strategy: Union[str, None, UnsetType] = UNSET + """Strategy configuration for reverse-sync operations on this connection, stored as a stringified JSON array. Each element specifies a source entity type and whether reverse-sync is enabled for it, e.g. [{"source_entity": "Aspects", "enabled": true}].""" + query_username_strategy: Union[str, None, UnsetType] = UNSET """Username strategy to use for this connection for queries.""" diff --git a/pyatlan_v9/model/assets/context.py b/pyatlan_v9/model/assets/context.py new file mode 100644 index 000000000..90313384b --- /dev/null +++ b/pyatlan_v9/model/assets/context.py @@ -0,0 +1,543 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Context asset model with flattened inheritance. 
+ +This module provides: +- Context: Flat asset class (easy to use) +- ContextAttributes: Nested attributes struct (extends AssetAttributes) +- ContextNested: Nested API format struct +""" + +from __future__ import annotations + +from typing import Any, ClassVar, Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .airflow_related import RelatedAirflowTask +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .model_related import RelatedModelAttribute, RelatedModelEntity +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .partial_related import RelatedPartialField, RelatedPartialObject +from .process_related import RelatedProcess +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck +from .spark_related import RelatedSparkJob +from pyatlan_v9.model.conversion_utils import categorize_relationships, merge_relationships +from pyatlan_v9.model.serde import Serde, get_serde +from pyatlan_v9.model.transform import register_asset + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= + +@register_asset +class Context(Asset): + """ + Base class for Context assets. 
Context provides curated context repositories for NL2SQL and AI-powered data analysis. + """ + + CONTEXT_REPOSITORY_QUALIFIED_NAME: ClassVar[Any] = None + CATALOG_DATASET_GUID: ClassVar[Any] = None + INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + PARTIAL_CHILD_FIELDS: ClassVar[Any] = None + PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None + INPUT_TO_PROCESSES: ClassVar[Any] = None + OUTPUT_FROM_PROCESSES: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None + INPUT_TO_SPARK_JOBS: ClassVar[Any] = None + OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + + type_name: Union[str, UnsetType] = "Context" + + context_repository_qualified_name: Union[str, None, UnsetType] = UNSET + """Qualified name of the context repository to which this asset belongs.""" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which 
    this asset provides input."""

    output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks from which this asset is output."""

    anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET
    """Checks that run on this asset."""

    application: Union[RelatedApplication, None, UnsetType] = UNSET
    """Application owning the Asset."""

    application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET
    """ApplicationField owning the Asset."""

    data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest version of the data contract (in any status) for this asset."""

    data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest certified version of the data contract for this asset."""

    output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an output port."""

    input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an input port."""

    model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET
    """Entities implemented by this asset."""

    model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET
    """Attributes implemented by this asset."""

    metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET
    """"""

    dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules that are applied on this dataset."""

    dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules where this dataset is referenced."""

    gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET
    """Dataplex entries (assets) that have aspects of this Aspect Type attached."""

    meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET
    """Glossary terms that are linked to this asset."""

    mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET
    """Monitors that observe this asset."""

    mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET
    """"""

    partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET
    """Partial fields contained in the asset."""

    partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET
    """Partial objects contained in the asset."""

    input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes to which this asset provides input."""

    output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes from which this asset is produced as output."""

    user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    files: Union[List[RelatedFile], None, UnsetType] = UNSET
    """"""

    links: Union[List[RelatedLink], None, UnsetType] = UNSET
    """Links that are attached to this asset."""

    readme: Union[RelatedReadme, None, UnsetType] = UNSET
    """README that is linked to this asset."""

    schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET
    """Schema registry subjects associated with this asset."""

    soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET
    """"""

    input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    def __post_init__(self) -> None:
        # Unconditionally pins the Atlas type name, overriding whatever value
        # the constructor received for `type_name`.
        self.type_name = "Context"



    # =========================================================================
    # Optimized Serialization Methods (override Asset base class)
    # =========================================================================

    def to_json(self, nested: bool = True, serde: Serde | None = None) -> str:
        """
        Convert to JSON string using optimized nested struct serialization.

        Args:
            nested: If True (default), use nested API format. If False, use flat format.
            serde: Optional Serde instance for encoder reuse. Uses shared singleton if None.

        Returns:
            JSON string representation
        """
        if serde is None:
            serde = get_serde()
        if nested:
            return self.to_nested_bytes(serde).decode("utf-8")
        else:
            return serde.encode(self).decode("utf-8")

    def to_nested_bytes(self, serde: Serde | None = None) -> bytes:
        """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate)."""
        if serde is None:
            serde = get_serde()
        return _context_to_nested_bytes(self, serde)

    @staticmethod
    def from_json(json_data: str | bytes, serde: Serde | None = None) -> Context:
        """
        Create from JSON string or bytes using optimized nested struct deserialization.

        Args:
            json_data: JSON string or bytes to deserialize
            serde: Optional Serde instance for decoder reuse. Uses shared singleton if None.

        Returns:
            Context instance
        """
        if isinstance(json_data, str):
            json_data = json_data.encode("utf-8")
        if serde is None:
            serde = get_serde()
        return _context_from_nested_bytes(json_data, serde)


# =============================================================================
# NESTED FORMAT CLASSES
# =============================================================================

class ContextAttributes(AssetAttributes):
    """Context-specific attributes for nested API format."""

    context_repository_qualified_name: Union[str, None, UnsetType] = UNSET
    """Qualified name of the context repository to which this asset belongs."""

    catalog_dataset_guid: Union[str, None, UnsetType] = UNSET
    """Unique identifier of the dataset this asset belongs to."""

class ContextRelationshipAttributes(AssetRelationshipAttributes):
    """Context-specific relationship attributes for nested API format."""

    input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks to which this asset provides input."""

    output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks from which this asset is output."""

    anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET
    """Checks that run on this asset."""

    application: Union[RelatedApplication, None, UnsetType] = UNSET
    """Application owning the Asset."""

    application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET
    """ApplicationField owning the Asset."""

    data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest version of the data contract (in any status) for this asset."""

    data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest certified version of the data contract for this asset."""

    output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an output port."""

    input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an input port."""

    model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET
    """Entities implemented by this asset."""

    model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET
    """Attributes implemented by this asset."""

    metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET
    """"""

    dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules that are applied on this dataset."""

    dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules where this dataset is referenced."""

    gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET
    """Dataplex entries (assets) that have aspects of this Aspect Type attached."""

    meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET
    """Glossary terms that are linked to this asset."""

    mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET
    """Monitors that observe this asset."""

    mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET
    """"""

    partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET
    """Partial fields contained in the asset."""

    partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET
    """Partial objects contained in the asset."""

    input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes to which this asset provides input."""

    output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes from which this asset is produced as output."""

    user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    files: Union[List[RelatedFile], None, UnsetType] = UNSET
    """"""

    links: Union[List[RelatedLink], None, UnsetType] = UNSET
    """Links that are attached to this asset."""

    readme: Union[RelatedReadme, None, UnsetType] = UNSET
    """README that is linked to this asset."""

    schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET
    """Schema registry subjects associated with this asset."""

    soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET
    """"""

    input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

class ContextNested(AssetNested):
    """Context in nested API format for high-performance serialization."""

    attributes: Union[ContextAttributes, UnsetType] = UNSET
    relationship_attributes: Union[ContextRelationshipAttributes, UnsetType] = UNSET
    append_relationship_attributes: Union[ContextRelationshipAttributes, UnsetType] = UNSET
    remove_relationship_attributes: Union[ContextRelationshipAttributes, UnsetType] = UNSET

# =============================================================================
# CONVERSION HELPERS & CONSTANTS
# =============================================================================

# Every relationship field on Context, inherited ones first.  NOTE(review):
# "meanings" is listed here AND passed as a top-level field to ContextNested in
# _context_to_nested below — confirm the API expects it in both places, or one
# of the two writes is redundant.
_CONTEXT_REL_FIELDS: List[str] = [
    *_ASSET_REL_FIELDS,
    "input_to_airflow_tasks",
    "output_from_airflow_tasks",
    "anomalo_checks",
    "application",
    "application_field",
    "data_contract_latest",
    "data_contract_latest_certified",
    "output_port_data_products",
    "input_port_data_products",
    "model_implemented_entities",
    "model_implemented_attributes",
    "metrics",
    "dq_base_dataset_rules",
    "dq_reference_dataset_rules",
    "gcp_dataplex_aspect_type_metadata_entities",
    "meanings",
    "mc_monitors",
    "mc_incidents",
    "partial_child_fields",
    "partial_child_objects",
    "input_to_processes",
    "output_from_processes",
    "user_def_relationship_to",
    "user_def_relationship_from",
    "files",
    "links",
    "readme",
    "schema_registry_subjects",
    "soda_checks",
    "input_to_spark_jobs",
    "output_from_spark_jobs",
]

def _populate_context_attrs(attrs: ContextAttributes, obj: Context) -> None:
    """Populate Context-specific attributes on the attrs struct."""
    # Inherited Asset attributes first, then the two Context-only fields.
    _populate_asset_attrs(attrs, obj)
    attrs.context_repository_qualified_name = obj.context_repository_qualified_name
    attrs.catalog_dataset_guid = obj.catalog_dataset_guid

def _extract_context_attrs(attrs: ContextAttributes) -> dict:
    """Extract all Context attributes from the attrs struct into a flat dict."""
    result = _extract_asset_attrs(attrs)
    result["context_repository_qualified_name"] = attrs.context_repository_qualified_name
    result["catalog_dataset_guid"] = attrs.catalog_dataset_guid
    return result

# =============================================================================
# CONVERSION FUNCTIONS
# =============================================================================


def _context_to_nested(context: Context) -> ContextNested:
    """Convert flat Context to nested format."""
    attrs = ContextAttributes()
    _populate_context_attrs(attrs, context)
    # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE)
    replace_rels, append_rels, remove_rels = categorize_relationships(
        context, _CONTEXT_REL_FIELDS, ContextRelationshipAttributes
    )
    return ContextNested(
        guid=context.guid,
        type_name=context.type_name,
        status=context.status,
        version=context.version,
        create_time=context.create_time,
        update_time=context.update_time,
        created_by=context.created_by,
        updated_by=context.updated_by,
        classifications=context.classifications,
        classification_names=context.classification_names,
        meanings=context.meanings,
        labels=context.labels,
        business_attributes=context.business_attributes,
        custom_attributes=context.custom_attributes,
        pending_tasks=context.pending_tasks,
        proxy=context.proxy,
        is_incomplete=context.is_incomplete,
        provenance_type=context.provenance_type,
        home_id=context.home_id,
        attributes=attrs,
        relationship_attributes=replace_rels,
        append_relationship_attributes=append_rels,
        remove_relationship_attributes=remove_rels,
    )

def _context_from_nested(nested: ContextNested) -> Context:
    """Convert nested format to flat Context."""
    # An absent attributes struct is treated as an empty one so extraction
    # below still yields every key (with UNSET values).
    attrs = nested.attributes if nested.attributes is not UNSET else ContextAttributes()
    # Merge relationships from all three buckets
    merged_rels = merge_relationships(
        nested.relationship_attributes,
        nested.append_relationship_attributes,
        nested.remove_relationship_attributes,
        _CONTEXT_REL_FIELDS,
        ContextRelationshipAttributes
    )
    return Context(
        guid=nested.guid,
        type_name=nested.type_name,
        status=nested.status,
        version=nested.version,
        create_time=nested.create_time,
        update_time=nested.update_time,
        created_by=nested.created_by,
        updated_by=nested.updated_by,
        classifications=nested.classifications,
        classification_names=nested.classification_names,
        meanings=nested.meanings,
        labels=nested.labels,
        business_attributes=nested.business_attributes,
        custom_attributes=nested.custom_attributes,
        pending_tasks=nested.pending_tasks,
        proxy=nested.proxy,
        is_incomplete=nested.is_incomplete,
        provenance_type=nested.provenance_type,
        home_id=nested.home_id,
        **_extract_context_attrs(attrs),
        # Merged relationship attributes
        **merged_rels,
    )

def _context_to_nested_bytes(context: Context, serde: Serde) -> bytes:
    """Convert flat Context to nested JSON bytes."""
    return serde.encode(_context_to_nested(context))


def _context_from_nested_bytes(data: bytes, serde: Serde) -> Context:
    """Convert nested JSON bytes to flat Context."""
    nested = serde.decode(data, ContextNested)
    return _context_from_nested(nested)

# ---------------------------------------------------------------------------
#
# Deferred field descriptor initialization
# ---------------------------------------------------------------------------
# NOTE(review): this imports from the `pyatlan` package while everything else
# in this file lives under `pyatlan_v9` — presumably intentional reuse of the
# v1 field descriptors, but confirm it is not a generator mix-up.
from pyatlan.model.fields.atlan_fields import (  # noqa: E402
    KeywordField,
    RelationField,
)

Context.CONTEXT_REPOSITORY_QUALIFIED_NAME = KeywordField("contextRepositoryQualifiedName", "contextRepositoryQualifiedName")
Context.CATALOG_DATASET_GUID = KeywordField("catalogDatasetGuid", "catalogDatasetGuid")
Context.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks")
Context.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks")
Context.ANOMALO_CHECKS = RelationField("anomaloChecks")
Context.APPLICATION = RelationField("application")
Context.APPLICATION_FIELD = RelationField("applicationField")
Context.DATA_CONTRACT_LATEST = RelationField("dataContractLatest")
Context.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified")
Context.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts")
Context.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts")
Context.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities")
Context.MODEL_IMPLEMENTED_ATTRIBUTES = RelationField("modelImplementedAttributes")
Context.METRICS = RelationField("metrics")
Context.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules")
Context.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules")
Context.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField("gcpDataplexAspectTypeMetadataEntities")
Context.MEANINGS = RelationField("meanings")
Context.MC_MONITORS = RelationField("mcMonitors")
Context.MC_INCIDENTS = RelationField("mcIncidents")
Context.PARTIAL_CHILD_FIELDS = RelationField("partialChildFields")
Context.PARTIAL_CHILD_OBJECTS = RelationField("partialChildObjects")
Context.INPUT_TO_PROCESSES = RelationField("inputToProcesses")
Context.OUTPUT_FROM_PROCESSES = RelationField("outputFromProcesses")
Context.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo")
Context.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom")
Context.FILES = RelationField("files")
Context.LINKS = RelationField("links")
Context.README = RelationField("readme")
Context.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects")
Context.SODA_CHECKS = RelationField("sodaChecks")
Context.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs")
Context.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs")
\ No newline at end of file
diff --git a/pyatlan_v9/model/assets/context_artifact.py b/pyatlan_v9/model/assets/context_artifact.py
new file mode 100644
index 000000000..6baa24c3b
--- /dev/null
+++ b/pyatlan_v9/model/assets/context_artifact.py
@@ -0,0 +1,633 @@
# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT
# ruff: noqa: ARG002
# SPDX-License-Identifier: Apache-2.0
# Copyright 2024 Atlan Pte. Ltd.

"""
ContextArtifact asset model with flattened inheritance.

This module provides:
- ContextArtifact: Flat asset class (easy to use)
- ContextArtifactAttributes: Nested attributes struct (extends AssetAttributes)
- ContextArtifactNested: Nested API format struct
"""

from __future__ import annotations

import re
from typing import Any, ClassVar, Dict, List, Set, Union

import msgspec
from msgspec import UNSET, UnsetType

from .airflow_related import RelatedAirflowTask
from .anomalo_related import RelatedAnomaloCheck
from .app_related import RelatedApplication, RelatedApplicationField
from .asset import (
    _ASSET_REL_FIELDS,
    Asset,
    AssetAttributes,
    AssetNested,
    AssetRelationshipAttributes,
    _extract_asset_attrs,
    _populate_asset_attrs,
)
from .data_contract_related import RelatedDataContract
from .data_mesh_related import RelatedDataProduct
from .data_quality_related import RelatedDataQualityRule, RelatedMetric
from .gcp_dataplex_related import RelatedGCPDataplexAspectType
from .gtc_related import RelatedAtlasGlossaryTerm
from .model_related import RelatedModelAttribute, RelatedModelEntity
from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor
from .partial_related import RelatedPartialField, RelatedPartialObject
from .process_related import RelatedProcess
from .referenceable_related import RelatedReferenceable
from .resource_related import RelatedFile, RelatedLink, RelatedReadme
from .schema_registry_related import RelatedSchemaRegistrySubject
from .soda_related import RelatedSodaCheck
from .spark_related import RelatedSparkJob
from pyatlan_v9.model.conversion_utils import categorize_relationships, merge_relationships
from pyatlan_v9.model.serde import Serde, get_serde
from pyatlan_v9.model.transform import register_asset

from .context_related import RelatedContextRepository

# =============================================================================
# FLAT ASSET CLASS
# =============================================================================

@register_asset
class ContextArtifact(Asset):
    """
    A context-specific artifact produced by a context repository. Inherits from both Context and Artifact for file type, versioning, and storage path.
    """

    # Field descriptors; assigned real KeywordField/RelationField values in the
    # deferred-initialization section at the bottom of this module.
    CONTEXT_REPOSITORY_QUALIFIED_NAME: ClassVar[Any] = None
    CATALOG_DATASET_GUID: ClassVar[Any] = None
    ARTIFACT_VERSION: ClassVar[Any] = None
    FILE_TYPE: ClassVar[Any] = None
    FILE_PATH: ClassVar[Any] = None
    LINK: ClassVar[Any] = None
    IS_GLOBAL: ClassVar[Any] = None
    REFERENCE: ClassVar[Any] = None
    RESOURCE_METADATA: ClassVar[Any] = None
    INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None
    OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None
    ANOMALO_CHECKS: ClassVar[Any] = None
    APPLICATION: ClassVar[Any] = None
    APPLICATION_FIELD: ClassVar[Any] = None
    CONTEXT_REPOSITORY: ClassVar[Any] = None
    DATA_CONTRACT_LATEST: ClassVar[Any] = None
    DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None
    OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None
    INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None
    MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None
    MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None
    METRICS: ClassVar[Any] = None
    DQ_BASE_DATASET_RULES: ClassVar[Any] = None
    DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None
    GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None
    MEANINGS: ClassVar[Any] = None
    MC_MONITORS: ClassVar[Any] = None
    MC_INCIDENTS: ClassVar[Any] = None
    PARTIAL_CHILD_FIELDS: ClassVar[Any] = None
    PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None
    INPUT_TO_PROCESSES: ClassVar[Any] = None
    OUTPUT_FROM_PROCESSES: ClassVar[Any] = None
    USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None
    USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None
    FILES: ClassVar[Any] = None
    LINKS: ClassVar[Any] = None
    README: ClassVar[Any] = None
    SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None
    SODA_CHECKS: ClassVar[Any] = None
    INPUT_TO_SPARK_JOBS: ClassVar[Any] = None
    OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None

    type_name: Union[str, UnsetType] = "ContextArtifact"

    context_repository_qualified_name: Union[str, None, UnsetType] = UNSET
    """Qualified name of the context repository to which this asset belongs."""

    catalog_dataset_guid: Union[str, None, UnsetType] = UNSET
    """Unique identifier of the dataset this asset belongs to."""

    artifact_version: Union[str, None, UnsetType] = UNSET
    """Version identifier for this artifact."""

    file_type: Union[str, None, UnsetType] = UNSET
    """Type (extension) of the file."""

    file_path: Union[str, None, UnsetType] = UNSET
    """URL giving the online location where the file can be accessed."""

    link: Union[str, None, UnsetType] = UNSET
    """URL to the resource."""

    is_global: Union[bool, None, UnsetType] = UNSET
    """Whether the resource is global (true) or not (false)."""

    reference: Union[str, None, UnsetType] = UNSET
    """Reference to the resource."""

    resource_metadata: Union[Dict[str, str], None, UnsetType] = UNSET
    """Metadata of the resource."""

    input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks to which this asset provides input."""

    output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks from which this asset is output."""

    anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET
    """Checks that run on this asset."""

    application: Union[RelatedApplication, None, UnsetType] = UNSET
    """Application owning the Asset."""

    application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET
    """ApplicationField owning the Asset."""

    context_repository: Union[RelatedContextRepository, None, UnsetType] = UNSET
    """Context repository that produced this artifact."""

    data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest version of the data contract (in any status) for this asset."""

    data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest certified version of the data contract for this asset."""

    output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an output port."""

    input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an input port."""

    model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET
    """Entities implemented by this asset."""

    model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET
    """Attributes implemented by this asset."""

    metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET
    """"""

    dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules that are applied on this dataset."""

    dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules where this dataset is referenced."""

    gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET
    """Dataplex entries (assets) that have aspects of this Aspect Type attached."""

    meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET
    """Glossary terms that are linked to this asset."""

    mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET
    """Monitors that observe this asset."""

    mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET
    """"""

    partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET
    """Partial fields contained in the asset."""

    partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET
    """Partial objects contained in the asset."""

    input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes to which this asset provides input."""

    output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes from which this asset is produced as output."""

    user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    files: Union[List[RelatedFile], None, UnsetType] = UNSET
    """"""

    links: Union[List[RelatedLink], None, UnsetType] = UNSET
    """Links that are attached to this asset."""

    readme: Union[RelatedReadme, None, UnsetType] = UNSET
    """README that is linked to this asset."""

    schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET
    """Schema registry subjects associated with this asset."""

    soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET
    """"""

    input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    def __post_init__(self) -> None:
        # Unconditionally pins the Atlas type name, overriding whatever value
        # the constructor received for `type_name`.
        self.type_name = "ContextArtifact"

    # =========================================================================
    # SDK Methods
    # =========================================================================

    # Matches qualified names of the shape <anything>/<segment>/<segment>.
    # Declared here but not used in this module's visible code — presumably
    # consumed by SDK helpers elsewhere; confirm before removing.
    _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(
        r"^.+/[^/]+/[^/]+$"
    )


    # =========================================================================
    # Optimized Serialization Methods (override Asset base class)
    # =========================================================================

    def to_json(self, nested: bool = True, serde: Serde | None = None) -> str:
        """
        Convert to JSON string using optimized nested struct serialization.

        Args:
            nested: If True (default), use nested API format. If False, use flat format.
            serde: Optional Serde instance for encoder reuse. Uses shared singleton if None.

        Returns:
            JSON string representation
        """
        if serde is None:
            serde = get_serde()
        if nested:
            return self.to_nested_bytes(serde).decode("utf-8")
        else:
            return serde.encode(self).decode("utf-8")

    def to_nested_bytes(self, serde: Serde | None = None) -> bytes:
        """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate)."""
        if serde is None:
            serde = get_serde()
        return _context_artifact_to_nested_bytes(self, serde)

    @staticmethod
    def from_json(json_data: str | bytes, serde: Serde | None = None) -> ContextArtifact:
        """
        Create from JSON string or bytes using optimized nested struct deserialization.

        Args:
            json_data: JSON string or bytes to deserialize
            serde: Optional Serde instance for decoder reuse. Uses shared singleton if None.

        Returns:
            ContextArtifact instance
        """
        if isinstance(json_data, str):
            json_data = json_data.encode("utf-8")
        if serde is None:
            serde = get_serde()
        return _context_artifact_from_nested_bytes(json_data, serde)


# =============================================================================
# NESTED FORMAT CLASSES
# =============================================================================

class ContextArtifactAttributes(AssetAttributes):
    """ContextArtifact-specific attributes for nested API format."""

    context_repository_qualified_name: Union[str, None, UnsetType] = UNSET
    """Qualified name of the context repository to which this asset belongs."""

    catalog_dataset_guid: Union[str, None, UnsetType] = UNSET
    """Unique identifier of the dataset this asset belongs to."""

    artifact_version: Union[str, None, UnsetType] = UNSET
    """Version identifier for this artifact."""

    file_type: Union[str, None, UnsetType] = UNSET
    """Type (extension) of the file."""

    file_path: Union[str, None, UnsetType] = UNSET
    """URL giving the online location where the file can be accessed."""

    link: Union[str, None, UnsetType] = UNSET
    """URL to the resource."""

    is_global: Union[bool, None, UnsetType] = UNSET
    """Whether the resource is global (true) or not (false)."""

    reference: Union[str, None, UnsetType] = UNSET
    """Reference to the resource."""

    resource_metadata: Union[Dict[str, str], None, UnsetType] = UNSET
    """Metadata of the resource."""

class ContextArtifactRelationshipAttributes(AssetRelationshipAttributes):
    """ContextArtifact-specific relationship attributes for nested API format."""

    input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks to which this asset provides input."""

    output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks from which this asset is output."""

    anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET
    """Checks that run on this asset."""

    application: Union[RelatedApplication, None, UnsetType] = UNSET
    """Application owning the Asset."""

    application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET
    """ApplicationField owning the Asset."""

    context_repository: Union[RelatedContextRepository, None, UnsetType] = UNSET
    """Context repository that produced this artifact."""

    data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest version of the data contract (in any status) for this asset."""

    data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest certified version of the data contract for this asset."""

    output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an output port."""

    input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an input port."""

    model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET
    """Entities implemented by this asset."""

    model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET
    """Attributes implemented by this asset."""

    metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET
    """"""

    dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules that are applied on this dataset."""

    dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules where this dataset is referenced."""

    gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET
    """Dataplex entries (assets) that have aspects of this Aspect Type attached."""

    meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET
    """Glossary terms that are linked to this asset."""

    mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET
    """Monitors that observe this asset."""

    mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET
    """"""

    partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET
    """Partial fields contained in the asset."""

    partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET
    """Partial objects contained in the asset."""

    input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes to which this asset provides input."""

    output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes from which this asset is produced as output."""

    user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    files: Union[List[RelatedFile], None, UnsetType] = UNSET
    """"""

    links: Union[List[RelatedLink], None, UnsetType] = UNSET
    """Links that are attached to this asset."""

    readme: Union[RelatedReadme, None, UnsetType] = UNSET
    """README that is linked to this asset."""

    schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET
    """Schema registry subjects associated with this asset."""

    soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET
    """"""

    input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

class ContextArtifactNested(AssetNested):
    """ContextArtifact in nested API format for high-performance serialization."""

    attributes: Union[ContextArtifactAttributes, UnsetType] = UNSET
    relationship_attributes: Union[ContextArtifactRelationshipAttributes, UnsetType] = UNSET
    append_relationship_attributes: Union[ContextArtifactRelationshipAttributes, UnsetType] = UNSET
    remove_relationship_attributes: Union[ContextArtifactRelationshipAttributes, UnsetType] = UNSET

# =============================================================================
# CONVERSION HELPERS & CONSTANTS
# =============================================================================

# Every relationship field on ContextArtifact, inherited ones first.
_CONTEXT_ARTIFACT_REL_FIELDS: List[str] = [
    *_ASSET_REL_FIELDS,
    "input_to_airflow_tasks",
    "output_from_airflow_tasks",
    "anomalo_checks",
    "application",
    "application_field",
    "context_repository",
    "data_contract_latest",
    "data_contract_latest_certified",
    "output_port_data_products",
    "input_port_data_products",
    "model_implemented_entities",
    "model_implemented_attributes",
    "metrics",
    "dq_base_dataset_rules",
    "dq_reference_dataset_rules",
    "gcp_dataplex_aspect_type_metadata_entities",
    "meanings",
    "mc_monitors",
    "mc_incidents",
    "partial_child_fields",
    "partial_child_objects",
    "input_to_processes",
    "output_from_processes",
    "user_def_relationship_to",
    "user_def_relationship_from",
    "files",
    "links",
    "readme",
    "schema_registry_subjects",
    "soda_checks",
    "input_to_spark_jobs",
    "output_from_spark_jobs",
]

def _populate_context_artifact_attrs(attrs: ContextArtifactAttributes, obj: ContextArtifact) -> None:
    """Populate ContextArtifact-specific attributes on the attrs struct."""
    # Inherited Asset attributes first, then the ContextArtifact-only fields.
    _populate_asset_attrs(attrs, obj)
    attrs.context_repository_qualified_name = obj.context_repository_qualified_name
    attrs.catalog_dataset_guid = obj.catalog_dataset_guid
    attrs.artifact_version = obj.artifact_version
    attrs.file_type = obj.file_type
    attrs.file_path = obj.file_path
    attrs.link = obj.link
    attrs.is_global = obj.is_global
    attrs.reference = obj.reference
    attrs.resource_metadata = obj.resource_metadata

def _extract_context_artifact_attrs(attrs: ContextArtifactAttributes) -> dict:
    """Extract all ContextArtifact attributes from the attrs struct into a flat dict."""
    result = _extract_asset_attrs(attrs)
    result["context_repository_qualified_name"] = attrs.context_repository_qualified_name
    result["catalog_dataset_guid"] = attrs.catalog_dataset_guid
    result["artifact_version"] = attrs.artifact_version
    result["file_type"] = attrs.file_type
    result["file_path"] = attrs.file_path
    result["link"] = attrs.link
    result["is_global"] = attrs.is_global
    result["reference"] = attrs.reference
    result["resource_metadata"] = attrs.resource_metadata
    return result

# =============================================================================
# CONVERSION FUNCTIONS
# =============================================================================


def _context_artifact_to_nested(context_artifact: ContextArtifact) -> ContextArtifactNested:
    """Convert flat ContextArtifact to nested format."""
    attrs = ContextArtifactAttributes()
    _populate_context_artifact_attrs(attrs, context_artifact)
    # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE)
    replace_rels, append_rels, remove_rels = categorize_relationships(
        context_artifact, _CONTEXT_ARTIFACT_REL_FIELDS, ContextArtifactRelationshipAttributes
    )
    return ContextArtifactNested(
        guid=context_artifact.guid,
        type_name=context_artifact.type_name,
        status=context_artifact.status,
        version=context_artifact.version,
        create_time=context_artifact.create_time,
        update_time=context_artifact.update_time,
        created_by=context_artifact.created_by,
        updated_by=context_artifact.updated_by,
        classifications=context_artifact.classifications,
        classification_names=context_artifact.classification_names,
        meanings=context_artifact.meanings,
        labels=context_artifact.labels,
        business_attributes=context_artifact.business_attributes,
        custom_attributes=context_artifact.custom_attributes,
        pending_tasks=context_artifact.pending_tasks,
        proxy=context_artifact.proxy,
        is_incomplete=context_artifact.is_incomplete,
        provenance_type=context_artifact.provenance_type,
        home_id=context_artifact.home_id,
        attributes=attrs,
        relationship_attributes=replace_rels,
        append_relationship_attributes=append_rels,
        remove_relationship_attributes=remove_rels,
    )

def _context_artifact_from_nested(nested: ContextArtifactNested) -> ContextArtifact:
    """Convert nested format to flat ContextArtifact."""
    attrs = nested.attributes if nested.attributes is not UNSET else ContextArtifactAttributes()
    # Merge relationships from all three buckets
    merged_rels = merge_relationships(
        nested.relationship_attributes,
        nested.append_relationship_attributes,
        nested.remove_relationship_attributes,
        _CONTEXT_ARTIFACT_REL_FIELDS,
        ContextArtifactRelationshipAttributes
    )
    return ContextArtifact(
        guid=nested.guid,
        type_name=nested.type_name,
        status=nested.status,
        version=nested.version,
        create_time=nested.create_time,
        update_time=nested.update_time,
        created_by=nested.created_by,
        updated_by=nested.updated_by,
        classifications=nested.classifications,
        classification_names=nested.classification_names,
        meanings=nested.meanings,
        labels=nested.labels,
        business_attributes=nested.business_attributes,
        custom_attributes=nested.custom_attributes,
pending_tasks=nested.pending_tasks,
+        proxy=nested.proxy,
+        is_incomplete=nested.is_incomplete,
+        provenance_type=nested.provenance_type,
+        home_id=nested.home_id,
+        **_extract_context_artifact_attrs(attrs),
+        # Merged relationship attributes. NOTE(review): "meanings" is passed explicitly above AND listed in _CONTEXT_ARTIFACT_REL_FIELDS — confirm merge_relationships() omits unset fields, otherwise a set "meanings" here raises a duplicate-keyword TypeError.
+        **merged_rels,
+    )
+
+def _context_artifact_to_nested_bytes(context_artifact: ContextArtifact, serde: Serde) -> bytes:
+    """Convert flat ContextArtifact to nested JSON bytes."""
+    return serde.encode(_context_artifact_to_nested(context_artifact))
+
+
+def _context_artifact_from_nested_bytes(data: bytes, serde: Serde) -> ContextArtifact:
+    """Convert nested JSON bytes to flat ContextArtifact."""
+    nested = serde.decode(data, ContextArtifactNested)
+    return _context_artifact_from_nested(nested)
+
+# ---------------------------------------------------------------------------
+# Deferred field descriptor initialization (import kept at module bottom — presumably to avoid a circular import at load time; TODO confirm)
+# ---------------------------------------------------------------------------
+from pyatlan.model.fields.atlan_fields import ( # noqa: E402
+    BooleanField,
+    KeywordField,
+    RelationField,
+)
+
+ContextArtifact.CONTEXT_REPOSITORY_QUALIFIED_NAME = KeywordField("contextRepositoryQualifiedName", "contextRepositoryQualifiedName")
+ContextArtifact.CATALOG_DATASET_GUID = KeywordField("catalogDatasetGuid", "catalogDatasetGuid")
+ContextArtifact.ARTIFACT_VERSION = KeywordField("artifactVersion", "artifactVersion")
+ContextArtifact.FILE_TYPE = KeywordField("fileType", "fileType")
+ContextArtifact.FILE_PATH = KeywordField("filePath", "filePath")
+ContextArtifact.LINK = KeywordField("link", "link")
+ContextArtifact.IS_GLOBAL = BooleanField("isGlobal", "isGlobal")
+ContextArtifact.REFERENCE = KeywordField("reference", "reference")
+ContextArtifact.RESOURCE_METADATA = KeywordField("resourceMetadata", "resourceMetadata")
+ContextArtifact.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks")
+ContextArtifact.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks")
+ContextArtifact.ANOMALO_CHECKS = 
RelationField("anomaloChecks") +ContextArtifact.APPLICATION = RelationField("application") +ContextArtifact.APPLICATION_FIELD = RelationField("applicationField") +ContextArtifact.CONTEXT_REPOSITORY = RelationField("contextRepository") +ContextArtifact.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +ContextArtifact.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +ContextArtifact.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +ContextArtifact.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +ContextArtifact.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") +ContextArtifact.MODEL_IMPLEMENTED_ATTRIBUTES = RelationField("modelImplementedAttributes") +ContextArtifact.METRICS = RelationField("metrics") +ContextArtifact.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +ContextArtifact.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ContextArtifact.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField("gcpDataplexAspectTypeMetadataEntities") +ContextArtifact.MEANINGS = RelationField("meanings") +ContextArtifact.MC_MONITORS = RelationField("mcMonitors") +ContextArtifact.MC_INCIDENTS = RelationField("mcIncidents") +ContextArtifact.PARTIAL_CHILD_FIELDS = RelationField("partialChildFields") +ContextArtifact.PARTIAL_CHILD_OBJECTS = RelationField("partialChildObjects") +ContextArtifact.INPUT_TO_PROCESSES = RelationField("inputToProcesses") +ContextArtifact.OUTPUT_FROM_PROCESSES = RelationField("outputFromProcesses") +ContextArtifact.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +ContextArtifact.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +ContextArtifact.FILES = RelationField("files") +ContextArtifact.LINKS = RelationField("links") +ContextArtifact.README = RelationField("readme") +ContextArtifact.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +ContextArtifact.SODA_CHECKS = 
RelationField("sodaChecks")
+ContextArtifact.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs")
+ContextArtifact.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs")
\ No newline at end of file
diff --git a/pyatlan_v9/model/assets/context_related.py b/pyatlan_v9/model/assets/context_related.py
new file mode 100644
index 000000000..9b27acf2c
--- /dev/null
+++ b/pyatlan_v9/model/assets/context_related.py
@@ -0,0 +1,81 @@
+# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2024 Atlan Pte. Ltd.
+
+"""
+Related type classes for Context module.
+
+This module contains all Related{Type} classes for the Context type hierarchy.
+These classes are used for relationship attributes to reference related entities.
+"""
+
+from __future__ import annotations
+
+from typing import Dict, List, Set, Union
+
+import msgspec
+from msgspec import UNSET, UnsetType
+
+from .agentic_related import RelatedAgentic
+from .referenceable_related import RelatedReferenceable
+
+__all__ = [
+    "RelatedContext",
+    "RelatedContextRepository",
+    "RelatedContextArtifact",
+]
+
+
+class RelatedContext(RelatedAgentic):
+    """
+    Related entity reference for Context assets.
+
+    Extends RelatedAgentic with Context-specific attributes.
+    """
+
+    # type_name is declared on an ancestor struct with default=UNSET;
+    # __post_init__ calls RelatedReferenceable.__post_init__ directly, then pins type_name to "Context" so the concrete type is serialized.
+
+    context_repository_qualified_name: Union[str, None, UnsetType] = UNSET
+    """Qualified name of the context repository to which this asset belongs."""
+
+    def __post_init__(self) -> None:
+        RelatedReferenceable.__post_init__(self)
+        self.type_name = "Context"
+
+class RelatedContextRepository(RelatedContext):
+    """
+    Related entity reference for ContextRepository assets.
+
+    Extends RelatedContext with ContextRepository-specific attributes.
+    """
+
+    # type_name is declared on an ancestor struct with default=UNSET;
+    # __post_init__ calls RelatedReferenceable.__post_init__ directly (bypassing RelatedContext's hook, which would set "Context"), then pins type_name to "ContextRepository".
+
+    context_repository_lifecycle_status: Union[str, None, UnsetType] = UNSET
+    """Lifecycle status of the context repository."""
+
+    context_repository_agent_instructions: Union[str, None, UnsetType] = UNSET
+    """LLM guidance and constraints for NL2SQL generation using this repository's context."""
+
+    context_repository_target_connection_qualified_name: Union[str, None, UnsetType] = UNSET
+    """Qualified name of the connection used as the execution engine for deploying and running queries against this repository."""
+
+    def __post_init__(self) -> None:
+        RelatedReferenceable.__post_init__(self)
+        self.type_name = "ContextRepository"
+
+class RelatedContextArtifact(RelatedContext):
+    """
+    Related entity reference for ContextArtifact assets.
+
+    Extends RelatedContext with ContextArtifact-specific attributes.
+    """
+
+    # type_name is declared on an ancestor struct with default=UNSET;
+    # __post_init__ calls RelatedReferenceable.__post_init__ directly (bypassing RelatedContext's hook, which would set "Context"), then pins type_name to "ContextArtifact".
+
+    def __post_init__(self) -> None:
+        RelatedReferenceable.__post_init__(self)
+        self.type_name = "ContextArtifact"
diff --git a/pyatlan_v9/model/assets/context_repository.py b/pyatlan_v9/model/assets/context_repository.py
new file mode 100644
index 000000000..15f6c1b56
--- /dev/null
+++ b/pyatlan_v9/model/assets/context_repository.py
@@ -0,0 +1,585 @@
+# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT
+# ruff: noqa: ARG002
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2024 Atlan Pte. Ltd.
+
+"""
+ContextRepository asset model with flattened inheritance.
+ +This module provides: +- ContextRepository: Flat asset class (easy to use) +- ContextRepositoryAttributes: Nested attributes struct (extends AssetAttributes) +- ContextRepositoryNested: Nested API format struct +""" + +from __future__ import annotations + +from typing import Any, ClassVar, Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .airflow_related import RelatedAirflowTask +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .model_related import RelatedModelAttribute, RelatedModelEntity +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .partial_related import RelatedPartialField, RelatedPartialObject +from .process_related import RelatedProcess +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck +from .spark_related import RelatedSparkJob +from pyatlan_v9.model.conversion_utils import categorize_relationships, merge_relationships +from pyatlan_v9.model.serde import Serde, get_serde +from pyatlan_v9.model.transform import register_asset + +from .context_related import RelatedContextArtifact + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= + 
+@register_asset +class ContextRepository(Asset): + """ + A curated context repository that organizes metrics, dimensions, filters, and other context for NL2SQL query generation. + """ + + CONTEXT_REPOSITORY_LIFECYCLE_STATUS: ClassVar[Any] = None + CONTEXT_REPOSITORY_AGENT_INSTRUCTIONS: ClassVar[Any] = None + CONTEXT_REPOSITORY_TARGET_CONNECTION_QUALIFIED_NAME: ClassVar[Any] = None + CONTEXT_REPOSITORY_QUALIFIED_NAME: ClassVar[Any] = None + CATALOG_DATASET_GUID: ClassVar[Any] = None + INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + CONTEXT_ARTIFACTS: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + PARTIAL_CHILD_FIELDS: ClassVar[Any] = None + PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None + INPUT_TO_PROCESSES: ClassVar[Any] = None + OUTPUT_FROM_PROCESSES: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None + INPUT_TO_SPARK_JOBS: ClassVar[Any] = None + OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + + type_name: Union[str, UnsetType] = "ContextRepository" + + context_repository_lifecycle_status: 
Union[str, None, UnsetType] = UNSET + """Lifecycle status of the context repository.""" + + context_repository_agent_instructions: Union[str, None, UnsetType] = UNSET + """LLM guidance and constraints for NL2SQL generation using this repository's context.""" + + context_repository_target_connection_qualified_name: Union[str, None, UnsetType] = UNSET + """Qualified name of the connection used as the execution engine for deploying and running queries against this repository.""" + + context_repository_qualified_name: Union[str, None, UnsetType] = UNSET + """Qualified name of the context repository to which this asset belongs.""" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + context_artifacts: Union[List[RelatedContextArtifact], None, UnsetType] = UNSET + """Context artifacts produced by this repository.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + 
input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], 
None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "ContextRepository" + + + + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= + + def to_json(self, nested: bool = True, serde: Serde | None = None) -> str: + """ + Convert to JSON string using optimized nested struct serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. 
+ + Returns: + JSON string representation + """ + if serde is None: + serde = get_serde() + if nested: + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _context_repository_to_nested_bytes(self, serde) + + @staticmethod + def from_json(json_data: str | bytes, serde: Serde | None = None) -> ContextRepository: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. + + Returns: + ContextRepository instance + """ + if isinstance(json_data, str): + json_data = json_data.encode("utf-8") + if serde is None: + serde = get_serde() + return _context_repository_from_nested_bytes(json_data, serde) + + +# ============================================================================= +# NESTED FORMAT CLASSES +# ============================================================================= + +class ContextRepositoryAttributes(AssetAttributes): + """ContextRepository-specific attributes for nested API format.""" + + context_repository_lifecycle_status: Union[str, None, UnsetType] = UNSET + """Lifecycle status of the context repository.""" + + context_repository_agent_instructions: Union[str, None, UnsetType] = UNSET + """LLM guidance and constraints for NL2SQL generation using this repository's context.""" + + context_repository_target_connection_qualified_name: Union[str, None, UnsetType] = UNSET + """Qualified name of the connection used as the execution engine for deploying and running queries against this repository.""" + + context_repository_qualified_name: Union[str, None, UnsetType] = UNSET + """Qualified name of the context repository to which 
this asset belongs.""" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + +class ContextRepositoryRelationshipAttributes(AssetRelationshipAttributes): + """ContextRepository-specific relationship attributes for nested API format.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + context_artifacts: Union[List[RelatedContextArtifact], None, UnsetType] = UNSET + """Context artifacts produced by this repository.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, 
UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this 
asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + +class ContextRepositoryNested(AssetNested): + """ContextRepository in nested API format for high-performance serialization.""" + + attributes: Union[ContextRepositoryAttributes, UnsetType] = UNSET + relationship_attributes: Union[ContextRepositoryRelationshipAttributes, UnsetType] = UNSET + append_relationship_attributes: Union[ContextRepositoryRelationshipAttributes, UnsetType] = UNSET + remove_relationship_attributes: Union[ContextRepositoryRelationshipAttributes, UnsetType] = UNSET + +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_CONTEXT_REPOSITORY_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, + "input_to_airflow_tasks", + "output_from_airflow_tasks", + "anomalo_checks", + "application", + "application_field", + "context_artifacts", + "data_contract_latest", + "data_contract_latest_certified", + "output_port_data_products", + "input_port_data_products", + "model_implemented_entities", + "model_implemented_attributes", + "metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + "mc_monitors", + "mc_incidents", + "partial_child_fields", + "partial_child_objects", + "input_to_processes", + "output_from_processes", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + "soda_checks", + "input_to_spark_jobs", + "output_from_spark_jobs", +] + +def _populate_context_repository_attrs(attrs: ContextRepositoryAttributes, obj: ContextRepository) -> None: + """Populate ContextRepository-specific attributes 
on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.context_repository_lifecycle_status = obj.context_repository_lifecycle_status + attrs.context_repository_agent_instructions = obj.context_repository_agent_instructions + attrs.context_repository_target_connection_qualified_name = obj.context_repository_target_connection_qualified_name + attrs.context_repository_qualified_name = obj.context_repository_qualified_name + attrs.catalog_dataset_guid = obj.catalog_dataset_guid + +def _extract_context_repository_attrs(attrs: ContextRepositoryAttributes) -> dict: + """Extract all ContextRepository attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["context_repository_lifecycle_status"] = attrs.context_repository_lifecycle_status + result["context_repository_agent_instructions"] = attrs.context_repository_agent_instructions + result["context_repository_target_connection_qualified_name"] = attrs.context_repository_target_connection_qualified_name + result["context_repository_qualified_name"] = attrs.context_repository_qualified_name + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid + return result + +# ============================================================================= +# CONVERSION FUNCTIONS +# ============================================================================= + + +def _context_repository_to_nested(context_repository: ContextRepository) -> ContextRepositoryNested: + """Convert flat ContextRepository to nested format.""" + attrs = ContextRepositoryAttributes() + _populate_context_repository_attrs(attrs, context_repository) + # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE) + replace_rels, append_rels, remove_rels = categorize_relationships( + context_repository, _CONTEXT_REPOSITORY_REL_FIELDS, ContextRepositoryRelationshipAttributes + ) + return ContextRepositoryNested( + guid=context_repository.guid, + type_name=context_repository.type_name, + 
status=context_repository.status, + version=context_repository.version, + create_time=context_repository.create_time, + update_time=context_repository.update_time, + created_by=context_repository.created_by, + updated_by=context_repository.updated_by, + classifications=context_repository.classifications, + classification_names=context_repository.classification_names, + meanings=context_repository.meanings, + labels=context_repository.labels, + business_attributes=context_repository.business_attributes, + custom_attributes=context_repository.custom_attributes, + pending_tasks=context_repository.pending_tasks, + proxy=context_repository.proxy, + is_incomplete=context_repository.is_incomplete, + provenance_type=context_repository.provenance_type, + home_id=context_repository.home_id, + attributes=attrs, + relationship_attributes=replace_rels, + append_relationship_attributes=append_rels, + remove_relationship_attributes=remove_rels, + ) + +def _context_repository_from_nested(nested: ContextRepositoryNested) -> ContextRepository: + """Convert nested format to flat ContextRepository.""" + attrs = nested.attributes if nested.attributes is not UNSET else ContextRepositoryAttributes() + # Merge relationships from all three buckets + merged_rels = merge_relationships( + nested.relationship_attributes, + nested.append_relationship_attributes, + nested.remove_relationship_attributes, + _CONTEXT_REPOSITORY_REL_FIELDS, + ContextRepositoryRelationshipAttributes + ) + return ContextRepository( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + created_by=nested.created_by, + updated_by=nested.updated_by, + classifications=nested.classifications, + classification_names=nested.classification_names, + meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + custom_attributes=nested.custom_attributes, + 
pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_context_repository_attrs(attrs), + # Merged relationship attributes + **merged_rels, + ) + +def _context_repository_to_nested_bytes(context_repository: ContextRepository, serde: Serde) -> bytes: + """Convert flat ContextRepository to nested JSON bytes.""" + return serde.encode(_context_repository_to_nested(context_repository)) + + +def _context_repository_from_nested_bytes(data: bytes, serde: Serde) -> ContextRepository: + """Convert nested JSON bytes to flat ContextRepository.""" + nested = serde.decode(data, ContextRepositoryNested) + return _context_repository_from_nested(nested) + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# --------------------------------------------------------------------------- +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + KeywordField, + RelationField, + TextField, +) + +ContextRepository.CONTEXT_REPOSITORY_LIFECYCLE_STATUS = KeywordField("contextRepositoryLifecycleStatus", "contextRepositoryLifecycleStatus") +ContextRepository.CONTEXT_REPOSITORY_AGENT_INSTRUCTIONS = TextField("contextRepositoryAgentInstructions", "contextRepositoryAgentInstructions") +ContextRepository.CONTEXT_REPOSITORY_TARGET_CONNECTION_QUALIFIED_NAME = KeywordField("contextRepositoryTargetConnectionQualifiedName", "contextRepositoryTargetConnectionQualifiedName") +ContextRepository.CONTEXT_REPOSITORY_QUALIFIED_NAME = KeywordField("contextRepositoryQualifiedName", "contextRepositoryQualifiedName") +ContextRepository.CATALOG_DATASET_GUID = KeywordField("catalogDatasetGuid", "catalogDatasetGuid") +ContextRepository.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") +ContextRepository.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") +ContextRepository.ANOMALO_CHECKS = 
RelationField("anomaloChecks") +ContextRepository.APPLICATION = RelationField("application") +ContextRepository.APPLICATION_FIELD = RelationField("applicationField") +ContextRepository.CONTEXT_ARTIFACTS = RelationField("contextArtifacts") +ContextRepository.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +ContextRepository.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +ContextRepository.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +ContextRepository.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +ContextRepository.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") +ContextRepository.MODEL_IMPLEMENTED_ATTRIBUTES = RelationField("modelImplementedAttributes") +ContextRepository.METRICS = RelationField("metrics") +ContextRepository.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +ContextRepository.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ContextRepository.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField("gcpDataplexAspectTypeMetadataEntities") +ContextRepository.MEANINGS = RelationField("meanings") +ContextRepository.MC_MONITORS = RelationField("mcMonitors") +ContextRepository.MC_INCIDENTS = RelationField("mcIncidents") +ContextRepository.PARTIAL_CHILD_FIELDS = RelationField("partialChildFields") +ContextRepository.PARTIAL_CHILD_OBJECTS = RelationField("partialChildObjects") +ContextRepository.INPUT_TO_PROCESSES = RelationField("inputToProcesses") +ContextRepository.OUTPUT_FROM_PROCESSES = RelationField("outputFromProcesses") +ContextRepository.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +ContextRepository.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +ContextRepository.FILES = RelationField("files") +ContextRepository.LINKS = RelationField("links") +ContextRepository.README = RelationField("readme") +ContextRepository.SCHEMA_REGISTRY_SUBJECTS = 
RelationField("schemaRegistrySubjects") +ContextRepository.SODA_CHECKS = RelationField("sodaChecks") +ContextRepository.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs") +ContextRepository.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs") \ No newline at end of file diff --git a/pyatlan_v9/model/assets/cosmos_mongo_db.py b/pyatlan_v9/model/assets/cosmos_mongo_db.py index 4986e28ce..d79da6e4b 100644 --- a/pyatlan_v9/model/assets/cosmos_mongo_db.py +++ b/pyatlan_v9/model/assets/cosmos_mongo_db.py @@ -38,10 +38,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cosmos_mongo_db_related import RelatedCosmosMongoDB from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +80,7 @@ class CosmosMongoDB(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +98,8 @@ class CosmosMongoDB(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CosmosMongoDB" + no_sql_schema_definition: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="noSQLSchemaDefinition" ) @@ -151,6 +154,11 @@ class CosmosMongoDB(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type 
attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -206,66 +214,6 @@ class CosmosMongoDB(Asset): def __post_init__(self) -> None: self.type_name = "CosmosMongoDB" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CosmosMongoDB instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CosmosMongoDB validation failed: {errors}") - - def minimize(self) -> "CosmosMongoDB": - """ - Return a minimal copy of this CosmosMongoDB with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CosmosMongoDB with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CosmosMongoDB instance with only the minimum required fields. 
- """ - self.validate() - return CosmosMongoDB(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCosmosMongoDB": - """ - Create a :class:`RelatedCosmosMongoDB` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCosmosMongoDB reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCosmosMongoDB(guid=self.guid) - return RelatedCosmosMongoDB(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -379,6 +327,11 @@ class CosmosMongoDBRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -467,6 +420,7 @@ class CosmosMongoDBNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -538,9 +492,6 @@ def _cosmos_mongo_db_to_nested(cosmos_mongo_db: CosmosMongoDB) -> CosmosMongoDBN is_incomplete=cosmos_mongo_db.is_incomplete, provenance_type=cosmos_mongo_db.provenance_type, home_id=cosmos_mongo_db.home_id, - depth=cosmos_mongo_db.depth, - immediate_upstream=cosmos_mongo_db.immediate_upstream, - immediate_downstream=cosmos_mongo_db.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -574,6 +525,7 @@ def 
_cosmos_mongo_db_from_nested(nested: CosmosMongoDBNested) -> CosmosMongoDB: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -582,9 +534,6 @@ def _cosmos_mongo_db_from_nested(nested: CosmosMongoDBNested) -> CosmosMongoDB: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cosmos_mongo_db_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -631,6 +580,9 @@ def _cosmos_mongo_db_from_nested_bytes(data: bytes, serde: Serde) -> CosmosMongo CosmosMongoDB.METRICS = RelationField("metrics") CosmosMongoDB.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CosmosMongoDB.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CosmosMongoDB.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CosmosMongoDB.MEANINGS = RelationField("meanings") CosmosMongoDB.MC_MONITORS = RelationField("mcMonitors") CosmosMongoDB.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cosmos_mongo_db_account.py b/pyatlan_v9/model/assets/cosmos_mongo_db_account.py index ba51821ea..da0fc581b 100644 --- a/pyatlan_v9/model/assets/cosmos_mongo_db_account.py +++ b/pyatlan_v9/model/assets/cosmos_mongo_db_account.py @@ -38,13 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cosmos_mongo_db_related import ( - RelatedCosmosMongoDBAccount, - RelatedCosmosMongoDBDatabase, -) +from .cosmos_mongo_db_related import RelatedCosmosMongoDBDatabase from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -100,6 +98,7 @@ class CosmosMongoDBAccount(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -117,6 +116,8 @@ class CosmosMongoDBAccount(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CosmosMongoDBAccount" + cosmos_mongo_db_account_instance_id: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="cosmosMongoDBAccountInstanceId" ) @@ -262,6 +263,11 @@ class CosmosMongoDBAccount(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -317,66 +323,6 @@ class CosmosMongoDBAccount(Asset): def __post_init__(self) -> None: self.type_name = "CosmosMongoDBAccount" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CosmosMongoDBAccount instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"CosmosMongoDBAccount validation failed: {errors}") - - def minimize(self) -> "CosmosMongoDBAccount": - """ - Return a minimal copy of this CosmosMongoDBAccount with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CosmosMongoDBAccount with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CosmosMongoDBAccount instance with only the minimum required fields. - """ - self.validate() - return CosmosMongoDBAccount(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCosmosMongoDBAccount": - """ - Create a :class:`RelatedCosmosMongoDBAccount` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCosmosMongoDBAccount reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCosmosMongoDBAccount(guid=self.guid) - return RelatedCosmosMongoDBAccount(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -583,6 +529,11 @@ class CosmosMongoDBAccountRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -672,6 +623,7 @@ class CosmosMongoDBAccountNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -831,9 +783,6 @@ def _cosmos_mongo_db_account_to_nested( is_incomplete=cosmos_mongo_db_account.is_incomplete, provenance_type=cosmos_mongo_db_account.provenance_type, home_id=cosmos_mongo_db_account.home_id, - depth=cosmos_mongo_db_account.depth, - immediate_upstream=cosmos_mongo_db_account.immediate_upstream, - immediate_downstream=cosmos_mongo_db_account.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -869,6 +818,7 @@ def _cosmos_mongo_db_account_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -877,9 +827,6 @@ def _cosmos_mongo_db_account_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cosmos_mongo_db_account_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -998,6 +945,9 @@ def _cosmos_mongo_db_account_from_nested_bytes( CosmosMongoDBAccount.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +CosmosMongoDBAccount.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CosmosMongoDBAccount.MEANINGS = RelationField("meanings") CosmosMongoDBAccount.MC_MONITORS = RelationField("mcMonitors") CosmosMongoDBAccount.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cosmos_mongo_db_collection.py b/pyatlan_v9/model/assets/cosmos_mongo_db_collection.py index 2d3eb7b0e..5541f5ce4 100644 --- a/pyatlan_v9/model/assets/cosmos_mongo_db_collection.py +++ b/pyatlan_v9/model/assets/cosmos_mongo_db_collection.py @@ -39,10 +39,7 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cosmos_mongo_db_related import ( - RelatedCosmosMongoDBCollection, - RelatedCosmosMongoDBDatabase, -) +from .cosmos_mongo_db_related import RelatedCosmosMongoDBDatabase from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric @@ -52,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import RelatedMongoDBDatabase @@ -175,6 +173,7 @@ class CosmosMongoDBCollection(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None MONGO_DB_DATABASE: ClassVar[Any] = None MONGO_DB_COLUMNS: ClassVar[Any] = None @@ -203,6 +202,8 @@ class CosmosMongoDBCollection(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CosmosMongoDBCollection" + cosmos_mongo_db_database_qualified_name: Union[str, None, UnsetType] = ( msgspec.field(default=UNSET, name="cosmosMongoDBDatabaseQualifiedName") ) @@ -505,6 +506,11 @@ class CosmosMongoDBCollection(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -613,82 +619,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CosmosMongoDBCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cosmos_mongo_db_database is UNSET: - errors.append("cosmos_mongo_db_database is required for creation") - if self.cosmos_mongo_db_database_qualified_name is UNSET: - errors.append( - "cosmos_mongo_db_database_qualified_name is required for creation" - ) - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"CosmosMongoDBCollection validation failed: {errors}") - - def minimize(self) -> "CosmosMongoDBCollection": - """ - Return a minimal copy of this CosmosMongoDBCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CosmosMongoDBCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CosmosMongoDBCollection instance with only the minimum required fields. - """ - self.validate() - return CosmosMongoDBCollection( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedCosmosMongoDBCollection": - """ - Create a :class:`RelatedCosmosMongoDBCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCosmosMongoDBCollection reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCosmosMongoDBCollection(guid=self.guid) - return RelatedCosmosMongoDBCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1052,6 +982,11 @@ class CosmosMongoDBCollectionRelationshipAttributes(AssetRelationshipAttributes) dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1193,6 +1128,7 @@ class CosmosMongoDBCollectionNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mongo_db_database", "mongo_db_columns", @@ -1451,9 +1387,6 @@ def _cosmos_mongo_db_collection_to_nested( is_incomplete=cosmos_mongo_db_collection.is_incomplete, provenance_type=cosmos_mongo_db_collection.provenance_type, home_id=cosmos_mongo_db_collection.home_id, - depth=cosmos_mongo_db_collection.depth, - immediate_upstream=cosmos_mongo_db_collection.immediate_upstream, - immediate_downstream=cosmos_mongo_db_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1489,6 +1422,7 @@ def _cosmos_mongo_db_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + 
meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1497,9 +1431,6 @@ def _cosmos_mongo_db_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cosmos_mongo_db_collection_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1741,6 +1672,9 @@ def _cosmos_mongo_db_collection_from_nested_bytes( CosmosMongoDBCollection.DBT_SOURCES = RelationField("dbtSources") CosmosMongoDBCollection.SQL_DBT_SOURCES = RelationField("sqlDBTSources") CosmosMongoDBCollection.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +CosmosMongoDBCollection.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CosmosMongoDBCollection.MEANINGS = RelationField("meanings") CosmosMongoDBCollection.MONGO_DB_DATABASE = RelationField("mongoDBDatabase") CosmosMongoDBCollection.MONGO_DB_COLUMNS = RelationField("mongoDBColumns") diff --git a/pyatlan_v9/model/assets/cosmos_mongo_db_database.py b/pyatlan_v9/model/assets/cosmos_mongo_db_database.py index e5b08c1af..9f2802328 100644 --- a/pyatlan_v9/model/assets/cosmos_mongo_db_database.py +++ b/pyatlan_v9/model/assets/cosmos_mongo_db_database.py @@ -42,7 +42,6 @@ from .cosmos_mongo_db_related import ( RelatedCosmosMongoDBAccount, RelatedCosmosMongoDBCollection, - RelatedCosmosMongoDBDatabase, ) from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct @@ -54,6 +53,7 @@ RelatedDbtTest, ) from .fabric_related import RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import 
RelatedMongoDBCollection @@ -135,6 +135,7 @@ class CosmosMongoDBDatabase(Asset): SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MONGO_DB_COLLECTIONS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -158,6 +159,8 @@ class CosmosMongoDBDatabase(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CosmosMongoDBDatabase" + cosmos_mongo_db_account_qualified_name: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="cosmosMongoDBAccountQualifiedName" ) @@ -332,6 +335,11 @@ class CosmosMongoDBDatabase(Asset): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -421,76 +429,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CosmosMongoDBDatabase instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cosmos_mongo_db_account is UNSET: - errors.append("cosmos_mongo_db_account is required for creation") - if self.cosmos_mongo_db_account_qualified_name is UNSET: - errors.append( - "cosmos_mongo_db_account_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"CosmosMongoDBDatabase validation failed: {errors}") - - def minimize(self) -> "CosmosMongoDBDatabase": - """ - Return a minimal copy of this CosmosMongoDBDatabase with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CosmosMongoDBDatabase with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CosmosMongoDBDatabase instance with only the minimum required fields. - """ - self.validate() - return CosmosMongoDBDatabase(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCosmosMongoDBDatabase": - """ - Create a :class:`RelatedCosmosMongoDBDatabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCosmosMongoDBDatabase reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCosmosMongoDBDatabase(guid=self.guid) - return RelatedCosmosMongoDBDatabase(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -726,6 +664,11 @@ class CosmosMongoDBDatabaseRelationshipAttributes(AssetRelationshipAttributes): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -851,6 +794,7 @@ class CosmosMongoDBDatabaseNested(AssetNested): "sql_dbt_sources", "dbt_seed_assets", "fabric_workspace", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mongo_db_collections", "mc_monitors", @@ -1006,9 +950,6 @@ def _cosmos_mongo_db_database_to_nested( is_incomplete=cosmos_mongo_db_database.is_incomplete, provenance_type=cosmos_mongo_db_database.provenance_type, home_id=cosmos_mongo_db_database.home_id, - depth=cosmos_mongo_db_database.depth, - immediate_upstream=cosmos_mongo_db_database.immediate_upstream, - immediate_downstream=cosmos_mongo_db_database.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1044,6 +985,7 @@ def _cosmos_mongo_db_database_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1052,9 +994,6 @@ def 
_cosmos_mongo_db_database_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cosmos_mongo_db_database_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1196,6 +1135,9 @@ def _cosmos_mongo_db_database_from_nested_bytes( CosmosMongoDBDatabase.SQL_DBT_SOURCES = RelationField("sqlDBTSources") CosmosMongoDBDatabase.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") CosmosMongoDBDatabase.FABRIC_WORKSPACE = RelationField("fabricWorkspace") +CosmosMongoDBDatabase.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CosmosMongoDBDatabase.MEANINGS = RelationField("meanings") CosmosMongoDBDatabase.MONGO_DB_COLLECTIONS = RelationField("mongoDBCollections") CosmosMongoDBDatabase.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/cube.py b/pyatlan_v9/model/assets/cube.py index cd8531177..aa8f67040 100644 --- a/pyatlan_v9/model/assets/cube.py +++ b/pyatlan_v9/model/assets/cube.py @@ -37,10 +37,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cube_related import RelatedCube, RelatedCubeDimension +from .cube_related import RelatedCubeDimension from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class Cube(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None 
MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class Cube(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Cube" + cube_dimension_count: Union[int, None, UnsetType] = UNSET """Number of dimensions in the cube.""" @@ -173,6 +177,11 @@ class Cube(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -231,66 +240,6 @@ class Cube(Asset): def __post_init__(self) -> None: self.type_name = "Cube" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Cube instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Cube validation failed: {errors}") - - def minimize(self) -> "Cube": - """ - Return a minimal copy of this Cube with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Cube with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Cube instance with only the minimum required fields. - """ - self.validate() - return Cube(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCube": - """ - Create a :class:`RelatedCube` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCube reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCube(guid=self.guid) - return RelatedCube(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -420,6 +369,11 @@ class CubeRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -505,6 +459,7 @@ class CubeNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -585,9 +540,6 @@ def _cube_to_nested(cube: Cube) -> CubeNested: is_incomplete=cube.is_incomplete, provenance_type=cube.provenance_type, home_id=cube.home_id, - depth=cube.depth, - immediate_upstream=cube.immediate_upstream, - immediate_downstream=cube.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -617,6 +569,7 @@ def _cube_from_nested(nested: CubeNested) -> Cube: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -625,9 +578,6 @@ def _cube_from_nested(nested: CubeNested) -> Cube: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_cube_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -685,6 +635,9 @@ def _cube_from_nested_bytes(data: bytes, serde: Serde) -> Cube: Cube.METRICS = RelationField("metrics") Cube.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Cube.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Cube.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Cube.MEANINGS = RelationField("meanings") Cube.MC_MONITORS = RelationField("mcMonitors") Cube.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cube_dimension.py b/pyatlan_v9/model/assets/cube_dimension.py index 4c5b1195a..6dba3f95b 100644 --- a/pyatlan_v9/model/assets/cube_dimension.py +++ b/pyatlan_v9/model/assets/cube_dimension.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +87,7 @@ class CubeDimension(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +108,8 @@ class CubeDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CubeDimension" + cube_hierarchy_count: Union[int, None, UnsetType] = UNSET """Number of hierarchies in the cube dimension.""" @@ -176,6 +180,11 @@ class 
CubeDimension(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -246,76 +255,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CubeDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cube is UNSET: - errors.append("cube is required for creation") - if self.cube_name is UNSET: - errors.append("cube_name is required for creation") - if self.cube_qualified_name is UNSET: - errors.append("cube_qualified_name is required for creation") - if errors: - raise ValueError(f"CubeDimension validation failed: {errors}") - - def minimize(self) -> "CubeDimension": - """ - Return a minimal copy of this CubeDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CubeDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CubeDimension instance with only the minimum required fields. - """ - self.validate() - return CubeDimension(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCubeDimension": - """ - Create a :class:`RelatedCubeDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCubeDimension reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCubeDimension(guid=self.guid) - return RelatedCubeDimension(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -445,6 +384,11 @@ class CubeDimensionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -542,6 +486,7 @@ class CubeDimensionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -626,9 +571,6 @@ def _cube_dimension_to_nested(cube_dimension: CubeDimension) -> CubeDimensionNes is_incomplete=cube_dimension.is_incomplete, provenance_type=cube_dimension.provenance_type, home_id=cube_dimension.home_id, - depth=cube_dimension.depth, - immediate_upstream=cube_dimension.immediate_upstream, - immediate_downstream=cube_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -662,6 +604,7 @@ def _cube_dimension_from_nested(nested: CubeDimensionNested) -> CubeDimension: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -670,9 +613,6 @@ def _cube_dimension_from_nested(nested: CubeDimensionNested) -> CubeDimension: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cube_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -740,6 +680,9 @@ def _cube_dimension_from_nested_bytes(data: bytes, serde: Serde) -> CubeDimensio CubeDimension.METRICS = RelationField("metrics") CubeDimension.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CubeDimension.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CubeDimension.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CubeDimension.MEANINGS = RelationField("meanings") CubeDimension.MC_MONITORS = RelationField("mcMonitors") CubeDimension.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cube_field.py b/pyatlan_v9/model/assets/cube_field.py index 6802622b6..35e62be12 100644 --- a/pyatlan_v9/model/assets/cube_field.py +++ b/pyatlan_v9/model/assets/cube_field.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -91,6 +92,7 @@ class CubeField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +114,8 @@ class CubeField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CubeField" + cube_parent_field_name: Union[str, None, UnsetType] = UNSET """Name of the parent field in which this field is nested.""" @@ -197,6 +201,11 @@ class CubeField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -272,84 +281,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CubeField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cube_hierarchy is UNSET: - errors.append("cube_hierarchy is required for creation") - if self.cube_hierarchy_name is UNSET: - errors.append("cube_hierarchy_name is required for creation") - if self.cube_hierarchy_qualified_name is UNSET: - errors.append("cube_hierarchy_qualified_name is required for creation") - if self.cube_dimension_name is UNSET: - errors.append("cube_dimension_name is required for creation") - if self.cube_dimension_qualified_name is UNSET: - errors.append("cube_dimension_qualified_name is required for creation") - if self.cube_name is UNSET: - errors.append("cube_name is required for creation") - if self.cube_qualified_name is UNSET: - errors.append("cube_qualified_name is required for creation") - if errors: - raise ValueError(f"CubeField validation failed: {errors}") - - def minimize(self) -> "CubeField": - """ - Return a minimal copy of this CubeField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CubeField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CubeField instance with only the minimum required fields. 
- """ - self.validate() - return CubeField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCubeField": - """ - Create a :class:`RelatedCubeField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCubeField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCubeField(guid=self.guid) - return RelatedCubeField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -494,6 +425,11 @@ class CubeFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -592,6 +528,7 @@ class CubeFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -685,9 +622,6 @@ def _cube_field_to_nested(cube_field: CubeField) -> CubeFieldNested: is_incomplete=cube_field.is_incomplete, provenance_type=cube_field.provenance_type, home_id=cube_field.home_id, - depth=cube_field.depth, - immediate_upstream=cube_field.immediate_upstream, - immediate_downstream=cube_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -719,6 +653,7 @@ def _cube_field_from_nested(nested: CubeFieldNested) -> CubeField: 
updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -727,9 +662,6 @@ def _cube_field_from_nested(nested: CubeFieldNested) -> CubeField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cube_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -804,6 +736,9 @@ def _cube_field_from_nested_bytes(data: bytes, serde: Serde) -> CubeField: CubeField.METRICS = RelationField("metrics") CubeField.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CubeField.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CubeField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CubeField.MEANINGS = RelationField("meanings") CubeField.MC_MONITORS = RelationField("mcMonitors") CubeField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/cube_hierarchy.py b/pyatlan_v9/model/assets/cube_hierarchy.py index e06a892d1..7f3169ae7 100644 --- a/pyatlan_v9/model/assets/cube_hierarchy.py +++ b/pyatlan_v9/model/assets/cube_hierarchy.py @@ -38,10 +38,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cube_related import RelatedCubeDimension, RelatedCubeField, RelatedCubeHierarchy +from .cube_related import RelatedCubeDimension, RelatedCubeField from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import 
RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +87,7 @@ class CubeHierarchy(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +108,8 @@ class CubeHierarchy(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CubeHierarchy" + cube_field_count: Union[int, None, UnsetType] = UNSET """Number of total fields in the cube hierarchy.""" @@ -176,6 +180,11 @@ class CubeHierarchy(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -248,80 +257,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CubeHierarchy instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.cube_dimension is UNSET: - errors.append("cube_dimension is required for creation") - if self.cube_dimension_name is UNSET: - errors.append("cube_dimension_name is required for creation") - if self.cube_dimension_qualified_name is UNSET: - errors.append("cube_dimension_qualified_name is required for creation") - if self.cube_name is UNSET: - errors.append("cube_name is required for creation") - if self.cube_qualified_name is UNSET: - errors.append("cube_qualified_name is required for creation") - if errors: - raise ValueError(f"CubeHierarchy validation failed: {errors}") - - def minimize(self) -> "CubeHierarchy": - """ - Return a minimal copy of this CubeHierarchy with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CubeHierarchy with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CubeHierarchy instance with only the minimum required fields. - """ - self.validate() - return CubeHierarchy(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCubeHierarchy": - """ - Create a :class:`RelatedCubeHierarchy` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCubeHierarchy reference to this asset. - """ - if self.guid is not UNSET: - return RelatedCubeHierarchy(guid=self.guid) - return RelatedCubeHierarchy(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -451,6 +386,11 @@ class CubeHierarchyRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -548,6 +488,7 @@ class CubeHierarchyNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -632,9 +573,6 @@ def _cube_hierarchy_to_nested(cube_hierarchy: CubeHierarchy) -> CubeHierarchyNes is_incomplete=cube_hierarchy.is_incomplete, provenance_type=cube_hierarchy.provenance_type, home_id=cube_hierarchy.home_id, - depth=cube_hierarchy.depth, - immediate_upstream=cube_hierarchy.immediate_upstream, - immediate_downstream=cube_hierarchy.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -668,6 +606,7 @@ def _cube_hierarchy_from_nested(nested: CubeHierarchyNested) -> CubeHierarchy: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -676,9 +615,6 @@ def _cube_hierarchy_from_nested(nested: CubeHierarchyNested) -> CubeHierarchy: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_cube_hierarchy_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -744,6 +680,9 @@ def _cube_hierarchy_from_nested_bytes(data: bytes, serde: Serde) -> CubeHierarch CubeHierarchy.METRICS = RelationField("metrics") CubeHierarchy.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CubeHierarchy.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CubeHierarchy.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CubeHierarchy.MEANINGS = RelationField("meanings") CubeHierarchy.MC_MONITORS = RelationField("mcMonitors") CubeHierarchy.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/custom.py b/pyatlan_v9/model/assets/custom.py index d6c4a8dec..4c7428d2c 100644 --- a/pyatlan_v9/model/assets/custom.py +++ b/pyatlan_v9/model/assets/custom.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .custom_related import RelatedCustom from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +78,7 @@ class Custom(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: 
ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class Custom(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Custom" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class Custom(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class Custom(Asset): def __post_init__(self) -> None: self.type_name = "Custom" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Custom instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Custom validation failed: {errors}") - - def minimize(self) -> "Custom": - """ - Return a minimal copy of this Custom with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Custom with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Custom instance with only the minimum required fields. - """ - self.validate() - return Custom(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCustom": - """ - Create a :class:`RelatedCustom` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCustom reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCustom(guid=self.guid) - return RelatedCustom(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class CustomRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class CustomNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _custom_to_nested(custom: Custom) -> CustomNested: is_incomplete=custom.is_incomplete, provenance_type=custom.provenance_type, home_id=custom.home_id, - depth=custom.depth, - immediate_upstream=custom.immediate_upstream, - immediate_downstream=custom.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -550,6 +501,7 @@ def _custom_from_nested(nested: CustomNested) -> Custom: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -558,9 +510,6 @@ def _custom_from_nested(nested: CustomNested) -> Custom: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_custom_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -598,6 +547,9 @@ def _custom_from_nested_bytes(data: bytes, serde: Serde) -> Custom: Custom.METRICS = RelationField("metrics") Custom.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Custom.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Custom.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Custom.MEANINGS = RelationField("meanings") Custom.MC_MONITORS = RelationField("mcMonitors") Custom.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/custom_entity.py b/pyatlan_v9/model/assets/custom_entity.py index cd5f87ab8..6b72d66df 100644 --- a/pyatlan_v9/model/assets/custom_entity.py +++ b/pyatlan_v9/model/assets/custom_entity.py @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class CustomEntity(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class CustomEntity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "CustomEntity" + custom_children_subtype: Union[str, None, UnsetType] = UNSET """Label of the children column 
for this asset type.""" @@ -170,6 +174,11 @@ class CustomEntity(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -231,70 +240,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this CustomEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"CustomEntity validation failed: {errors}") - - def minimize(self) -> "CustomEntity": - """ - Return a minimal copy of this CustomEntity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new CustomEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new CustomEntity instance with only the minimum required fields. - """ - self.validate() - return CustomEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedCustomEntity": - """ - Create a :class:`RelatedCustomEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedCustomEntity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedCustomEntity(guid=self.guid) - return RelatedCustomEntity(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -452,6 +397,11 @@ class CustomEntityRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -544,6 +494,7 @@ class CustomEntityNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -613,9 +564,6 @@ def _custom_entity_to_nested(custom_entity: CustomEntity) -> CustomEntityNested: is_incomplete=custom_entity.is_incomplete, provenance_type=custom_entity.provenance_type, home_id=custom_entity.home_id, - depth=custom_entity.depth, - immediate_upstream=custom_entity.immediate_upstream, - immediate_downstream=custom_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -649,6 +597,7 @@ def _custom_entity_from_nested(nested: CustomEntityNested) -> CustomEntity: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -657,9 +606,6 @@ def _custom_entity_from_nested(nested: CustomEntityNested) -> CustomEntity: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_custom_entity_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -708,6 +654,9 @@ def _custom_entity_from_nested_bytes(data: bytes, serde: Serde) -> CustomEntity: CustomEntity.METRICS = RelationField("metrics") CustomEntity.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") CustomEntity.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +CustomEntity.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) CustomEntity.MEANINGS = RelationField("meanings") CustomEntity.MC_MONITORS = RelationField("mcMonitors") CustomEntity.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_contract.py b/pyatlan_v9/model/assets/data_contract.py index 7230be37e..feed04d13 100644 --- a/pyatlan_v9/model/assets/data_contract.py +++ b/pyatlan_v9/model/assets/data_contract.py @@ -1,21 +1,26 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 # SPDX-License-Identifier: Apache-2.0 # Copyright 2024 Atlan Pte. Ltd. -"""DataContract asset model for pyatlan_v9.""" +""" +DataContract asset model with flattened inheritance. 
+ +This module provides: +- DataContract: Flat asset class (easy to use) +- DataContractAttributes: Nested attributes struct (extends AssetAttributes) +- DataContractNested: Nested API format struct +""" from __future__ import annotations import re -from json import JSONDecodeError, loads -from typing import Union +from typing import Any, ClassVar, List, Union from msgspec import UNSET, UnsetType -from pyatlan.errors import ErrorCode from pyatlan_v9.model.contract import DataContractSpec from pyatlan_v9.model.conversion_utils import ( - build_attributes_kwargs, - build_flat_kwargs, categorize_relationships, merge_relationships, ) @@ -23,45 +28,226 @@ from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .asset_related import RelatedAsset -from .catalog import ( - Catalog, - CatalogAttributes, - CatalogNested, - CatalogRelationshipAttributes, +from .airflow_related import RelatedAirflowTask +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, ) -from .catalog_related import RelatedCatalog +from .asset_related import RelatedAsset +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .model_related import RelatedModelAttribute, RelatedModelEntity +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .partial_related import RelatedPartialField, RelatedPartialObject +from .process_related import RelatedProcess +from .referenceable_related import RelatedReferenceable +from .resource_related import 
RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck +from .spark_related import RelatedSparkJob + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= @register_asset -class DataContract(Catalog): - """Instance of a data contract in Atlan.""" +class DataContract(Asset): + """ + Data contract for an asset. + """ + + DATA_CONTRACT_JSON: ClassVar[Any] = None + DATA_CONTRACT_SPEC: ClassVar[Any] = None + DATA_CONTRACT_VERSION: ClassVar[Any] = None + DATA_CONTRACT_ASSET_GUID: ClassVar[Any] = None + CATALOG_DATASET_GUID: ClassVar[Any] = None + INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_ASSET_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + DATA_CONTRACT_ASSET_CERTIFIED: ClassVar[Any] = None + DATA_CONTRACT_PREVIOUS_VERSION: ClassVar[Any] = None + DATA_CONTRACT_NEXT_VERSION: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + PARTIAL_CHILD_FIELDS: ClassVar[Any] = None + PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None + INPUT_TO_PROCESSES: ClassVar[Any] = None + OUTPUT_FROM_PROCESSES: ClassVar[Any] = None + 
USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None + INPUT_TO_SPARK_JOBS: ClassVar[Any] = None + OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None type_name: Union[str, UnsetType] = "DataContract" data_contract_json: Union[str, None, UnsetType] = UNSET - """Deprecated JSON representation of the data contract.""" + """(Deprecated) Replaced by dataContractSpec attribute.""" data_contract_spec: Union[str, None, UnsetType] = UNSET - """YAML representation of the data contract.""" + """Actual content of the contract in YAML string format. Any changes to this string should create a new instance (with new sequential version number).""" data_contract_version: Union[int, None, UnsetType] = UNSET - """Version number of the data contract.""" + """Version of the contract.""" data_contract_asset_guid: Union[str, None, UnsetType] = UNSET - """GUID of the governed asset.""" + """Unique identifier of the asset associated with this data contract.""" - data_contract_asset_certified: Union[RelatedAsset, None, UnsetType] = UNSET - """Certified target asset for this contract.""" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" - data_contract_next_version: Union[RelatedCatalog, None, UnsetType] = UNSET - """Next 
version in this contract chain.""" + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" data_contract_asset_latest: Union[RelatedAsset, None, UnsetType] = UNSET - """Latest version of this contract.""" + """Asset this contract controls or will control.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + data_contract_asset_certified: Union[RelatedAsset, None, UnsetType] = UNSET + """Asset this contract controls.""" + + data_contract_previous_version: Union[RelatedDataContract, None, UnsetType] = UNSET + """Data contract instance that holds the previous version of this contract.""" + + data_contract_next_version: Union[RelatedDataContract, None, UnsetType] = UNSET + """Data contract instance that holds the next version of this contract.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[ + List[RelatedModelAttribute], None, UnsetType + ] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this 
dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" - data_contract_previous_version: Union[RelatedCatalog, None, UnsetType] = UNSET - """Previous version in this contract chain.""" + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: 
Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "DataContract" + + # ========================================================================= + # SDK Methods + # ========================================================================= + + _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") @classmethod @init_guid @@ -95,19 +281,46 @@ def trim_to_required(self) -> "DataContract": """Return only required fields for update operations.""" return DataContract.updater(qualified_name=self.qualified_name, name=self.name) + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= + def to_json(self, nested: bool = True, serde: Serde | None = None) -> str: - """Convert to JSON string.""" + """ + Convert to JSON string using optimized nested struct serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. 
+ + Returns: + JSON string representation + """ if serde is None: serde = get_serde() if nested: - return _data_contract_to_nested_bytes(self, serde).decode("utf-8") - return serde.encode(self).decode("utf-8") + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _data_contract_to_nested_bytes(self, serde) @staticmethod - def from_json( - json_data: Union[str, bytes], serde: Serde | None = None - ) -> "DataContract": - """Create from JSON string or bytes.""" + def from_json(json_data: str | bytes, serde: Serde | None = None) -> DataContract: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. + + Returns: + DataContract instance + """ if isinstance(json_data, str): json_data = json_data.encode("utf-8") if serde is None: @@ -115,80 +328,150 @@ def from_json( return _data_contract_from_nested_bytes(json_data, serde) -class DataContractAttributes(CatalogAttributes): +# ============================================================================= +# NESTED FORMAT CLASSES +# ============================================================================= + + +class DataContractAttributes(AssetAttributes): """DataContract-specific attributes for nested API format.""" data_contract_json: Union[str, None, UnsetType] = UNSET + """(Deprecated) Replaced by dataContractSpec attribute.""" + data_contract_spec: Union[str, None, UnsetType] = UNSET + """Actual content of the contract in YAML string format. 
Any changes to this string should create a new instance (with new sequential version number).""" + data_contract_version: Union[int, None, UnsetType] = UNSET + """Version of the contract.""" + data_contract_asset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the asset associated with this data contract.""" - @classmethod - def creator( - cls, - *, - asset_qualified_name: str, - contract_json: Union[str, None] = None, - contract_spec: Union[DataContractSpec, str, None] = None, - ) -> "DataContractAttributes": - """Create DataContract attributes from JSON or YAML contract content.""" - validate_required_fields(["asset_qualified_name"], [asset_qualified_name]) - if not (contract_json or contract_spec): - raise ValueError( - "At least one of `contract_json` or `contract_spec` must be provided to create a contract." - ) - if contract_json and contract_spec: - raise ValueError( - "Both `contract_json` and `contract_spec` cannot be provided simultaneously to create a contract." 
- ) - - default_dataset = asset_qualified_name[asset_qualified_name.rfind("/") + 1 :] - contract_name: str - contract_spec_value: Union[str, None] = None - - if contract_json: - try: - payload = loads(contract_json) - dataset = payload.get("dataset") - if not dataset: - raise KeyError("dataset") - contract_name = f"Data contract for {dataset}" - except (JSONDecodeError, KeyError): - raise ErrorCode.INVALID_CONTRACT_JSON.exception_with_parameters() - else: - if isinstance(contract_spec, DataContractSpec): - contract_name = ( - f"Data contract for {contract_spec.dataset or default_dataset}" - ) - contract_spec_value = contract_spec.to_yaml() - else: - spec_str = contract_spec or "" - match = re.search(r"dataset:\s*([^\s#]+)", spec_str) - dataset = match.group(1) if match else default_dataset - contract_name = f"Data contract for {dataset}" - contract_spec_value = spec_str + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" - return cls( - name=contract_name, - qualified_name=f"{asset_qualified_name}/contract", - data_contract_json=contract_json, - data_contract_spec=contract_spec_value, - ) +class DataContractRelationshipAttributes(AssetRelationshipAttributes): + """DataContract-specific relationship attributes for nested API format.""" -DataContract.Attributes = DataContractAttributes # type: ignore[attr-defined] + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" -class DataContractRelationshipAttributes(CatalogRelationshipAttributes): - """DataContract-specific relationship attributes for nested API format.""" + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = 
UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" - data_contract_asset_certified: Union[RelatedAsset, None, UnsetType] = UNSET - data_contract_next_version: Union[RelatedCatalog, None, UnsetType] = UNSET data_contract_asset_latest: Union[RelatedAsset, None, UnsetType] = UNSET - data_contract_previous_version: Union[RelatedCatalog, None, UnsetType] = UNSET + """Asset this contract controls or will control.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + data_contract_asset_certified: Union[RelatedAsset, None, UnsetType] = UNSET + """Asset this contract controls.""" + + data_contract_previous_version: Union[RelatedDataContract, None, UnsetType] = UNSET + """Data contract instance that holds the previous version of this contract.""" + + data_contract_next_version: Union[RelatedDataContract, None, UnsetType] = UNSET + """Data contract instance that holds the next version of this contract.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[ + List[RelatedModelAttribute], None, UnsetType + ] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: 
Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" -class DataContractNested(CatalogNested): + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated 
with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + +class DataContractNested(AssetNested): """DataContract in nested API format for high-performance serialization.""" attributes: Union[DataContractAttributes, UnsetType] = UNSET @@ -203,24 +486,90 @@ class DataContractNested(CatalogNested): ] = UNSET +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_DATA_CONTRACT_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, + "input_to_airflow_tasks", + "output_from_airflow_tasks", + "anomalo_checks", + "application", + "application_field", + "data_contract_latest", + "data_contract_asset_latest", + "data_contract_latest_certified", + "data_contract_asset_certified", + "data_contract_previous_version", + "data_contract_next_version", + "output_port_data_products", + "input_port_data_products", + "model_implemented_entities", + "model_implemented_attributes", + "metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + "mc_monitors", + "mc_incidents", + "partial_child_fields", + "partial_child_objects", + "input_to_processes", + "output_from_processes", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + "soda_checks", + "input_to_spark_jobs", + "output_from_spark_jobs", +] + + +def _populate_data_contract_attrs( + attrs: DataContractAttributes, obj: DataContract +) -> None: + """Populate DataContract-specific attributes on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.data_contract_json = obj.data_contract_json + attrs.data_contract_spec 
= obj.data_contract_spec + attrs.data_contract_version = obj.data_contract_version + attrs.data_contract_asset_guid = obj.data_contract_asset_guid + attrs.catalog_dataset_guid = obj.catalog_dataset_guid + + +def _extract_data_contract_attrs(attrs: DataContractAttributes) -> dict: + """Extract all DataContract attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["data_contract_json"] = attrs.data_contract_json + result["data_contract_spec"] = attrs.data_contract_spec + result["data_contract_version"] = attrs.data_contract_version + result["data_contract_asset_guid"] = attrs.data_contract_asset_guid + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid + return result + + +# ============================================================================= +# CONVERSION FUNCTIONS +# ============================================================================= + + def _data_contract_to_nested(data_contract: DataContract) -> DataContractNested: """Convert flat DataContract to nested format.""" - attrs_kwargs = build_attributes_kwargs(data_contract, DataContractAttributes) - attrs = DataContractAttributes(**attrs_kwargs) - rel_fields: list[str] = [ - "data_contract_asset_certified", - "data_contract_next_version", - "data_contract_asset_latest", - "data_contract_previous_version", - ] + attrs = DataContractAttributes() + _populate_data_contract_attrs(attrs, data_contract) + # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE) replace_rels, append_rels, remove_rels = categorize_relationships( - data_contract, rel_fields, DataContractRelationshipAttributes + data_contract, _DATA_CONTRACT_REL_FIELDS, DataContractRelationshipAttributes ) return DataContractNested( guid=data_contract.guid, type_name=data_contract.type_name, status=data_contract.status, - delete_handler=data_contract.delete_handler, version=data_contract.version, create_time=data_contract.create_time, update_time=data_contract.update_time, 
@@ -251,23 +600,38 @@ def _data_contract_from_nested(nested: DataContractNested) -> DataContract: if nested.attributes is not UNSET else DataContractAttributes() ) - rel_fields: list[str] = [ - "data_contract_asset_certified", - "data_contract_next_version", - "data_contract_asset_latest", - "data_contract_previous_version", - ] + # Merge relationships from all three buckets merged_rels = merge_relationships( nested.relationship_attributes, nested.append_relationship_attributes, nested.remove_relationship_attributes, - rel_fields, + _DATA_CONTRACT_REL_FIELDS, DataContractRelationshipAttributes, ) - kwargs = build_flat_kwargs( - nested, attrs, merged_rels, CatalogNested, DataContractAttributes + return DataContract( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + created_by=nested.created_by, + updated_by=nested.updated_by, + classifications=nested.classifications, + classification_names=nested.classification_names, + meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + custom_attributes=nested.custom_attributes, + pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_data_contract_attrs(attrs), + # Merged relationship attributes + **merged_rels, ) - return DataContract(**kwargs) def _data_contract_to_nested_bytes(data_contract: DataContract, serde: Serde) -> bytes: @@ -279,3 +643,66 @@ def _data_contract_from_nested_bytes(data: bytes, serde: Serde) -> DataContract: """Convert nested JSON bytes to flat DataContract.""" nested = serde.decode(data, DataContractNested) return _data_contract_from_nested(nested) + + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# 
--------------------------------------------------------------------------- +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + KeywordField, + NumericField, + RelationField, +) + +DataContract.DATA_CONTRACT_JSON = KeywordField("dataContractJson", "dataContractJson") +DataContract.DATA_CONTRACT_SPEC = KeywordField("dataContractSpec", "dataContractSpec") +DataContract.DATA_CONTRACT_VERSION = NumericField( + "dataContractVersion", "dataContractVersion" +) +DataContract.DATA_CONTRACT_ASSET_GUID = KeywordField( + "dataContractAssetGuid", "dataContractAssetGuid" +) +DataContract.CATALOG_DATASET_GUID = KeywordField( + "catalogDatasetGuid", "catalogDatasetGuid" +) +DataContract.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") +DataContract.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") +DataContract.ANOMALO_CHECKS = RelationField("anomaloChecks") +DataContract.APPLICATION = RelationField("application") +DataContract.APPLICATION_FIELD = RelationField("applicationField") +DataContract.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +DataContract.DATA_CONTRACT_ASSET_LATEST = RelationField("dataContractAssetLatest") +DataContract.DATA_CONTRACT_LATEST_CERTIFIED = RelationField( + "dataContractLatestCertified" +) +DataContract.DATA_CONTRACT_ASSET_CERTIFIED = RelationField("dataContractAssetCertified") +DataContract.DATA_CONTRACT_PREVIOUS_VERSION = RelationField( + "dataContractPreviousVersion" +) +DataContract.DATA_CONTRACT_NEXT_VERSION = RelationField("dataContractNextVersion") +DataContract.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +DataContract.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +DataContract.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") +DataContract.MODEL_IMPLEMENTED_ATTRIBUTES = RelationField("modelImplementedAttributes") +DataContract.METRICS = RelationField("metrics") +DataContract.DQ_BASE_DATASET_RULES = 
RelationField("dqBaseDatasetRules") +DataContract.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DataContract.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) +DataContract.MEANINGS = RelationField("meanings") +DataContract.MC_MONITORS = RelationField("mcMonitors") +DataContract.MC_INCIDENTS = RelationField("mcIncidents") +DataContract.PARTIAL_CHILD_FIELDS = RelationField("partialChildFields") +DataContract.PARTIAL_CHILD_OBJECTS = RelationField("partialChildObjects") +DataContract.INPUT_TO_PROCESSES = RelationField("inputToProcesses") +DataContract.OUTPUT_FROM_PROCESSES = RelationField("outputFromProcesses") +DataContract.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +DataContract.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +DataContract.FILES = RelationField("files") +DataContract.LINKS = RelationField("links") +DataContract.README = RelationField("readme") +DataContract.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +DataContract.SODA_CHECKS = RelationField("sodaChecks") +DataContract.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs") +DataContract.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs") diff --git a/pyatlan_v9/model/assets/data_domain.py b/pyatlan_v9/model/assets/data_domain.py index 0289c166f..dc0216136 100644 --- a/pyatlan_v9/model/assets/data_domain.py +++ b/pyatlan_v9/model/assets/data_domain.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataDomain, RelatedDataProduct, RelatedStakeholder from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 
@@ class DataDomain(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class DataDomain(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataDomain" + parent_domain_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the parent domain in which this asset exists.""" @@ -169,6 +173,11 @@ class DataDomain(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -224,66 +233,6 @@ class DataDomain(Asset): def __post_init__(self) -> None: self.type_name = "DataDomain" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataDomain instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataDomain validation failed: {errors}") - - def minimize(self) -> "DataDomain": - """ - Return a minimal copy of this DataDomain with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataDomain with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataDomain instance with only the minimum required fields. - """ - self.validate() - return DataDomain(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataDomain": - """ - Create a :class:`RelatedDataDomain` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataDomain reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataDomain(guid=self.guid) - return RelatedDataDomain(qualified_name=self.qualified_name) - @classmethod def _get_super_domain_qualified_name( cls, domain_qualified_name: str @@ -471,6 +420,11 @@ class DataDomainRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -561,6 +515,7 @@ class DataDomainNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -630,9 +585,6 @@ def _data_domain_to_nested(data_domain: DataDomain) -> DataDomainNested: is_incomplete=data_domain.is_incomplete, provenance_type=data_domain.provenance_type, home_id=data_domain.home_id, - depth=data_domain.depth, - immediate_upstream=data_domain.immediate_upstream, - immediate_downstream=data_domain.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -664,6 +616,7 @@ def _data_domain_from_nested(nested: DataDomainNested) -> DataDomain: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -672,9 +625,6 @@ def _data_domain_from_nested(nested: DataDomainNested) -> DataDomain: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_data_domain_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -732,6 +682,9 @@ def _data_domain_from_nested_bytes(data: bytes, serde: Serde) -> DataDomain: DataDomain.METRICS = RelationField("metrics") DataDomain.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataDomain.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DataDomain.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataDomain.MEANINGS = RelationField("meanings") DataDomain.MC_MONITORS = RelationField("mcMonitors") DataDomain.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_mesh.py b/pyatlan_v9/model/assets/data_mesh.py index 8501fb753..16aebe221 100644 --- a/pyatlan_v9/model/assets/data_mesh.py +++ b/pyatlan_v9/model/assets/data_mesh.py @@ -38,8 +38,9 @@ _populate_asset_attrs, ) from .data_contract_related import RelatedDataContract -from .data_mesh_related import RelatedDataMesh, RelatedDataProduct +from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -79,6 +80,7 @@ class DataMesh(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -96,6 +98,8 @@ class DataMesh(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataMesh" + parent_domain_qualified_name: 
Union[str, None, UnsetType] = UNSET """Unique name of the parent domain in which this asset exists.""" @@ -151,6 +155,11 @@ class DataMesh(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -206,66 +215,6 @@ class DataMesh(Asset): def __post_init__(self) -> None: self.type_name = "DataMesh" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataMesh instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataMesh validation failed: {errors}") - - def minimize(self) -> "DataMesh": - """ - Return a minimal copy of this DataMesh with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataMesh with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataMesh instance with only the minimum required fields. - """ - self.validate() - return DataMesh(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataMesh": - """ - Create a :class:`RelatedDataMesh` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataMesh reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataMesh(guid=self.guid) - return RelatedDataMesh(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -380,6 +329,11 @@ class DataMeshRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -466,6 +420,7 @@ class DataMeshNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -535,9 +490,6 @@ def _data_mesh_to_nested(data_mesh: DataMesh) -> DataMeshNested: is_incomplete=data_mesh.is_incomplete, provenance_type=data_mesh.provenance_type, home_id=data_mesh.home_id, - depth=data_mesh.depth, - 
immediate_upstream=data_mesh.immediate_upstream, - immediate_downstream=data_mesh.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -569,6 +521,7 @@ def _data_mesh_from_nested(nested: DataMeshNested) -> DataMesh: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -577,9 +530,6 @@ def _data_mesh_from_nested(nested: DataMeshNested) -> DataMesh: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_mesh_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -631,6 +581,9 @@ def _data_mesh_from_nested_bytes(data: bytes, serde: Serde) -> DataMesh: DataMesh.METRICS = RelationField("metrics") DataMesh.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataMesh.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DataMesh.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataMesh.MEANINGS = RelationField("meanings") DataMesh.MC_MONITORS = RelationField("mcMonitors") DataMesh.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_mesh_dataset.py b/pyatlan_v9/model/assets/data_mesh_dataset.py index a9c6e9023..c04bd8e71 100644 --- a/pyatlan_v9/model/assets/data_mesh_dataset.py +++ b/pyatlan_v9/model/assets/data_mesh_dataset.py @@ -39,8 +39,9 @@ _populate_asset_attrs, ) from .data_contract_related import RelatedDataContract -from .data_mesh_related import RelatedDataMeshDataset, RelatedDataProduct +from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -82,6 +83,7 @@ class DataMeshDataset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -99,6 +101,8 @@ class DataMeshDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataMeshDataset" + data_mesh_dataset_type: Union[str, None, UnsetType] = UNSET """Type classification of this dataset (Raw, Refined, or Aggregated).""" @@ -160,6 +164,11 @@ class DataMeshDataset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -223,67 +232,6 @@ def __post_init__(self) -> None: r"^default/dataset/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataMeshDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if errors: - raise ValueError(f"DataMeshDataset validation failed: {errors}") - - def minimize(self) -> "DataMeshDataset": - """ - Return a minimal copy of this DataMeshDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataMeshDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataMeshDataset instance with only the minimum required fields. - """ - self.validate() - return DataMeshDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataMeshDataset": - """ - Create a :class:`RelatedDataMeshDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataMeshDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataMeshDataset(guid=self.guid) - return RelatedDataMeshDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -406,6 +354,11 @@ class DataMeshDatasetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -495,6 +448,7 @@ class DataMeshDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -572,9 +526,6 @@ def _data_mesh_dataset_to_nested( is_incomplete=data_mesh_dataset.is_incomplete, provenance_type=data_mesh_dataset.provenance_type, home_id=data_mesh_dataset.home_id, - depth=data_mesh_dataset.depth, - immediate_upstream=data_mesh_dataset.immediate_upstream, - immediate_downstream=data_mesh_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -608,6 +559,7 @@ def _data_mesh_dataset_from_nested(nested: DataMeshDatasetNested) -> DataMeshDat updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -616,9 +568,6 @@ def _data_mesh_dataset_from_nested(nested: DataMeshDatasetNested) -> DataMeshDat is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_mesh_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -682,6 +631,9 @@ def _data_mesh_dataset_from_nested_bytes(data: bytes, serde: Serde) -> DataMeshD DataMeshDataset.METRICS = RelationField("metrics") DataMeshDataset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataMeshDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DataMeshDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataMeshDataset.MEANINGS = RelationField("meanings") DataMeshDataset.MC_MONITORS = RelationField("mcMonitors") DataMeshDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_mesh_related.py b/pyatlan_v9/model/assets/data_mesh_related.py index 0ba3f8437..6f16c2db7 100644 --- a/pyatlan_v9/model/assets/data_mesh_related.py +++ b/pyatlan_v9/model/assets/data_mesh_related.py @@ -143,7 +143,7 @@ class RelatedDataProduct(RelatedDataMesh): data_product_score_value: Union[float, None, UnsetType] = UNSET """Score of this data product.""" - data_product_score_updated_at: Union[int, None, UnsetType] = UNSET + data_mesh_score_updated_at: Union[int, None, UnsetType] = UNSET """Timestamp when the score of this data product was last updated.""" daap_visibility_users: Union[List[str], None, UnsetType] = UNSET diff --git a/pyatlan_v9/model/assets/data_product.py b/pyatlan_v9/model/assets/data_product.py index 3460a71f0..4d0e94c4e 100644 --- a/pyatlan_v9/model/assets/data_product.py +++ b/pyatlan_v9/model/assets/data_product.py @@ -52,6 +52,7 @@ RelatedDataProduct, ) from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import 
RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,7 +87,7 @@ class DataProduct(Asset): DATA_PRODUCT_ASSETS_DSL: ClassVar[Any] = None DATA_PRODUCT_ASSETS_PLAYBOOK_FILTER: ClassVar[Any] = None DATA_PRODUCT_SCORE_VALUE: ClassVar[Any] = None - DATA_PRODUCT_SCORE_UPDATED_AT: ClassVar[Any] = None + DATA_MESH_SCORE_UPDATED_AT: ClassVar[Any] = None DAAP_VISIBILITY_USERS: ClassVar[Any] = None DAAP_VISIBILITY_GROUPS: ClassVar[Any] = None DAAP_OUTPUT_PORT_GUIDS: ClassVar[Any] = None @@ -113,6 +114,7 @@ class DataProduct(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -131,6 +133,8 @@ class DataProduct(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None STARBURST_DATASETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataProduct" + data_product_status: Union[str, None, UnsetType] = UNSET """Status of this data product.""" @@ -166,7 +170,7 @@ class DataProduct(Asset): data_product_score_value: Union[float, None, UnsetType] = UNSET """Score of this data product.""" - data_product_score_updated_at: Union[int, None, UnsetType] = UNSET + data_mesh_score_updated_at: Union[int, None, UnsetType] = UNSET """Timestamp when the score of this data product was last updated.""" daap_visibility_users: Union[List[str], None, UnsetType] = UNSET @@ -251,6 +255,11 @@ class DataProduct(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET 
"""Glossary terms that are linked to this asset.""" @@ -315,70 +324,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/product/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataProduct instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.data_domain is UNSET: - errors.append("data_domain is required for creation") - if errors: - raise ValueError(f"DataProduct validation failed: {errors}") - - def minimize(self) -> "DataProduct": - """ - Return a minimal copy of this DataProduct with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataProduct with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataProduct instance with only the minimum required fields. 
- """ - self.validate() - return DataProduct(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataProduct": - """ - Create a :class:`RelatedDataProduct` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataProduct reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataProduct(guid=self.guid) - return RelatedDataProduct(qualified_name=self.qualified_name) - @classmethod def _get_super_domain_qualified_name( cls, domain_qualified_name: str @@ -549,7 +494,7 @@ class DataProductAttributes(AssetAttributes): data_product_score_value: Union[float, None, UnsetType] = UNSET """Score of this data product.""" - data_product_score_updated_at: Union[int, None, UnsetType] = UNSET + data_mesh_score_updated_at: Union[int, None, UnsetType] = UNSET """Timestamp when the score of this data product was last updated.""" daap_visibility_users: Union[List[str], None, UnsetType] = UNSET @@ -638,6 +583,11 @@ class DataProductRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -731,6 +681,7 @@ class DataProductNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -767,7 +718,7 @@ def _populate_data_product_attrs( attrs.data_product_assets_dsl = obj.data_product_assets_dsl attrs.data_product_assets_playbook_filter = obj.data_product_assets_playbook_filter 
attrs.data_product_score_value = obj.data_product_score_value - attrs.data_product_score_updated_at = obj.data_product_score_updated_at + attrs.data_mesh_score_updated_at = obj.data_mesh_score_updated_at attrs.daap_visibility_users = obj.daap_visibility_users attrs.daap_visibility_groups = obj.daap_visibility_groups attrs.daap_output_port_guids = obj.daap_output_port_guids @@ -794,7 +745,7 @@ def _extract_data_product_attrs(attrs: DataProductAttributes) -> dict: attrs.data_product_assets_playbook_filter ) result["data_product_score_value"] = attrs.data_product_score_value - result["data_product_score_updated_at"] = attrs.data_product_score_updated_at + result["data_mesh_score_updated_at"] = attrs.data_mesh_score_updated_at result["daap_visibility_users"] = attrs.daap_visibility_users result["daap_visibility_groups"] = attrs.daap_visibility_groups result["daap_output_port_guids"] = attrs.daap_output_port_guids @@ -839,9 +790,6 @@ def _data_product_to_nested(data_product: DataProduct) -> DataProductNested: is_incomplete=data_product.is_incomplete, provenance_type=data_product.provenance_type, home_id=data_product.home_id, - depth=data_product.depth, - immediate_upstream=data_product.immediate_upstream, - immediate_downstream=data_product.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -873,6 +821,7 @@ def _data_product_from_nested(nested: DataProductNested) -> DataProduct: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -881,9 +830,6 @@ def _data_product_from_nested(nested: DataProductNested) -> DataProduct: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_data_product_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -934,8 +880,8 @@ def _data_product_from_nested_bytes(data: bytes, serde: Serde) -> DataProduct: DataProduct.DATA_PRODUCT_SCORE_VALUE = NumericField( "dataProductScoreValue", "dataProductScoreValue" ) -DataProduct.DATA_PRODUCT_SCORE_UPDATED_AT = NumericField( - "dataProductScoreUpdatedAt", "dataProductScoreUpdatedAt" +DataProduct.DATA_MESH_SCORE_UPDATED_AT = NumericField( + "dataMeshScoreUpdatedAt", "dataMeshScoreUpdatedAt" ) DataProduct.DAAP_VISIBILITY_USERS = KeywordField( "daapVisibilityUsers", "daapVisibilityUsers" @@ -983,6 +929,9 @@ def _data_product_from_nested_bytes(data: bytes, serde: Serde) -> DataProduct: DataProduct.METRICS = RelationField("metrics") DataProduct.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataProduct.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DataProduct.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataProduct.MEANINGS = RelationField("meanings") DataProduct.MC_MONITORS = RelationField("mcMonitors") DataProduct.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_quality.py b/pyatlan_v9/model/assets/data_quality.py index 6448e8daa..e43da83b4 100644 --- a/pyatlan_v9/model/assets/data_quality.py +++ b/pyatlan_v9/model/assets/data_quality.py @@ -39,11 +39,8 @@ ) from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct -from .data_quality_related import ( - RelatedDataQuality, - RelatedDataQualityRule, - RelatedMetric, -) +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, 
RelatedMCMonitor @@ -82,6 +79,7 @@ class DataQuality(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -99,6 +97,8 @@ class DataQuality(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataQuality" + dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET """Whether this data quality is part of contract (true) or not (false).""" @@ -151,6 +151,11 @@ class DataQuality(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -206,66 +211,6 @@ class DataQuality(Asset): def __post_init__(self) -> None: self.type_name = "DataQuality" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataQuality instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataQuality validation failed: {errors}") - - def minimize(self) -> "DataQuality": - """ - Return a minimal copy of this DataQuality with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataQuality with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataQuality instance with only the minimum required fields. - """ - self.validate() - return DataQuality(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataQuality": - """ - Create a :class:`RelatedDataQuality` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataQuality reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataQuality(guid=self.guid) - return RelatedDataQuality(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -377,6 +322,11 @@ class DataQualityRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -463,6 +413,7 @@ class DataQualityNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -532,9 +483,6 @@ def _data_quality_to_nested(data_quality: DataQuality) -> DataQualityNested: is_incomplete=data_quality.is_incomplete, provenance_type=data_quality.provenance_type, home_id=data_quality.home_id, - depth=data_quality.depth, - immediate_upstream=data_quality.immediate_upstream, - immediate_downstream=data_quality.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -566,6 +514,7 @@ def _data_quality_from_nested(nested: DataQualityNested) -> DataQuality: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -574,9 +523,6 @@ def _data_quality_from_nested(nested: DataQualityNested) -> DataQuality: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_quality_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -625,6 +571,9 @@ def _data_quality_from_nested_bytes(data: bytes, serde: Serde) -> DataQuality: DataQuality.METRICS = RelationField("metrics") DataQuality.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataQuality.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DataQuality.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataQuality.MEANINGS = RelationField("meanings") DataQuality.MC_MONITORS = RelationField("mcMonitors") DataQuality.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_quality_rule.py b/pyatlan_v9/model/assets/data_quality_rule.py index c35757106..b5ef1a42c 100644 --- a/pyatlan_v9/model/assets/data_quality_rule.py +++ b/pyatlan_v9/model/assets/data_quality_rule.py @@ -59,12 +59,14 @@ _populate_asset_attrs, ) from .asset_related import RelatedAsset +from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import ( RelatedDataQualityRule, RelatedDataQualityRuleTemplate, RelatedMetric, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -111,12 +113,17 @@ class DataQualityRule(Asset): DQ_RULE_CUSTOM_SQL_RETURN_TYPE: ClassVar[Any] = None DQ_RULE_FAILED_ROWS_SQL: ClassVar[Any] = None DQ_RULE_ROW_SCOPE_FILTERING_ENABLED: ClassVar[Any] = None + DQ_RULE_LATEST_RESULT_DETAILS: ClassVar[Any] = None + DQ_RULE_AD_STATUS: ClassVar[Any] = None DQ_IS_PART_OF_CONTRACT: ClassVar[Any] = None + 
CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None APPLICATION: ClassVar[Any] = None APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None @@ -129,6 +136,7 @@ class DataQualityRule(Asset): DQ_RULE_REFERENCE_DATASETS: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None DQ_RULE_REFERENCE_COLUMNS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -146,6 +154,8 @@ class DataQualityRule(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataQualityRule" + dq_rule_base_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Base dataset qualified name that attached to this rule.""" @@ -221,9 +231,20 @@ class DataQualityRule(Asset): dq_rule_row_scope_filtering_enabled: Union[bool, None, UnsetType] = UNSET """Whether row scope filtering is enabled for this data quality rule (true) or not (false).""" + dq_rule_latest_result_details: Union[str, None, UnsetType] = UNSET + """JSON string with anomaly detection result details (forecast, upper_bound, lower_bound, is_anomaly) from Snowflake AD.""" + + dq_rule_ad_status: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="dqRuleADStatus" + ) + """Anomaly detection lifecycle status for this rule.""" + dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET """Whether this data quality is part of contract (true) or not (false).""" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the 
dataset this asset belongs to.""" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -239,6 +260,12 @@ class DataQualityRule(Asset): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -279,6 +306,11 @@ class DataQualityRule(Asset): dq_rule_reference_columns: Union[List[RelatedColumn], None, UnsetType] = UNSET """Columns referenced in this rule.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -320,7 +352,7 @@ class DataQualityRule(Asset): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -340,72 +372,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataQualityRule instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dq_rule_template is UNSET: - errors.append("dq_rule_template is required for creation") - if errors: - raise ValueError(f"DataQualityRule validation failed: {errors}") - - def minimize(self) -> "DataQualityRule": - """ - Return a minimal copy of this DataQualityRule with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataQualityRule with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataQualityRule instance with only the minimum required fields. - """ - self.validate() - return DataQualityRule(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataQualityRule": - """ - Create a :class:`RelatedDataQualityRule` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataQualityRule reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataQualityRule(guid=self.guid) - return RelatedDataQualityRule(qualified_name=self.qualified_name) - @classmethod @init_guid def custom_sql_creator( @@ -1209,9 +1175,20 @@ class DataQualityRuleAttributes(AssetAttributes): dq_rule_row_scope_filtering_enabled: Union[bool, None, UnsetType] = UNSET """Whether row scope filtering is enabled for this data quality rule (true) or not (false).""" + dq_rule_latest_result_details: Union[str, None, UnsetType] = UNSET + """JSON string with anomaly detection result details (forecast, upper_bound, lower_bound, is_anomaly) from Snowflake AD.""" + + dq_rule_ad_status: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="dqRuleADStatus" + ) + """Anomaly detection lifecycle status for this rule.""" + dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET """Whether this data quality is part of contract (true) or not (false).""" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + class DataQualityRuleRelationshipAttributes(AssetRelationshipAttributes): """DataQualityRule-specific relationship attributes for nested API format.""" @@ -1231,6 +1208,12 @@ class DataQualityRuleRelationshipAttributes(AssetRelationshipAttributes): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this 
asset is an output port.""" @@ -1271,6 +1254,11 @@ class DataQualityRuleRelationshipAttributes(AssetRelationshipAttributes): dq_rule_reference_columns: Union[List[RelatedColumn], None, UnsetType] = UNSET """Columns referenced in this rule.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1312,7 +1300,7 @@ class DataQualityRuleRelationshipAttributes(AssetRelationshipAttributes): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -1350,6 +1338,8 @@ class DataQualityRuleNested(AssetNested): "anomalo_checks", "application", "application_field", + "data_contract_latest", + "data_contract_latest_certified", "output_port_data_products", "input_port_data_products", "model_implemented_entities", @@ -1362,6 +1352,7 @@ class DataQualityRuleNested(AssetNested): "dq_rule_reference_datasets", "dq_reference_dataset_rules", "dq_rule_reference_columns", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1415,7 +1406,10 @@ def _populate_data_quality_rule_attrs( attrs.dq_rule_custom_sql_return_type = obj.dq_rule_custom_sql_return_type attrs.dq_rule_failed_rows_sql = obj.dq_rule_failed_rows_sql attrs.dq_rule_row_scope_filtering_enabled = obj.dq_rule_row_scope_filtering_enabled + attrs.dq_rule_latest_result_details = obj.dq_rule_latest_result_details + attrs.dq_rule_ad_status = obj.dq_rule_ad_status attrs.dq_is_part_of_contract = obj.dq_is_part_of_contract + attrs.catalog_dataset_guid = obj.catalog_dataset_guid def _extract_data_quality_rule_attrs(attrs: 
DataQualityRuleAttributes) -> dict: @@ -1460,7 +1454,10 @@ def _extract_data_quality_rule_attrs(attrs: DataQualityRuleAttributes) -> dict: result["dq_rule_row_scope_filtering_enabled"] = ( attrs.dq_rule_row_scope_filtering_enabled ) + result["dq_rule_latest_result_details"] = attrs.dq_rule_latest_result_details + result["dq_rule_ad_status"] = attrs.dq_rule_ad_status result["dq_is_part_of_contract"] = attrs.dq_is_part_of_contract + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -1501,9 +1498,6 @@ def _data_quality_rule_to_nested( is_incomplete=data_quality_rule.is_incomplete, provenance_type=data_quality_rule.provenance_type, home_id=data_quality_rule.home_id, - depth=data_quality_rule.depth, - immediate_upstream=data_quality_rule.immediate_upstream, - immediate_downstream=data_quality_rule.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1537,6 +1531,7 @@ def _data_quality_rule_from_nested(nested: DataQualityRuleNested) -> DataQuality updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1545,9 +1540,6 @@ def _data_quality_rule_from_nested(nested: DataQualityRuleNested) -> DataQuality is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_quality_rule_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1640,14 +1632,25 @@ def _data_quality_rule_from_nested_bytes(data: bytes, serde: Serde) -> DataQuali DataQualityRule.DQ_RULE_ROW_SCOPE_FILTERING_ENABLED = BooleanField( "dqRuleRowScopeFilteringEnabled", "dqRuleRowScopeFilteringEnabled" ) 
+DataQualityRule.DQ_RULE_LATEST_RESULT_DETAILS = KeywordField( + "dqRuleLatestResultDetails", "dqRuleLatestResultDetails" +) +DataQualityRule.DQ_RULE_AD_STATUS = KeywordField("dqRuleADStatus", "dqRuleADStatus") DataQualityRule.DQ_IS_PART_OF_CONTRACT = BooleanField( "dqIsPartOfContract", "dqIsPartOfContract" ) +DataQualityRule.CATALOG_DATASET_GUID = KeywordField( + "catalogDatasetGuid", "catalogDatasetGuid" +) DataQualityRule.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") DataQualityRule.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") DataQualityRule.ANOMALO_CHECKS = RelationField("anomaloChecks") DataQualityRule.APPLICATION = RelationField("application") DataQualityRule.APPLICATION_FIELD = RelationField("applicationField") +DataQualityRule.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +DataQualityRule.DATA_CONTRACT_LATEST_CERTIFIED = RelationField( + "dataContractLatestCertified" +) DataQualityRule.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") DataQualityRule.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") DataQualityRule.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") @@ -1662,6 +1665,9 @@ def _data_quality_rule_from_nested_bytes(data: bytes, serde: Serde) -> DataQuali DataQualityRule.DQ_RULE_REFERENCE_DATASETS = RelationField("dqRuleReferenceDatasets") DataQualityRule.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") DataQualityRule.DQ_RULE_REFERENCE_COLUMNS = RelationField("dqRuleReferenceColumns") +DataQualityRule.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataQualityRule.MEANINGS = RelationField("meanings") DataQualityRule.MC_MONITORS = RelationField("mcMonitors") DataQualityRule.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_quality_rule_template.py b/pyatlan_v9/model/assets/data_quality_rule_template.py index 1c45e3966..69039faaa 
100644 --- a/pyatlan_v9/model/assets/data_quality_rule_template.py +++ b/pyatlan_v9/model/assets/data_quality_rule_template.py @@ -39,11 +39,8 @@ ) from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct -from .data_quality_related import ( - RelatedDataQualityRule, - RelatedDataQualityRuleTemplate, - RelatedMetric, -) +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +83,7 @@ class DataQualityRuleTemplate(Asset): DQ_RULES: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +101,8 @@ class DataQualityRuleTemplate(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataQualityRuleTemplate" + dq_rule_template_dimension: Union[str, None, UnsetType] = UNSET """Name of the dimension the rule belongs to.""" @@ -167,6 +167,11 @@ class DataQualityRuleTemplate(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -222,68 +227,6 @@ class DataQualityRuleTemplate(Asset): def __post_init__(self) -> None: self.type_name = "DataQualityRuleTemplate" - # 
========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataQualityRuleTemplate instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataQualityRuleTemplate validation failed: {errors}") - - def minimize(self) -> "DataQualityRuleTemplate": - """ - Return a minimal copy of this DataQualityRuleTemplate with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataQualityRuleTemplate with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataQualityRuleTemplate instance with only the minimum required fields. - """ - self.validate() - return DataQualityRuleTemplate( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDataQualityRuleTemplate": - """ - Create a :class:`RelatedDataQualityRuleTemplate` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataQualityRuleTemplate reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataQualityRuleTemplate(guid=self.guid) - return RelatedDataQualityRuleTemplate(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -409,6 +352,11 @@ class DataQualityRuleTemplateRelationshipAttributes(AssetRelationshipAttributes) ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -498,6 +446,7 @@ class DataQualityRuleTemplateNested(AssetNested): "dq_rules", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -581,9 +530,6 @@ def _data_quality_rule_template_to_nested( is_incomplete=data_quality_rule_template.is_incomplete, provenance_type=data_quality_rule_template.provenance_type, home_id=data_quality_rule_template.home_id, - depth=data_quality_rule_template.depth, - immediate_upstream=data_quality_rule_template.immediate_upstream, - immediate_downstream=data_quality_rule_template.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -619,6 +565,7 @@ def _data_quality_rule_template_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, 
business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -627,9 +574,6 @@ def _data_quality_rule_template_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_quality_rule_template_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -706,6 +650,9 @@ def _data_quality_rule_template_from_nested_bytes( DataQualityRuleTemplate.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +DataQualityRuleTemplate.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataQualityRuleTemplate.MEANINGS = RelationField("meanings") DataQualityRuleTemplate.MC_MONITORS = RelationField("mcMonitors") DataQualityRuleTemplate.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_set.py b/pyatlan_v9/model/assets/data_set.py index 1cf1b1141..6fb395d77 100644 --- a/pyatlan_v9/model/assets/data_set.py +++ b/pyatlan_v9/model/assets/data_set.py @@ -27,10 +27,10 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedDataSet from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable import ( @@ -196,6 +196,11 @@ class DataSet(Referenceable): ASSET_SODA_CHECK_STATUSES: ClassVar[Any] = None ASSET_SODA_SOURCE_URL: ClassVar[Any] = None ASSET_ICON: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_SCORE_VALUE: ClassVar[Any] = None + 
ASSET_EXTERNAL_DQ_TEST_ENTITIES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_AVG_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_MIN_SCORES: ClassVar[Any] = None ASSET_EXTERNAL_DQ_METADATA_DETAILS: ClassVar[Any] = None IS_PARTIAL: ClassVar[Any] = None IS_AI_GENERATED: ClassVar[Any] = None @@ -260,6 +265,7 @@ class DataSet(Referenceable): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -271,6 +277,8 @@ class DataSet(Referenceable): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataSet" + name: Union[str, None, UnsetType] = UNSET """Name of this asset. Fallback for display purposes, if displayName is empty.""" @@ -714,6 +722,31 @@ class DataSet(Referenceable): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. (Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. 
Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -982,6 +1015,11 @@ class DataSet(Referenceable): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1019,66 +1057,6 @@ class DataSet(Referenceable): def __post_init__(self) -> None: self.type_name = "DataSet" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataSet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataSet validation failed: {errors}") - - def minimize(self) -> "DataSet": - """ - Return a minimal copy of this DataSet with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataSet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataSet instance with only the minimum required fields. - """ - self.validate() - return DataSet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataSet": - """ - Create a :class:`RelatedDataSet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataSet reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataSet(guid=self.guid) - return RelatedDataSet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1577,6 +1555,31 @@ class DataSetAttributes(ReferenceableAttributes): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. 
(Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -1849,6 +1852,11 @@ class DataSetRelationshipAttributes(ReferenceableRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1913,6 +1921,7 @@ class DataSetNested(ReferenceableNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", 
"mc_monitors", "mc_incidents", @@ -2106,6 +2115,13 @@ def _populate_data_set_attrs(attrs: DataSetAttributes, obj: DataSet) -> None: attrs.asset_soda_check_statuses = obj.asset_soda_check_statuses attrs.asset_soda_source_url = obj.asset_soda_source_url attrs.asset_icon = obj.asset_icon + attrs.asset_external_dq_score_value = obj.asset_external_dq_score_value + attrs.asset_external_dq_test_entities = obj.asset_external_dq_test_entities + attrs.asset_external_dq_test_latest_scores = ( + obj.asset_external_dq_test_latest_scores + ) + attrs.asset_external_dq_test_avg_scores = obj.asset_external_dq_test_avg_scores + attrs.asset_external_dq_test_min_scores = obj.asset_external_dq_test_min_scores attrs.asset_external_dq_metadata_details = obj.asset_external_dq_metadata_details attrs.is_partial = obj.is_partial attrs.is_ai_generated = obj.is_ai_generated @@ -2380,6 +2396,17 @@ def _extract_data_set_attrs(attrs: DataSetAttributes) -> dict: result["asset_soda_check_statuses"] = attrs.asset_soda_check_statuses result["asset_soda_source_url"] = attrs.asset_soda_source_url result["asset_icon"] = attrs.asset_icon + result["asset_external_dq_score_value"] = attrs.asset_external_dq_score_value + result["asset_external_dq_test_entities"] = attrs.asset_external_dq_test_entities + result["asset_external_dq_test_latest_scores"] = ( + attrs.asset_external_dq_test_latest_scores + ) + result["asset_external_dq_test_avg_scores"] = ( + attrs.asset_external_dq_test_avg_scores + ) + result["asset_external_dq_test_min_scores"] = ( + attrs.asset_external_dq_test_min_scores + ) result["asset_external_dq_metadata_details"] = ( attrs.asset_external_dq_metadata_details ) @@ -2494,9 +2521,6 @@ def _data_set_to_nested(data_set: DataSet) -> DataSetNested: is_incomplete=data_set.is_incomplete, provenance_type=data_set.provenance_type, home_id=data_set.home_id, - depth=data_set.depth, - immediate_upstream=data_set.immediate_upstream, - immediate_downstream=data_set.immediate_downstream, 
attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2526,6 +2550,7 @@ def _data_set_from_nested(nested: DataSetNested) -> DataSet: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -2534,9 +2559,6 @@ def _data_set_from_nested(nested: DataSetNested) -> DataSet: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_set_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2894,6 +2916,21 @@ def _data_set_from_nested_bytes(data: bytes, serde: Serde) -> DataSet: ) DataSet.ASSET_SODA_SOURCE_URL = KeywordField("assetSodaSourceURL", "assetSodaSourceURL") DataSet.ASSET_ICON = KeywordField("assetIcon", "assetIcon") +DataSet.ASSET_EXTERNAL_DQ_SCORE_VALUE = NumericField( + "assetExternalDQScoreValue", "assetExternalDQScoreValue" +) +DataSet.ASSET_EXTERNAL_DQ_TEST_ENTITIES = KeywordField( + "assetExternalDQTestEntities", "assetExternalDQTestEntities" +) +DataSet.ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES = NumericField( + "assetExternalDQTestLatestScores", "assetExternalDQTestLatestScores" +) +DataSet.ASSET_EXTERNAL_DQ_TEST_AVG_SCORES = NumericField( + "assetExternalDQTestAvgScores", "assetExternalDQTestAvgScores" +) +DataSet.ASSET_EXTERNAL_DQ_TEST_MIN_SCORES = NumericField( + "assetExternalDQTestMinScores", "assetExternalDQTestMinScores" +) DataSet.ASSET_EXTERNAL_DQ_METADATA_DETAILS = KeywordField( "assetExternalDQMetadataDetails", "assetExternalDQMetadataDetails" ) @@ -3043,6 +3080,9 @@ def _data_set_from_nested_bytes(data: bytes, serde: Serde) -> DataSet: DataSet.METRICS = RelationField("metrics") 
DataSet.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataSet.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DataSet.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataSet.MEANINGS = RelationField("meanings") DataSet.MC_MONITORS = RelationField("mcMonitors") DataSet.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_studio.py b/pyatlan_v9/model/assets/data_studio.py index 98db17740..222fd8261 100644 --- a/pyatlan_v9/model/assets/data_studio.py +++ b/pyatlan_v9/model/assets/data_studio.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .data_studio_related import RelatedDataStudio +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -87,6 +87,7 @@ class DataStudio(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +105,8 @@ class DataStudio(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataStudio" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -180,6 +183,11 @@ class DataStudio(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries 
(assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -235,66 +243,6 @@ class DataStudio(Asset): def __post_init__(self) -> None: self.type_name = "DataStudio" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataStudio instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataStudio validation failed: {errors}") - - def minimize(self) -> "DataStudio": - """ - Return a minimal copy of this DataStudio with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataStudio with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataStudio instance with only the minimum required fields. 
- """ - self.validate() - return DataStudio(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataStudio": - """ - Create a :class:`RelatedDataStudio` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataStudio reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataStudio(guid=self.guid) - return RelatedDataStudio(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -430,6 +378,11 @@ class DataStudioRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -516,6 +469,7 @@ class DataStudioNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -599,9 +553,6 @@ def _data_studio_to_nested(data_studio: DataStudio) -> DataStudioNested: is_incomplete=data_studio.is_incomplete, provenance_type=data_studio.provenance_type, home_id=data_studio.home_id, - depth=data_studio.depth, - immediate_upstream=data_studio.immediate_upstream, - immediate_downstream=data_studio.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -633,6 +584,7 @@ def _data_studio_from_nested(nested: DataStudioNested) -> 
DataStudio: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -641,9 +593,6 @@ def _data_studio_from_nested(nested: DataStudioNested) -> DataStudio: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_data_studio_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -707,6 +656,9 @@ def _data_studio_from_nested_bytes(data: bytes, serde: Serde) -> DataStudio: DataStudio.METRICS = RelationField("metrics") DataStudio.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataStudio.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DataStudio.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataStudio.MEANINGS = RelationField("meanings") DataStudio.MC_MONITORS = RelationField("mcMonitors") DataStudio.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/data_studio_asset.py b/pyatlan_v9/model/assets/data_studio_asset.py index 8f9125e44..84ecf7c3a 100644 --- a/pyatlan_v9/model/assets/data_studio_asset.py +++ b/pyatlan_v9/model/assets/data_studio_asset.py @@ -41,7 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .data_studio_related import RelatedDataStudioAsset +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -92,6 +92,7 
@@ class DataStudioAsset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -109,6 +110,8 @@ class DataStudioAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataStudioAsset" + data_studio_asset_type: Union[str, None, UnsetType] = UNSET """Type of the Google Data Studio asset, for example: REPORT or DATA_SOURCE.""" @@ -197,6 +200,11 @@ class DataStudioAsset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -252,69 +260,6 @@ class DataStudioAsset(Asset): def __post_init__(self) -> None: self.type_name = "DataStudioAsset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataStudioAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.data_studio_asset_type is UNSET: - errors.append("data_studio_asset_type is required for creation") - if errors: - raise ValueError(f"DataStudioAsset validation failed: {errors}") - - def minimize(self) -> "DataStudioAsset": - """ - Return a minimal copy of this DataStudioAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataStudioAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataStudioAsset instance with only the minimum required fields. - """ - self.validate() - return DataStudioAsset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataStudioAsset": - """ - Create a :class:`RelatedDataStudioAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataStudioAsset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataStudioAsset(guid=self.guid) - return RelatedDataStudioAsset(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -501,6 +446,11 @@ class DataStudioAssetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -589,6 +539,7 @@ class DataStudioAssetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -686,9 +637,6 @@ def _data_studio_asset_to_nested( is_incomplete=data_studio_asset.is_incomplete, provenance_type=data_studio_asset.provenance_type, home_id=data_studio_asset.home_id, - depth=data_studio_asset.depth, - immediate_upstream=data_studio_asset.immediate_upstream, - immediate_downstream=data_studio_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -722,6 +670,7 @@ def _data_studio_asset_from_nested(nested: DataStudioAssetNested) -> DataStudioA updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -730,9 +679,6 @@ def _data_studio_asset_from_nested(nested: DataStudioAssetNested) -> DataStudioA is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_data_studio_asset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -815,6 +761,9 @@ def _data_studio_asset_from_nested_bytes(data: bytes, serde: Serde) -> DataStudi DataStudioAsset.METRICS = RelationField("metrics") DataStudioAsset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataStudioAsset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DataStudioAsset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataStudioAsset.MEANINGS = RelationField("meanings") DataStudioAsset.MC_MONITORS = RelationField("mcMonitors") DataStudioAsset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/database.py b/pyatlan_v9/model/assets/database.py index 236c5f35b..673263813 100644 --- a/pyatlan_v9/model/assets/database.py +++ b/pyatlan_v9/model/assets/database.py @@ -49,6 +49,7 @@ RelatedDbtTest, ) from .fabric_related import RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -64,7 +65,7 @@ RelatedSqlInsightBusinessQuestion, RelatedSqlInsightJoin, ) -from .sql_related import RelatedDatabase, RelatedSchema +from .sql_related import RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -124,6 +125,7 @@ class Database(Asset): SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -146,6 +148,8 @@ class Database(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None 
SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Database" + schema_count: Union[int, None, UnsetType] = UNSET """Number of schemas in this database.""" @@ -295,6 +299,11 @@ class Database(Asset): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -373,66 +382,6 @@ class Database(Asset): def __post_init__(self) -> None: self.type_name = "Database" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Database instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Database validation failed: {errors}") - - def minimize(self) -> "Database": - """ - Return a minimal copy of this Database with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Database with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Database instance with only the minimum required fields. - """ - self.validate() - return Database(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabase": - """ - Create a :class:`RelatedDatabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabase reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDatabase(guid=self.guid) - return RelatedDatabase(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -699,6 +648,11 @@ class DatabaseRelationshipAttributes(AssetRelationshipAttributes): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -815,6 +769,7 @@ class DatabaseNested(AssetNested): "sql_dbt_sources", "dbt_seed_assets", "fabric_workspace", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -949,9 +904,6 @@ def _database_to_nested(database: Database) -> DatabaseNested: is_incomplete=database.is_incomplete, provenance_type=database.provenance_type, home_id=database.home_id, - depth=database.depth, - immediate_upstream=database.immediate_upstream, - immediate_downstream=database.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -983,6 +935,7 @@ def _database_from_nested(nested: DatabaseNested) -> Database: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -991,9 +944,6 @@ def _database_from_nested(nested: DatabaseNested) -> Database: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_database_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1091,6 +1041,9 @@ def _database_from_nested_bytes(data: bytes, serde: Serde) -> Database: Database.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Database.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") Database.FABRIC_WORKSPACE = RelationField("fabricWorkspace") +Database.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Database.MEANINGS = RelationField("meanings") Database.MC_MONITORS = RelationField("mcMonitors") Database.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/databricks.py b/pyatlan_v9/model/assets/databricks.py index e686b9b4e..df5615cea 100644 --- a/pyatlan_v9/model/assets/databricks.py +++ b/pyatlan_v9/model/assets/databricks.py @@ -41,13 +41,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricks from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -120,6 +120,7 @@ class Databricks(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -141,6 +142,8 @@ class Databricks(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Databricks" + query_count: Union[int, None, UnsetType] = UNSET 
"""Number of times this asset has been queried.""" @@ -284,6 +287,11 @@ class Databricks(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -359,66 +367,6 @@ class Databricks(Asset): def __post_init__(self) -> None: self.type_name = "Databricks" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Databricks instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Databricks validation failed: {errors}") - - def minimize(self) -> "Databricks": - """ - Return a minimal copy of this Databricks with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Databricks with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Databricks instance with only the minimum required fields. - """ - self.validate() - return Databricks(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricks": - """ - Create a :class:`RelatedDatabricks` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricks reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricks(guid=self.guid) - return RelatedDatabricks(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -621,6 +569,11 @@ class DatabricksRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -733,6 +686,7 @@ class DatabricksNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -864,9 +818,6 @@ def _databricks_to_nested(databricks: Databricks) -> DatabricksNested: is_incomplete=databricks.is_incomplete, provenance_type=databricks.provenance_type, 
home_id=databricks.home_id, - depth=databricks.depth, - immediate_upstream=databricks.immediate_upstream, - immediate_downstream=databricks.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -898,6 +849,7 @@ def _databricks_from_nested(nested: DatabricksNested) -> Databricks: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -906,9 +858,6 @@ def _databricks_from_nested(nested: DatabricksNested) -> Databricks: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1008,6 +957,9 @@ def _databricks_from_nested_bytes(data: bytes, serde: Serde) -> Databricks: Databricks.DBT_SOURCES = RelationField("dbtSources") Databricks.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Databricks.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Databricks.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Databricks.MEANINGS = RelationField("meanings") Databricks.MC_MONITORS = RelationField("mcMonitors") Databricks.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/databricks_ai_model_context.py b/pyatlan_v9/model/assets/databricks_ai_model_context.py index fe635aad8..0ba0913d1 100644 --- a/pyatlan_v9/model/assets/databricks_ai_model_context.py +++ b/pyatlan_v9/model/assets/databricks_ai_model_context.py @@ -43,16 +43,14 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric -from .databricks_related import ( - RelatedDatabricksAIModelContext, - RelatedDatabricksAIModelVersion, -) +from .databricks_related import RelatedDatabricksAIModelVersion from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -81,7 +79,7 @@ class DatabricksAIModelContext(Asset): Instance of an ai model in databricks. """ - DATABRICKS_AI_MODEL_CONTEXT_METASTORE_ID: ClassVar[Any] = None + DATABRICKS_METASTORE_ID: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -141,6 +139,7 @@ class DatabricksAIModelContext(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -162,9 +161,9 @@ class DatabricksAIModelContext(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - databricks_ai_model_context_metastore_id: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelContextMetastoreId") - ) + type_name: Union[str, UnsetType] = "DatabricksAIModelContext" + + databricks_metastore_id: Union[str, None, UnsetType] = UNSET """The id of the model, common across versions.""" query_count: Union[int, None, UnsetType] = UNSET @@ -372,6 +371,11 @@ class DatabricksAIModelContext(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + 
List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -455,80 +459,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksAIModelContext instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DatabricksAIModelContext validation failed: {errors}") - - def minimize(self) -> "DatabricksAIModelContext": - """ - Return a minimal copy of this DatabricksAIModelContext with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksAIModelContext with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksAIModelContext instance with only the minimum required fields. - """ - self.validate() - return DatabricksAIModelContext( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDatabricksAIModelContext": - """ - Create a :class:`RelatedDatabricksAIModelContext` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedDatabricksAIModelContext reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricksAIModelContext(guid=self.guid) - return RelatedDatabricksAIModelContext(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -586,9 +516,7 @@ def from_json( class DatabricksAIModelContextAttributes(AssetAttributes): """DatabricksAIModelContext-specific attributes for nested API format.""" - databricks_ai_model_context_metastore_id: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelContextMetastoreId") - ) + databricks_metastore_id: Union[str, None, UnsetType] = UNSET """The id of the model, common across versions.""" query_count: Union[int, None, UnsetType] = UNSET @@ -800,6 +728,11 @@ class DatabricksAIModelContextRelationshipAttributes(AssetRelationshipAttributes dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -918,6 +851,7 @@ class DatabricksAIModelContextNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -946,9 +880,7 @@ def _populate_databricks_ai_model_context_attrs( ) -> None: """Populate DatabricksAIModelContext-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.databricks_ai_model_context_metastore_id = ( - 
obj.databricks_ai_model_context_metastore_id - ) + attrs.databricks_metastore_id = obj.databricks_metastore_id attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -999,9 +931,7 @@ def _extract_databricks_ai_model_context_attrs( ) -> dict: """Extract all DatabricksAIModelContext attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["databricks_ai_model_context_metastore_id"] = ( - attrs.databricks_ai_model_context_metastore_id - ) + result["databricks_metastore_id"] = attrs.databricks_metastore_id result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -1093,9 +1023,6 @@ def _databricks_ai_model_context_to_nested( is_incomplete=databricks_ai_model_context.is_incomplete, provenance_type=databricks_ai_model_context.provenance_type, home_id=databricks_ai_model_context.home_id, - depth=databricks_ai_model_context.depth, - immediate_upstream=databricks_ai_model_context.immediate_upstream, - immediate_downstream=databricks_ai_model_context.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1131,6 +1058,7 @@ def _databricks_ai_model_context_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1139,9 +1067,6 @@ def _databricks_ai_model_context_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_ai_model_context_attrs(attrs), # Merged relationship attributes **merged_rels, 
@@ -1175,8 +1100,8 @@ def _databricks_ai_model_context_from_nested_bytes( RelationField, ) -DatabricksAIModelContext.DATABRICKS_AI_MODEL_CONTEXT_METASTORE_ID = KeywordField( - "databricksAIModelContextMetastoreId", "databricksAIModelContextMetastoreId" +DatabricksAIModelContext.DATABRICKS_METASTORE_ID = KeywordField( + "databricksMetastoreId", "databricksMetastoreId" ) DatabricksAIModelContext.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksAIModelContext.QUERY_USER_COUNT = NumericField( @@ -1311,6 +1236,9 @@ def _databricks_ai_model_context_from_nested_bytes( DatabricksAIModelContext.DBT_SOURCES = RelationField("dbtSources") DatabricksAIModelContext.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DatabricksAIModelContext.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +DatabricksAIModelContext.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DatabricksAIModelContext.MEANINGS = RelationField("meanings") DatabricksAIModelContext.MC_MONITORS = RelationField("mcMonitors") DatabricksAIModelContext.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/databricks_ai_model_version.py b/pyatlan_v9/model/assets/databricks_ai_model_version.py index 03c130ae8..518fc5662 100644 --- a/pyatlan_v9/model/assets/databricks_ai_model_version.py +++ b/pyatlan_v9/model/assets/databricks_ai_model_version.py @@ -43,16 +43,14 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import ( - RelatedDatabricksAIModelContext, - RelatedDatabricksAIModelVersion, -) +from .databricks_related import RelatedDatabricksAIModelContext from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from 
.model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,18 +78,18 @@ class DatabricksAIModelVersion(Asset): Instance of an ai model version in databricks. """ - DATABRICKS_AI_MODEL_VERSION_ID: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_RUN_ID: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_RUN_NAME: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_RUN_START_TIME: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_RUN_END_TIME: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_STATUS: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_ALIASES: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_DATASET_COUNT: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_SOURCE: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_ARTIFACT_URI: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_METRICS: ClassVar[Any] = None - DATABRICKS_AI_MODEL_VERSION_PARAMS: ClassVar[Any] = None + DATABRICKS_ID: ClassVar[Any] = None + DATABRICKS_RUN_ID: ClassVar[Any] = None + DATABRICKS_RUN_NAME: ClassVar[Any] = None + DATABRICKS_RUN_START_TIME: ClassVar[Any] = None + DATABRICKS_RUN_END_TIME: ClassVar[Any] = None + DATABRICKS_STATUS: ClassVar[Any] = None + DATABRICKS_ALIASES: ClassVar[Any] = None + DATABRICKS_DATASET_COUNT: ClassVar[Any] = None + DATABRICKS_SOURCE: ClassVar[Any] = None + DATABRICKS_ARTIFACT_URI: ClassVar[Any] = None + DATABRICKS_METRICS: ClassVar[Any] = None + DATABRICKS_PARAMS: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -149,6 +147,7 @@ class DatabricksAIModelVersion(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -170,64 +169,42 @@ class 
DatabricksAIModelVersion(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - databricks_ai_model_version_id: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionId" - ) + type_name: Union[str, UnsetType] = "DatabricksAIModelVersion" + + databricks_id: Union[int, None, UnsetType] = UNSET """The id of the model, unique to every version.""" - databricks_ai_model_version_run_id: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunId" - ) + databricks_run_id: Union[str, None, UnsetType] = UNSET """The run id of the model.""" - databricks_ai_model_version_run_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunName" - ) + databricks_run_name: Union[str, None, UnsetType] = UNSET """The run name of the model.""" - databricks_ai_model_version_run_start_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunStartTime") - ) + databricks_run_start_time: Union[int, None, UnsetType] = UNSET """The run start time of the model.""" - databricks_ai_model_version_run_end_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunEndTime") - ) + databricks_run_end_time: Union[int, None, UnsetType] = UNSET """The run end time of the model.""" - databricks_ai_model_version_status: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionStatus" - ) + databricks_status: Union[str, None, UnsetType] = UNSET """The status of the model.""" - databricks_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionAliases") - ) + databricks_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases of the model.""" - databricks_ai_model_version_dataset_count: Union[int, None, UnsetType] = ( - 
msgspec.field(default=UNSET, name="databricksAIModelVersionDatasetCount") - ) + databricks_dataset_count: Union[int, None, UnsetType] = UNSET """Number of datasets.""" - databricks_ai_model_version_source: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionSource" - ) + databricks_source: Union[str, None, UnsetType] = UNSET """Source artifact link for the model.""" - databricks_ai_model_version_artifact_uri: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionArtifactUri") - ) + databricks_artifact_uri: Union[str, None, UnsetType] = UNSET """Artifact uri for the model.""" - databricks_ai_model_version_metrics: Union[ - List[Dict[str, Any]], None, UnsetType - ] = msgspec.field(default=UNSET, name="databricksAIModelVersionMetrics") + databricks_metrics: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - databricks_ai_model_version_params: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionParams") - ) + databricks_params: Union[Dict[str, str], None, UnsetType] = UNSET """Params with key mapped to value for an individual experiment.""" query_count: Union[int, None, UnsetType] = UNSET @@ -425,6 +402,11 @@ class DatabricksAIModelVersion(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -508,82 +490,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksAIModelVersion 
instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.databricks_ai_model_context is UNSET: - errors.append("databricks_ai_model_context is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DatabricksAIModelVersion validation failed: {errors}") - - def minimize(self) -> "DatabricksAIModelVersion": - """ - Return a minimal copy of this DatabricksAIModelVersion with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksAIModelVersion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksAIModelVersion instance with only the minimum required fields. - """ - self.validate() - return DatabricksAIModelVersion( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDatabricksAIModelVersion": - """ - Create a :class:`RelatedDatabricksAIModelVersion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksAIModelVersion reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricksAIModelVersion(guid=self.guid) - return RelatedDatabricksAIModelVersion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -641,64 +547,40 @@ def from_json( class DatabricksAIModelVersionAttributes(AssetAttributes): """DatabricksAIModelVersion-specific attributes for nested API format.""" - databricks_ai_model_version_id: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionId" - ) + databricks_id: Union[int, None, UnsetType] = UNSET """The id of the model, unique to every version.""" - databricks_ai_model_version_run_id: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunId" - ) + databricks_run_id: Union[str, None, UnsetType] = UNSET """The run id of the model.""" - databricks_ai_model_version_run_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunName" - ) + databricks_run_name: 
Union[str, None, UnsetType] = UNSET """The run name of the model.""" - databricks_ai_model_version_run_start_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunStartTime") - ) + databricks_run_start_time: Union[int, None, UnsetType] = UNSET """The run start time of the model.""" - databricks_ai_model_version_run_end_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunEndTime") - ) + databricks_run_end_time: Union[int, None, UnsetType] = UNSET """The run end time of the model.""" - databricks_ai_model_version_status: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionStatus" - ) + databricks_status: Union[str, None, UnsetType] = UNSET """The status of the model.""" - databricks_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionAliases") - ) + databricks_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases of the model.""" - databricks_ai_model_version_dataset_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionDatasetCount") - ) + databricks_dataset_count: Union[int, None, UnsetType] = UNSET """Number of datasets.""" - databricks_ai_model_version_source: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionSource" - ) + databricks_source: Union[str, None, UnsetType] = UNSET """Source artifact link for the model.""" - databricks_ai_model_version_artifact_uri: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionArtifactUri") - ) + databricks_artifact_uri: Union[str, None, UnsetType] = UNSET """Artifact uri for the model.""" - databricks_ai_model_version_metrics: Union[ - List[Dict[str, Any]], None, UnsetType - ] = msgspec.field(default=UNSET, name="databricksAIModelVersionMetrics") + databricks_metrics: 
Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - databricks_ai_model_version_params: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionParams") - ) + databricks_params: Union[Dict[str, str], None, UnsetType] = UNSET """Params with key mapped to value for an individual experiment.""" query_count: Union[int, None, UnsetType] = UNSET @@ -900,6 +782,11 @@ class DatabricksAIModelVersionRelationshipAttributes(AssetRelationshipAttributes dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1016,6 +903,7 @@ class DatabricksAIModelVersionNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1044,28 +932,18 @@ def _populate_databricks_ai_model_version_attrs( ) -> None: """Populate DatabricksAIModelVersion-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.databricks_ai_model_version_id = obj.databricks_ai_model_version_id - attrs.databricks_ai_model_version_run_id = obj.databricks_ai_model_version_run_id - attrs.databricks_ai_model_version_run_name = ( - obj.databricks_ai_model_version_run_name - ) - attrs.databricks_ai_model_version_run_start_time = ( - obj.databricks_ai_model_version_run_start_time - ) - attrs.databricks_ai_model_version_run_end_time = ( - obj.databricks_ai_model_version_run_end_time - ) - attrs.databricks_ai_model_version_status = obj.databricks_ai_model_version_status - attrs.databricks_ai_model_version_aliases = 
obj.databricks_ai_model_version_aliases - attrs.databricks_ai_model_version_dataset_count = ( - obj.databricks_ai_model_version_dataset_count - ) - attrs.databricks_ai_model_version_source = obj.databricks_ai_model_version_source - attrs.databricks_ai_model_version_artifact_uri = ( - obj.databricks_ai_model_version_artifact_uri - ) - attrs.databricks_ai_model_version_metrics = obj.databricks_ai_model_version_metrics - attrs.databricks_ai_model_version_params = obj.databricks_ai_model_version_params + attrs.databricks_id = obj.databricks_id + attrs.databricks_run_id = obj.databricks_run_id + attrs.databricks_run_name = obj.databricks_run_name + attrs.databricks_run_start_time = obj.databricks_run_start_time + attrs.databricks_run_end_time = obj.databricks_run_end_time + attrs.databricks_status = obj.databricks_status + attrs.databricks_aliases = obj.databricks_aliases + attrs.databricks_dataset_count = obj.databricks_dataset_count + attrs.databricks_source = obj.databricks_source + attrs.databricks_artifact_uri = obj.databricks_artifact_uri + attrs.databricks_metrics = obj.databricks_metrics + attrs.databricks_params = obj.databricks_params attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -1116,40 +994,18 @@ def _extract_databricks_ai_model_version_attrs( ) -> dict: """Extract all DatabricksAIModelVersion attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["databricks_ai_model_version_id"] = attrs.databricks_ai_model_version_id - result["databricks_ai_model_version_run_id"] = ( - attrs.databricks_ai_model_version_run_id - ) - result["databricks_ai_model_version_run_name"] = ( - attrs.databricks_ai_model_version_run_name - ) - result["databricks_ai_model_version_run_start_time"] = ( - attrs.databricks_ai_model_version_run_start_time - ) - result["databricks_ai_model_version_run_end_time"] = ( - attrs.databricks_ai_model_version_run_end_time - 
) - result["databricks_ai_model_version_status"] = ( - attrs.databricks_ai_model_version_status - ) - result["databricks_ai_model_version_aliases"] = ( - attrs.databricks_ai_model_version_aliases - ) - result["databricks_ai_model_version_dataset_count"] = ( - attrs.databricks_ai_model_version_dataset_count - ) - result["databricks_ai_model_version_source"] = ( - attrs.databricks_ai_model_version_source - ) - result["databricks_ai_model_version_artifact_uri"] = ( - attrs.databricks_ai_model_version_artifact_uri - ) - result["databricks_ai_model_version_metrics"] = ( - attrs.databricks_ai_model_version_metrics - ) - result["databricks_ai_model_version_params"] = ( - attrs.databricks_ai_model_version_params - ) + result["databricks_id"] = attrs.databricks_id + result["databricks_run_id"] = attrs.databricks_run_id + result["databricks_run_name"] = attrs.databricks_run_name + result["databricks_run_start_time"] = attrs.databricks_run_start_time + result["databricks_run_end_time"] = attrs.databricks_run_end_time + result["databricks_status"] = attrs.databricks_status + result["databricks_aliases"] = attrs.databricks_aliases + result["databricks_dataset_count"] = attrs.databricks_dataset_count + result["databricks_source"] = attrs.databricks_source + result["databricks_artifact_uri"] = attrs.databricks_artifact_uri + result["databricks_metrics"] = attrs.databricks_metrics + result["databricks_params"] = attrs.databricks_params result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -1241,9 +1097,6 @@ def _databricks_ai_model_version_to_nested( is_incomplete=databricks_ai_model_version.is_incomplete, provenance_type=databricks_ai_model_version.provenance_type, home_id=databricks_ai_model_version.home_id, - depth=databricks_ai_model_version.depth, - immediate_upstream=databricks_ai_model_version.immediate_upstream, - immediate_downstream=databricks_ai_model_version.immediate_downstream, 
attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1279,6 +1132,7 @@ def _databricks_ai_model_version_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1287,9 +1141,6 @@ def _databricks_ai_model_version_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_ai_model_version_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1323,41 +1174,39 @@ def _databricks_ai_model_version_from_nested_bytes( RelationField, ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_ID = NumericField( - "databricksAIModelVersionId", "databricksAIModelVersionId" +DatabricksAIModelVersion.DATABRICKS_ID = NumericField("databricksId", "databricksId") +DatabricksAIModelVersion.DATABRICKS_RUN_ID = KeywordField( + "databricksRunId", "databricksRunId" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_RUN_ID = KeywordField( - "databricksAIModelVersionRunId", "databricksAIModelVersionRunId" +DatabricksAIModelVersion.DATABRICKS_RUN_NAME = KeywordField( + "databricksRunName", "databricksRunName" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_RUN_NAME = KeywordField( - "databricksAIModelVersionRunName", "databricksAIModelVersionRunName" +DatabricksAIModelVersion.DATABRICKS_RUN_START_TIME = NumericField( + "databricksRunStartTime", "databricksRunStartTime" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_RUN_START_TIME = NumericField( - "databricksAIModelVersionRunStartTime", "databricksAIModelVersionRunStartTime" +DatabricksAIModelVersion.DATABRICKS_RUN_END_TIME = NumericField( + 
"databricksRunEndTime", "databricksRunEndTime" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_RUN_END_TIME = NumericField( - "databricksAIModelVersionRunEndTime", "databricksAIModelVersionRunEndTime" +DatabricksAIModelVersion.DATABRICKS_STATUS = KeywordField( + "databricksStatus", "databricksStatus" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_STATUS = KeywordField( - "databricksAIModelVersionStatus", "databricksAIModelVersionStatus" +DatabricksAIModelVersion.DATABRICKS_ALIASES = KeywordField( + "databricksAliases", "databricksAliases" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_ALIASES = KeywordField( - "databricksAIModelVersionAliases", "databricksAIModelVersionAliases" +DatabricksAIModelVersion.DATABRICKS_DATASET_COUNT = NumericField( + "databricksDatasetCount", "databricksDatasetCount" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_DATASET_COUNT = NumericField( - "databricksAIModelVersionDatasetCount", "databricksAIModelVersionDatasetCount" +DatabricksAIModelVersion.DATABRICKS_SOURCE = KeywordField( + "databricksSource", "databricksSource" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_SOURCE = KeywordField( - "databricksAIModelVersionSource", "databricksAIModelVersionSource" +DatabricksAIModelVersion.DATABRICKS_ARTIFACT_URI = KeywordField( + "databricksArtifactUri", "databricksArtifactUri" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_ARTIFACT_URI = KeywordField( - "databricksAIModelVersionArtifactUri", "databricksAIModelVersionArtifactUri" +DatabricksAIModelVersion.DATABRICKS_METRICS = KeywordField( + "databricksMetrics", "databricksMetrics" ) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_METRICS = KeywordField( - "databricksAIModelVersionMetrics", "databricksAIModelVersionMetrics" -) -DatabricksAIModelVersion.DATABRICKS_AI_MODEL_VERSION_PARAMS = KeywordField( - "databricksAIModelVersionParams", "databricksAIModelVersionParams" +DatabricksAIModelVersion.DATABRICKS_PARAMS = KeywordField( + 
"databricksParams", "databricksParams" ) DatabricksAIModelVersion.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksAIModelVersion.QUERY_USER_COUNT = NumericField( @@ -1488,6 +1337,9 @@ def _databricks_ai_model_version_from_nested_bytes( DatabricksAIModelVersion.DBT_SOURCES = RelationField("dbtSources") DatabricksAIModelVersion.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DatabricksAIModelVersion.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +DatabricksAIModelVersion.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DatabricksAIModelVersion.MEANINGS = RelationField("meanings") DatabricksAIModelVersion.MC_MONITORS = RelationField("mcMonitors") DatabricksAIModelVersion.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/databricks_external_location.py b/pyatlan_v9/model/assets/databricks_external_location.py index 44de29efe..6505120a0 100644 --- a/pyatlan_v9/model/assets/databricks_external_location.py +++ b/pyatlan_v9/model/assets/databricks_external_location.py @@ -41,16 +41,14 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import ( - RelatedDatabricksExternalLocation, - RelatedDatabricksExternalLocationPath, -) +from .databricks_related import RelatedDatabricksExternalLocationPath from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -126,6 +124,7 @@ class DatabricksExternalLocation(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -147,6 +146,8 @@ class DatabricksExternalLocation(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DatabricksExternalLocation" + databricks_url: Union[str, None, UnsetType] = UNSET """URL of the external location.""" @@ -301,6 +302,11 @@ class DatabricksExternalLocation(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -376,68 +382,6 @@ class DatabricksExternalLocation(Asset): def __post_init__(self) -> None: self.type_name = "DatabricksExternalLocation" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksExternalLocation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DatabricksExternalLocation validation failed: {errors}") - - def minimize(self) -> "DatabricksExternalLocation": - """ - Return a minimal copy of this DatabricksExternalLocation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksExternalLocation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksExternalLocation instance with only the minimum required fields. - """ - self.validate() - return DatabricksExternalLocation( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDatabricksExternalLocation": - """ - Create a :class:`RelatedDatabricksExternalLocation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksExternalLocation reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDatabricksExternalLocation(guid=self.guid) - return RelatedDatabricksExternalLocation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -653,6 +597,11 @@ class DatabricksExternalLocationRelationshipAttributes(AssetRelationshipAttribut dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -768,6 +717,7 @@ class DatabricksExternalLocationNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -911,9 +861,6 @@ def _databricks_external_location_to_nested( is_incomplete=databricks_external_location.is_incomplete, provenance_type=databricks_external_location.provenance_type, home_id=databricks_external_location.home_id, - depth=databricks_external_location.depth, - immediate_upstream=databricks_external_location.immediate_upstream, - immediate_downstream=databricks_external_location.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -949,6 +896,7 @@ def _databricks_external_location_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -957,9 +905,6 @@ def _databricks_external_location_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_external_location_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1098,6 +1043,9 @@ def _databricks_external_location_from_nested_bytes( DatabricksExternalLocation.DBT_SOURCES = RelationField("dbtSources") DatabricksExternalLocation.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DatabricksExternalLocation.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +DatabricksExternalLocation.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DatabricksExternalLocation.MEANINGS = RelationField("meanings") DatabricksExternalLocation.MC_MONITORS = RelationField("mcMonitors") DatabricksExternalLocation.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/databricks_external_location_path.py b/pyatlan_v9/model/assets/databricks_external_location_path.py index c4271ab51..72e683aa0 100644 --- a/pyatlan_v9/model/assets/databricks_external_location_path.py +++ b/pyatlan_v9/model/assets/databricks_external_location_path.py @@ -42,16 +42,14 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import ( - RelatedDatabricksExternalLocation, - RelatedDatabricksExternalLocationPath, -) +from .databricks_related import RelatedDatabricksExternalLocation from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import 
RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -128,6 +126,7 @@ class DatabricksExternalLocationPath(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -149,6 +148,8 @@ class DatabricksExternalLocationPath(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DatabricksExternalLocationPath" + databricks_path: Union[str, None, UnsetType] = UNSET """Path of data at the external location.""" @@ -306,6 +307,11 @@ class DatabricksExternalLocationPath(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -387,76 +393,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksExternalLocationPath instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.databricks_external_location is UNSET: - errors.append("databricks_external_location is required for creation") - if errors: - raise ValueError( - f"DatabricksExternalLocationPath validation failed: {errors}" - ) - - def minimize(self) -> "DatabricksExternalLocationPath": - """ - Return a minimal copy of this DatabricksExternalLocationPath with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksExternalLocationPath with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksExternalLocationPath instance with only the minimum required fields. - """ - self.validate() - return DatabricksExternalLocationPath( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDatabricksExternalLocationPath": - """ - Create a :class:`RelatedDatabricksExternalLocationPath` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedDatabricksExternalLocationPath reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDatabricksExternalLocationPath(guid=self.guid) - return RelatedDatabricksExternalLocationPath(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -675,6 +611,11 @@ class DatabricksExternalLocationPathRelationshipAttributes(AssetRelationshipAttr dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -790,6 +731,7 @@ class DatabricksExternalLocationPathNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -937,9 +879,6 @@ def _databricks_external_location_path_to_nested( is_incomplete=databricks_external_location_path.is_incomplete, provenance_type=databricks_external_location_path.provenance_type, home_id=databricks_external_location_path.home_id, - depth=databricks_external_location_path.depth, - immediate_upstream=databricks_external_location_path.immediate_upstream, - immediate_downstream=databricks_external_location_path.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -975,6 +914,7 @@ def _databricks_external_location_path_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + 
meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -983,9 +923,6 @@ def _databricks_external_location_path_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_external_location_path_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1139,6 +1076,9 @@ def _databricks_external_location_path_from_nested_bytes( DatabricksExternalLocationPath.DBT_SOURCES = RelationField("dbtSources") DatabricksExternalLocationPath.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DatabricksExternalLocationPath.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +DatabricksExternalLocationPath.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = ( + RelationField("gcpDataplexAspectTypeMetadataEntities") +) DatabricksExternalLocationPath.MEANINGS = RelationField("meanings") DatabricksExternalLocationPath.MC_MONITORS = RelationField("mcMonitors") DatabricksExternalLocationPath.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/databricks_metric_view.py b/pyatlan_v9/model/assets/databricks_metric_view.py index 3f2d8f35d..02f2aad65 100644 --- a/pyatlan_v9/model/assets/databricks_metric_view.py +++ b/pyatlan_v9/model/assets/databricks_metric_view.py @@ -41,13 +41,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricksMetricView from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, 
RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -129,6 +129,7 @@ class DatabricksMetricView(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -153,6 +154,8 @@ class DatabricksMetricView(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DatabricksMetricView" + query_count: Union[int, None, UnsetType] = UNSET """Number of times this asset has been queried.""" @@ -320,6 +323,11 @@ class DatabricksMetricView(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -404,66 +412,6 @@ class DatabricksMetricView(Asset): def __post_init__(self) -> None: self.type_name = "DatabricksMetricView" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksMetricView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DatabricksMetricView validation failed: {errors}") - - def minimize(self) -> "DatabricksMetricView": - """ - Return a minimal copy of this DatabricksMetricView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksMetricView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksMetricView instance with only the minimum required fields. - """ - self.validate() - return DatabricksMetricView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricksMetricView": - """ - Create a :class:`RelatedDatabricksMetricView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksMetricView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDatabricksMetricView(guid=self.guid) - return RelatedDatabricksMetricView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -692,6 +640,11 @@ class DatabricksMetricViewRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -815,6 +768,7 @@ class DatabricksMetricViewNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -973,9 +927,6 @@ def _databricks_metric_view_to_nested( is_incomplete=databricks_metric_view.is_incomplete, provenance_type=databricks_metric_view.provenance_type, home_id=databricks_metric_view.home_id, - depth=databricks_metric_view.depth, - immediate_upstream=databricks_metric_view.immediate_upstream, - immediate_downstream=databricks_metric_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1011,6 +962,7 @@ def _databricks_metric_view_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1019,9 +971,6 @@ def _databricks_metric_view_from_nested( 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_metric_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1147,6 +1096,9 @@ def _databricks_metric_view_from_nested_bytes( DatabricksMetricView.DBT_SOURCES = RelationField("dbtSources") DatabricksMetricView.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DatabricksMetricView.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +DatabricksMetricView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DatabricksMetricView.MEANINGS = RelationField("meanings") DatabricksMetricView.MC_MONITORS = RelationField("mcMonitors") DatabricksMetricView.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/databricks_notebook.py b/pyatlan_v9/model/assets/databricks_notebook.py index 80968a7d4..b70a66a10 100644 --- a/pyatlan_v9/model/assets/databricks_notebook.py +++ b/pyatlan_v9/model/assets/databricks_notebook.py @@ -41,13 +41,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricksNotebook from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -75,8 +75,8 @@ class DatabricksNotebook(Asset): Base class for all databricks notebook assets. 
""" - DATABRICKS_NOTEBOOK_PATH: ClassVar[Any] = None - DATABRICKS_NOTEBOOK_WORKSPACE_ID: ClassVar[Any] = None + DATABRICKS_PATH: ClassVar[Any] = None + DATABRICKS_WORKSPACE_ID: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -122,6 +122,7 @@ class DatabricksNotebook(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -143,10 +144,12 @@ class DatabricksNotebook(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - databricks_notebook_path: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "DatabricksNotebook" + + databricks_path: Union[str, None, UnsetType] = UNSET """Path of the notebook.""" - databricks_notebook_workspace_id: Union[str, None, UnsetType] = UNSET + databricks_workspace_id: Union[str, None, UnsetType] = UNSET """Workspace Id of the notebook.""" query_count: Union[int, None, UnsetType] = UNSET @@ -292,6 +295,11 @@ class DatabricksNotebook(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -367,66 +375,6 @@ class DatabricksNotebook(Asset): def __post_init__(self) -> None: self.type_name = "DatabricksNotebook" - # ========================================================================= - # SDK Methods - # 
========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksNotebook instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DatabricksNotebook validation failed: {errors}") - - def minimize(self) -> "DatabricksNotebook": - """ - Return a minimal copy of this DatabricksNotebook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksNotebook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksNotebook instance with only the minimum required fields. - """ - self.validate() - return DatabricksNotebook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricksNotebook": - """ - Create a :class:`RelatedDatabricksNotebook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksNotebook reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDatabricksNotebook(guid=self.guid) - return RelatedDatabricksNotebook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -484,10 +432,10 @@ def from_json( class DatabricksNotebookAttributes(AssetAttributes): """DatabricksNotebook-specific attributes for nested API format.""" - databricks_notebook_path: Union[str, None, UnsetType] = UNSET + databricks_path: Union[str, None, UnsetType] = UNSET """Path of the notebook.""" - databricks_notebook_workspace_id: Union[str, None, UnsetType] = UNSET + databricks_workspace_id: Union[str, None, UnsetType] = UNSET """Workspace Id of the notebook.""" query_count: Union[int, None, UnsetType] = UNSET @@ -637,6 +585,11 @@ class DatabricksNotebookRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -751,6 +704,7 @@ class DatabricksNotebookNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -779,8 +733,8 @@ def _populate_databricks_notebook_attrs( ) -> None: """Populate DatabricksNotebook-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.databricks_notebook_path = obj.databricks_notebook_path - attrs.databricks_notebook_workspace_id = obj.databricks_notebook_workspace_id + attrs.databricks_path 
= obj.databricks_path + attrs.databricks_workspace_id = obj.databricks_workspace_id attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -815,8 +769,8 @@ def _populate_databricks_notebook_attrs( def _extract_databricks_notebook_attrs(attrs: DatabricksNotebookAttributes) -> dict: """Extract all DatabricksNotebook attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["databricks_notebook_path"] = attrs.databricks_notebook_path - result["databricks_notebook_workspace_id"] = attrs.databricks_notebook_workspace_id + result["databricks_path"] = attrs.databricks_path + result["databricks_workspace_id"] = attrs.databricks_workspace_id result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -892,9 +846,6 @@ def _databricks_notebook_to_nested( is_incomplete=databricks_notebook.is_incomplete, provenance_type=databricks_notebook.provenance_type, home_id=databricks_notebook.home_id, - depth=databricks_notebook.depth, - immediate_upstream=databricks_notebook.immediate_upstream, - immediate_downstream=databricks_notebook.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -930,6 +881,7 @@ def _databricks_notebook_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -938,9 +890,6 @@ def _databricks_notebook_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_notebook_attrs(attrs), # 
Merged relationship attributes **merged_rels, @@ -972,11 +921,9 @@ def _databricks_notebook_from_nested_bytes( RelationField, ) -DatabricksNotebook.DATABRICKS_NOTEBOOK_PATH = KeywordField( - "databricksNotebookPath", "databricksNotebookPath" -) -DatabricksNotebook.DATABRICKS_NOTEBOOK_WORKSPACE_ID = KeywordField( - "databricksNotebookWorkspaceId", "databricksNotebookWorkspaceId" +DatabricksNotebook.DATABRICKS_PATH = KeywordField("databricksPath", "databricksPath") +DatabricksNotebook.DATABRICKS_WORKSPACE_ID = KeywordField( + "databricksWorkspaceId", "databricksWorkspaceId" ) DatabricksNotebook.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksNotebook.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") @@ -1060,6 +1007,9 @@ def _databricks_notebook_from_nested_bytes( DatabricksNotebook.DBT_SOURCES = RelationField("dbtSources") DatabricksNotebook.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DatabricksNotebook.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +DatabricksNotebook.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DatabricksNotebook.MEANINGS = RelationField("meanings") DatabricksNotebook.MC_MONITORS = RelationField("mcMonitors") DatabricksNotebook.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/databricks_related.py b/pyatlan_v9/model/assets/databricks_related.py index bfb81a71e..7b4c15f13 100644 --- a/pyatlan_v9/model/assets/databricks_related.py +++ b/pyatlan_v9/model/assets/databricks_related.py @@ -13,7 +13,6 @@ from typing import Any, Dict, List, Union -import msgspec from msgspec import UNSET, UnsetType from .referenceable_related import RelatedReferenceable @@ -58,13 +57,13 @@ class RelatedDatabricksVolume(RelatedDatabricks): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DatabricksVolume" so it serializes correctly - databricks_volume_owner: Union[str, None, UnsetType] = UNSET + 
databricks_owner: Union[str, None, UnsetType] = UNSET """User or group (principal) currently owning the volume.""" - databricks_volume_external_location: Union[str, None, UnsetType] = UNSET + databricks_external_location: Union[str, None, UnsetType] = UNSET """The storage location where the volume is created.""" - databricks_volume_type: Union[str, None, UnsetType] = UNSET + databricks_type: Union[str, None, UnsetType] = UNSET """Type of the volume.""" def __post_init__(self) -> None: @@ -82,13 +81,13 @@ class RelatedDatabricksVolumePath(RelatedDatabricks): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DatabricksVolumePath" so it serializes correctly - databricks_volume_path_path: Union[str, None, UnsetType] = UNSET + databricks_path: Union[str, None, UnsetType] = UNSET """Path of data on the volume.""" - databricks_volume_path_volume_qualified_name: Union[str, None, UnsetType] = UNSET + databricks_volume_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent volume.""" - databricks_volume_path_volume_name: Union[str, None, UnsetType] = UNSET + databricks_volume_name: Union[str, None, UnsetType] = UNSET """Name of the parent volume.""" def __post_init__(self) -> None: @@ -151,9 +150,7 @@ class RelatedDatabricksAIModelContext(RelatedDatabricks): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DatabricksAIModelContext" so it serializes correctly - databricks_ai_model_context_metastore_id: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelContextMetastoreId") - ) + databricks_metastore_id: Union[str, None, UnsetType] = UNSET """The id of the model, common across versions.""" def __post_init__(self) -> None: @@ -171,64 +168,40 @@ class RelatedDatabricksAIModelVersion(RelatedDatabricks): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DatabricksAIModelVersion" so it serializes correctly - 
databricks_ai_model_version_id: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionId" - ) + databricks_id: Union[int, None, UnsetType] = UNSET """The id of the model, unique to every version.""" - databricks_ai_model_version_run_id: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunId" - ) + databricks_run_id: Union[str, None, UnsetType] = UNSET """The run id of the model.""" - databricks_ai_model_version_run_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionRunName" - ) + databricks_run_name: Union[str, None, UnsetType] = UNSET """The run name of the model.""" - databricks_ai_model_version_run_start_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunStartTime") - ) + databricks_run_start_time: Union[int, None, UnsetType] = UNSET """The run start time of the model.""" - databricks_ai_model_version_run_end_time: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionRunEndTime") - ) + databricks_run_end_time: Union[int, None, UnsetType] = UNSET """The run end time of the model.""" - databricks_ai_model_version_status: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionStatus" - ) + databricks_status: Union[str, None, UnsetType] = UNSET """The status of the model.""" - databricks_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionAliases") - ) + databricks_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases of the model.""" - databricks_ai_model_version_dataset_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionDatasetCount") - ) + databricks_dataset_count: Union[int, None, UnsetType] = UNSET """Number of datasets.""" - databricks_ai_model_version_source: Union[str, None, 
UnsetType] = msgspec.field( - default=UNSET, name="databricksAIModelVersionSource" - ) + databricks_source: Union[str, None, UnsetType] = UNSET """Source artifact link for the model.""" - databricks_ai_model_version_artifact_uri: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionArtifactUri") - ) + databricks_artifact_uri: Union[str, None, UnsetType] = UNSET """Artifact uri for the model.""" - databricks_ai_model_version_metrics: Union[ - List[Dict[str, Any]], None, UnsetType - ] = msgspec.field(default=UNSET, name="databricksAIModelVersionMetrics") + databricks_metrics: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - databricks_ai_model_version_params: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="databricksAIModelVersionParams") - ) + databricks_params: Union[Dict[str, str], None, UnsetType] = UNSET """Params with key mapped to value for an individual experiment.""" def __post_init__(self) -> None: @@ -261,10 +234,10 @@ class RelatedDatabricksNotebook(RelatedDatabricks): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DatabricksNotebook" so it serializes correctly - databricks_notebook_path: Union[str, None, UnsetType] = UNSET + databricks_path: Union[str, None, UnsetType] = UNSET """Path of the notebook.""" - databricks_notebook_workspace_id: Union[str, None, UnsetType] = UNSET + databricks_workspace_id: Union[str, None, UnsetType] = UNSET """Workspace Id of the notebook.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/databricks_volume.py b/pyatlan_v9/model/assets/databricks_volume.py index d039edfd3..eb663ed94 100644 --- a/pyatlan_v9/model/assets/databricks_volume.py +++ b/pyatlan_v9/model/assets/databricks_volume.py @@ -42,13 +42,14 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricksVolume, RelatedDatabricksVolumePath +from .databricks_related import RelatedDatabricksVolumePath from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -77,9 +78,9 @@ class DatabricksVolume(Asset): Represents a Databricks Volume, a storage object for managing and accessing data files within Databricks workspaces. """ - DATABRICKS_VOLUME_OWNER: ClassVar[Any] = None - DATABRICKS_VOLUME_EXTERNAL_LOCATION: ClassVar[Any] = None - DATABRICKS_VOLUME_TYPE: ClassVar[Any] = None + DATABRICKS_OWNER: ClassVar[Any] = None + DATABRICKS_EXTERNAL_LOCATION: ClassVar[Any] = None + DATABRICKS_TYPE: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -127,6 +128,7 @@ class DatabricksVolume(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -148,13 +150,15 @@ class DatabricksVolume(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - databricks_volume_owner: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "DatabricksVolume" + + databricks_owner: Union[str, None, UnsetType] = UNSET """User or group (principal) currently owning the volume.""" - databricks_volume_external_location: Union[str, None, UnsetType] = UNSET + databricks_external_location: Union[str, None, UnsetType] = UNSET """The storage location where the 
volume is created.""" - databricks_volume_type: Union[str, None, UnsetType] = UNSET + databricks_type: Union[str, None, UnsetType] = UNSET """Type of the volume.""" query_count: Union[int, None, UnsetType] = UNSET @@ -308,6 +312,11 @@ class DatabricksVolume(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -391,78 +400,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksVolume instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DatabricksVolume validation failed: {errors}") - - def minimize(self) -> "DatabricksVolume": - """ - Return a minimal copy of this DatabricksVolume with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksVolume with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksVolume instance with only the minimum required fields. - """ - self.validate() - return DatabricksVolume(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricksVolume": - """ - Create a :class:`RelatedDatabricksVolume` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksVolume reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDatabricksVolume(guid=self.guid) - return RelatedDatabricksVolume(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -520,13 +457,13 @@ def from_json( class DatabricksVolumeAttributes(AssetAttributes): """DatabricksVolume-specific attributes for nested API format.""" - databricks_volume_owner: Union[str, None, UnsetType] = UNSET + databricks_owner: Union[str, None, UnsetType] = UNSET """User or group (principal) currently owning the volume.""" - databricks_volume_external_location: Union[str, None, UnsetType] = UNSET + databricks_external_location: Union[str, None, UnsetType] = UNSET """The storage location where the volume is created.""" - databricks_volume_type: Union[str, None, UnsetType] = UNSET + databricks_type: Union[str, None, UnsetType] = UNSET """Type of the volume.""" query_count: Union[int, None, UnsetType] = UNSET @@ -684,6 +621,11 @@ class DatabricksVolumeRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -800,6 +742,7 @@ class DatabricksVolumeNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -828,9 +771,9 @@ def _populate_databricks_volume_attrs( ) -> None: """Populate DatabricksVolume-specific attributes on the attrs struct.""" 
_populate_asset_attrs(attrs, obj) - attrs.databricks_volume_owner = obj.databricks_volume_owner - attrs.databricks_volume_external_location = obj.databricks_volume_external_location - attrs.databricks_volume_type = obj.databricks_volume_type + attrs.databricks_owner = obj.databricks_owner + attrs.databricks_external_location = obj.databricks_external_location + attrs.databricks_type = obj.databricks_type attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -865,11 +808,9 @@ def _populate_databricks_volume_attrs( def _extract_databricks_volume_attrs(attrs: DatabricksVolumeAttributes) -> dict: """Extract all DatabricksVolume attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["databricks_volume_owner"] = attrs.databricks_volume_owner - result["databricks_volume_external_location"] = ( - attrs.databricks_volume_external_location - ) - result["databricks_volume_type"] = attrs.databricks_volume_type + result["databricks_owner"] = attrs.databricks_owner + result["databricks_external_location"] = attrs.databricks_external_location + result["databricks_type"] = attrs.databricks_type result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -945,9 +886,6 @@ def _databricks_volume_to_nested( is_incomplete=databricks_volume.is_incomplete, provenance_type=databricks_volume.provenance_type, home_id=databricks_volume.home_id, - depth=databricks_volume.depth, - immediate_upstream=databricks_volume.immediate_upstream, - immediate_downstream=databricks_volume.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -981,6 +919,7 @@ def _databricks_volume_from_nested(nested: DatabricksVolumeNested) -> Databricks updated_by=nested.updated_by, classifications=nested.classifications, 
classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -989,9 +928,6 @@ def _databricks_volume_from_nested(nested: DatabricksVolumeNested) -> Databricks is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_volume_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1021,15 +957,11 @@ def _databricks_volume_from_nested_bytes(data: bytes, serde: Serde) -> Databrick RelationField, ) -DatabricksVolume.DATABRICKS_VOLUME_OWNER = KeywordField( - "databricksVolumeOwner", "databricksVolumeOwner" -) -DatabricksVolume.DATABRICKS_VOLUME_EXTERNAL_LOCATION = KeywordField( - "databricksVolumeExternalLocation", "databricksVolumeExternalLocation" -) -DatabricksVolume.DATABRICKS_VOLUME_TYPE = KeywordField( - "databricksVolumeType", "databricksVolumeType" +DatabricksVolume.DATABRICKS_OWNER = KeywordField("databricksOwner", "databricksOwner") +DatabricksVolume.DATABRICKS_EXTERNAL_LOCATION = KeywordField( + "databricksExternalLocation", "databricksExternalLocation" ) +DatabricksVolume.DATABRICKS_TYPE = KeywordField("databricksType", "databricksType") DatabricksVolume.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksVolume.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") DatabricksVolume.QUERY_USER_MAP = KeywordField("queryUserMap", "queryUserMap") @@ -1112,6 +1044,9 @@ def _databricks_volume_from_nested_bytes(data: bytes, serde: Serde) -> Databrick DatabricksVolume.DBT_SOURCES = RelationField("dbtSources") DatabricksVolume.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DatabricksVolume.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +DatabricksVolume.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + 
"gcpDataplexAspectTypeMetadataEntities" +) DatabricksVolume.MEANINGS = RelationField("meanings") DatabricksVolume.MC_MONITORS = RelationField("mcMonitors") DatabricksVolume.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/databricks_volume_path.py b/pyatlan_v9/model/assets/databricks_volume_path.py index 89d97d729..dd5e3fa2f 100644 --- a/pyatlan_v9/model/assets/databricks_volume_path.py +++ b/pyatlan_v9/model/assets/databricks_volume_path.py @@ -42,13 +42,14 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .databricks_related import RelatedDatabricksVolume, RelatedDatabricksVolumePath +from .databricks_related import RelatedDatabricksVolume from .dbt_related import ( RelatedDbtModel, RelatedDbtSeed, RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -76,9 +77,9 @@ class DatabricksVolumePath(Asset): Represents a path within a Databricks Volume, providing access to specific data files or directories. 
""" - DATABRICKS_VOLUME_PATH_PATH: ClassVar[Any] = None - DATABRICKS_VOLUME_PATH_VOLUME_QUALIFIED_NAME: ClassVar[Any] = None - DATABRICKS_VOLUME_PATH_VOLUME_NAME: ClassVar[Any] = None + DATABRICKS_PATH: ClassVar[Any] = None + DATABRICKS_VOLUME_QUALIFIED_NAME: ClassVar[Any] = None + DATABRICKS_VOLUME_NAME: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -125,6 +126,7 @@ class DatabricksVolumePath(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -146,13 +148,15 @@ class DatabricksVolumePath(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - databricks_volume_path_path: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "DatabricksVolumePath" + + databricks_path: Union[str, None, UnsetType] = UNSET """Path of data on the volume.""" - databricks_volume_path_volume_qualified_name: Union[str, None, UnsetType] = UNSET + databricks_volume_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent volume.""" - databricks_volume_path_volume_name: Union[str, None, UnsetType] = UNSET + databricks_volume_name: Union[str, None, UnsetType] = UNSET """Name of the parent volume.""" query_count: Union[int, None, UnsetType] = UNSET @@ -301,6 +305,11 @@ class DatabricksVolumePath(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, 
UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -384,86 +393,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DatabricksVolumePath instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.databricks_volume is UNSET: - errors.append("databricks_volume is required for creation") - if self.databricks_volume_name is UNSET: - errors.append("databricks_volume_name is required for creation") - if self.databricks_volume_qualified_name is UNSET: - errors.append( - "databricks_volume_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is 
UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DatabricksVolumePath validation failed: {errors}") - - def minimize(self) -> "DatabricksVolumePath": - """ - Return a minimal copy of this DatabricksVolumePath with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DatabricksVolumePath with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DatabricksVolumePath instance with only the minimum required fields. - """ - self.validate() - return DatabricksVolumePath(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDatabricksVolumePath": - """ - Create a :class:`RelatedDatabricksVolumePath` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDatabricksVolumePath reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDatabricksVolumePath(guid=self.guid) - return RelatedDatabricksVolumePath(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -521,13 +450,13 @@ def from_json( class DatabricksVolumePathAttributes(AssetAttributes): """DatabricksVolumePath-specific attributes for nested API format.""" - databricks_volume_path_path: Union[str, None, UnsetType] = UNSET + databricks_path: Union[str, None, UnsetType] = UNSET """Path of data on the volume.""" - databricks_volume_path_volume_qualified_name: Union[str, None, UnsetType] = UNSET + databricks_volume_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent volume.""" - databricks_volume_path_volume_name: Union[str, None, UnsetType] = UNSET + databricks_volume_name: Union[str, None, UnsetType] = UNSET """Name of the parent volume.""" query_count: Union[int, None, UnsetType] = UNSET @@ -680,6 +609,11 @@ class DatabricksVolumePathRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -795,6 +729,7 @@ class DatabricksVolumePathNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -823,11 +758,9 @@ def _populate_databricks_volume_path_attrs( ) -> None: """Populate DatabricksVolumePath-specific attributes 
on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.databricks_volume_path_path = obj.databricks_volume_path_path - attrs.databricks_volume_path_volume_qualified_name = ( - obj.databricks_volume_path_volume_qualified_name - ) - attrs.databricks_volume_path_volume_name = obj.databricks_volume_path_volume_name + attrs.databricks_path = obj.databricks_path + attrs.databricks_volume_qualified_name = obj.databricks_volume_qualified_name + attrs.databricks_volume_name = obj.databricks_volume_name attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -864,13 +797,9 @@ def _extract_databricks_volume_path_attrs( ) -> dict: """Extract all DatabricksVolumePath attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["databricks_volume_path_path"] = attrs.databricks_volume_path_path - result["databricks_volume_path_volume_qualified_name"] = ( - attrs.databricks_volume_path_volume_qualified_name - ) - result["databricks_volume_path_volume_name"] = ( - attrs.databricks_volume_path_volume_name - ) + result["databricks_path"] = attrs.databricks_path + result["databricks_volume_qualified_name"] = attrs.databricks_volume_qualified_name + result["databricks_volume_name"] = attrs.databricks_volume_name result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -946,9 +875,6 @@ def _databricks_volume_path_to_nested( is_incomplete=databricks_volume_path.is_incomplete, provenance_type=databricks_volume_path.provenance_type, home_id=databricks_volume_path.home_id, - depth=databricks_volume_path.depth, - immediate_upstream=databricks_volume_path.immediate_upstream, - immediate_downstream=databricks_volume_path.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -984,6 +910,7 @@ def 
_databricks_volume_path_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -992,9 +919,6 @@ def _databricks_volume_path_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_databricks_volume_path_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1026,14 +950,12 @@ def _databricks_volume_path_from_nested_bytes( RelationField, ) -DatabricksVolumePath.DATABRICKS_VOLUME_PATH_PATH = KeywordField( - "databricksVolumePathPath", "databricksVolumePathPath" -) -DatabricksVolumePath.DATABRICKS_VOLUME_PATH_VOLUME_QUALIFIED_NAME = KeywordField( - "databricksVolumePathVolumeQualifiedName", "databricksVolumePathVolumeQualifiedName" +DatabricksVolumePath.DATABRICKS_PATH = KeywordField("databricksPath", "databricksPath") +DatabricksVolumePath.DATABRICKS_VOLUME_QUALIFIED_NAME = KeywordField( + "databricksVolumeQualifiedName", "databricksVolumeQualifiedName" ) -DatabricksVolumePath.DATABRICKS_VOLUME_PATH_VOLUME_NAME = KeywordField( - "databricksVolumePathVolumeName", "databricksVolumePathVolumeName" +DatabricksVolumePath.DATABRICKS_VOLUME_NAME = KeywordField( + "databricksVolumeName", "databricksVolumeName" ) DatabricksVolumePath.QUERY_COUNT = NumericField("queryCount", "queryCount") DatabricksVolumePath.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") @@ -1120,6 +1042,9 @@ def _databricks_volume_path_from_nested_bytes( DatabricksVolumePath.DBT_SOURCES = RelationField("dbtSources") DatabricksVolumePath.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DatabricksVolumePath.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") 
+DatabricksVolumePath.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DatabricksVolumePath.MEANINGS = RelationField("meanings") DatabricksVolumePath.MC_MONITORS = RelationField("mcMonitors") DatabricksVolumePath.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dataverse.py b/pyatlan_v9/model/assets/dataverse.py index f40a13703..839bfeac6 100644 --- a/pyatlan_v9/model/assets/dataverse.py +++ b/pyatlan_v9/model/assets/dataverse.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dataverse_related import RelatedDataverse +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -81,6 +81,7 @@ class Dataverse(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -98,6 +99,8 @@ class Dataverse(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Dataverse" + dataverse_is_custom: Union[bool, None, UnsetType] = UNSET """Indicator if DataverseEntity is custom built.""" @@ -156,6 +159,11 @@ class Dataverse(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, 
UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -211,66 +219,6 @@ class Dataverse(Asset): def __post_init__(self) -> None: self.type_name = "Dataverse" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Dataverse instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Dataverse validation failed: {errors}") - - def minimize(self) -> "Dataverse": - """ - Return a minimal copy of this Dataverse with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Dataverse with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Dataverse instance with only the minimum required fields. - """ - self.validate() - return Dataverse(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataverse": - """ - Create a :class:`RelatedDataverse` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataverse reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDataverse(guid=self.guid) - return RelatedDataverse(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -388,6 +336,11 @@ class DataverseRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -474,6 +427,7 @@ class DataverseNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -545,9 +499,6 @@ def _dataverse_to_nested(dataverse: Dataverse) -> DataverseNested: is_incomplete=dataverse.is_incomplete, provenance_type=dataverse.provenance_type, home_id=dataverse.home_id, - depth=dataverse.depth, - immediate_upstream=dataverse.immediate_upstream, - immediate_downstream=dataverse.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -579,6 +530,7 @@ def _dataverse_from_nested(nested: DataverseNested) -> Dataverse: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -587,9 +539,6 @@ def _dataverse_from_nested(nested: DataverseNested) -> Dataverse: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dataverse_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -640,6 +589,9 @@ def _dataverse_from_nested_bytes(data: bytes, serde: Serde) -> Dataverse: Dataverse.METRICS = RelationField("metrics") Dataverse.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Dataverse.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Dataverse.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Dataverse.MEANINGS = RelationField("meanings") Dataverse.MC_MONITORS = RelationField("mcMonitors") Dataverse.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dataverse_attribute.py b/pyatlan_v9/model/assets/dataverse_attribute.py index 6c3a3b4eb..7244eeea0 100644 --- a/pyatlan_v9/model/assets/dataverse_attribute.py +++ b/pyatlan_v9/model/assets/dataverse_attribute.py @@ -42,7 +42,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dataverse_related import RelatedDataverseAttribute, RelatedDataverseEntity +from .dataverse_related import RelatedDataverseEntity +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -89,6 +90,7 @@ class DataverseAttribute(Asset): DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None DATAVERSE_ENTITY: 
ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +108,8 @@ class DataverseAttribute(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataverseAttribute" + dataverse_entity_qualified_name: Union[str, None, UnsetType] = UNSET """Entity Qualified Name of the DataverseAttribute.""" @@ -182,6 +186,11 @@ class DataverseAttribute(Asset): dataverse_entity: Union[RelatedDataverseEntity, None, UnsetType] = UNSET """DataverseEntity asset containing this DataverseAttribute.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -243,76 +252,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataverseAttribute instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dataverse_entity is UNSET: - errors.append("dataverse_entity is required for creation") - if self.dataverse_entity_qualified_name is UNSET: - errors.append( - "dataverse_entity_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"DataverseAttribute validation failed: {errors}") - - def minimize(self) -> "DataverseAttribute": - """ - Return a minimal copy of this DataverseAttribute with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataverseAttribute with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataverseAttribute instance with only the minimum required fields. - """ - self.validate() - return DataverseAttribute(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataverseAttribute": - """ - Create a :class:`RelatedDataverseAttribute` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataverseAttribute reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataverseAttribute(guid=self.guid) - return RelatedDataverseAttribute(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -500,6 +439,11 @@ class DataverseAttributeRelationshipAttributes(AssetRelationshipAttributes): dataverse_entity: Union[RelatedDataverseEntity, None, UnsetType] = UNSET """DataverseEntity asset containing this DataverseAttribute.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -589,6 +533,7 @@ class DataverseAttributeNested(AssetNested): "dq_base_dataset_rules", "dq_reference_dataset_rules", "dataverse_entity", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -680,9 +625,6 @@ def _dataverse_attribute_to_nested( is_incomplete=dataverse_attribute.is_incomplete, provenance_type=dataverse_attribute.provenance_type, home_id=dataverse_attribute.home_id, - depth=dataverse_attribute.depth, - immediate_upstream=dataverse_attribute.immediate_upstream, - immediate_downstream=dataverse_attribute.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -718,6 +660,7 @@ def _dataverse_attribute_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -726,9 +669,6 @@ def _dataverse_attribute_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_dataverse_attribute_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -807,6 +747,9 @@ def _dataverse_attribute_from_nested_bytes( DataverseAttribute.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataverseAttribute.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") DataverseAttribute.DATAVERSE_ENTITY = RelationField("dataverseEntity") +DataverseAttribute.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataverseAttribute.MEANINGS = RelationField("meanings") DataverseAttribute.MC_MONITORS = RelationField("mcMonitors") DataverseAttribute.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dataverse_entity.py b/pyatlan_v9/model/assets/dataverse_entity.py index 6a17eb770..dc6173d1f 100644 --- a/pyatlan_v9/model/assets/dataverse_entity.py +++ b/pyatlan_v9/model/assets/dataverse_entity.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dataverse_related import RelatedDataverseAttribute, RelatedDataverseEntity +from .dataverse_related import RelatedDataverseAttribute +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class DataverseEntity(Asset): DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None DATAVERSE_ATTRIBUTES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class DataverseEntity(Asset): INPUT_TO_SPARK_JOBS: 
ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DataverseEntity" + dataverse_entity_schema_name: Union[str, None, UnsetType] = UNSET """Schema Name of the DataverseEntity.""" @@ -171,6 +175,11 @@ class DataverseEntity(Asset): ) """DataverseAttribute assets contained within this DataverseEntity.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -226,66 +235,6 @@ class DataverseEntity(Asset): def __post_init__(self) -> None: self.type_name = "DataverseEntity" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DataverseEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DataverseEntity validation failed: {errors}") - - def minimize(self) -> "DataverseEntity": - """ - Return a minimal copy of this DataverseEntity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DataverseEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DataverseEntity instance with only the minimum required fields. - """ - self.validate() - return DataverseEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDataverseEntity": - """ - Create a :class:`RelatedDataverseEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDataverseEntity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDataverseEntity(guid=self.guid) - return RelatedDataverseEntity(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -452,6 +401,11 @@ class DataverseEntityRelationshipAttributes(AssetRelationshipAttributes): ) """DataverseAttribute assets contained within this DataverseEntity.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -541,6 +495,7 @@ class DataverseEntityNested(AssetNested): "dq_base_dataset_rules", "dq_reference_dataset_rules", "dataverse_attributes", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -622,9 +577,6 @@ def _dataverse_entity_to_nested( is_incomplete=dataverse_entity.is_incomplete, provenance_type=dataverse_entity.provenance_type, home_id=dataverse_entity.home_id, - depth=dataverse_entity.depth, - immediate_upstream=dataverse_entity.immediate_upstream, - immediate_downstream=dataverse_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -658,6 +610,7 @@ def _dataverse_entity_from_nested(nested: DataverseEntityNested) -> DataverseEnt updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -666,9 +619,6 @@ def _dataverse_entity_from_nested(nested: DataverseEntityNested) -> DataverseEnt is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_dataverse_entity_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -734,6 +684,9 @@ def _dataverse_entity_from_nested_bytes(data: bytes, serde: Serde) -> DataverseE DataverseEntity.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DataverseEntity.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") DataverseEntity.DATAVERSE_ATTRIBUTES = RelationField("dataverseAttributes") +DataverseEntity.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DataverseEntity.MEANINGS = RelationField("meanings") DataverseEntity.MC_MONITORS = RelationField("mcMonitors") DataverseEntity.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt.py b/pyatlan_v9/model/assets/dbt.py index 3505c16ce..d7b3feb0e 100644 --- a/pyatlan_v9/model/assets/dbt.py +++ b/pyatlan_v9/model/assets/dbt.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbt +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -97,6 +97,7 @@ class Dbt(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -114,6 +115,8 @@ class Dbt(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Dbt" + dbt_alias: Union[str, None, UnsetType] = UNSET """Alias of 
this asset in dbt.""" @@ -220,6 +223,11 @@ class Dbt(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -275,66 +283,6 @@ class Dbt(Asset): def __post_init__(self) -> None: self.type_name = "Dbt" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Dbt instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Dbt validation failed: {errors}") - - def minimize(self) -> "Dbt": - """ - Return a minimal copy of this Dbt with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Dbt with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Dbt instance with only the minimum required fields. - """ - self.validate() - return Dbt(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbt": - """ - Create a :class:`RelatedDbt` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbt reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDbt(guid=self.guid) - return RelatedDbt(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -500,6 +448,11 @@ class DbtRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -582,6 +535,7 @@ class DbtNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -685,9 +639,6 @@ def _dbt_to_nested(dbt: Dbt) -> DbtNested: is_incomplete=dbt.is_incomplete, provenance_type=dbt.provenance_type, home_id=dbt.home_id, - depth=dbt.depth, - immediate_upstream=dbt.immediate_upstream, - immediate_downstream=dbt.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -717,6 +668,7 @@ def _dbt_from_nested(nested: DbtNested) -> Dbt: updated_by=nested.updated_by, classifications=nested.classifications, 
classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -725,9 +677,6 @@ def _dbt_from_nested(nested: DbtNested) -> Dbt: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -798,6 +747,9 @@ def _dbt_from_nested_bytes(data: bytes, serde: Serde) -> Dbt: Dbt.METRICS = RelationField("metrics") Dbt.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Dbt.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Dbt.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Dbt.MEANINGS = RelationField("meanings") Dbt.MC_MONITORS = RelationField("mcMonitors") Dbt.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_column_process.py b/pyatlan_v9/model/assets/dbt_column_process.py index e313e8953..ee3dac57e 100644 --- a/pyatlan_v9/model/assets/dbt_column_process.py +++ b/pyatlan_v9/model/assets/dbt_column_process.py @@ -43,10 +43,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtColumnProcess from .fabric_related import RelatedFabricActivity from .fivetran_related import RelatedFivetranConnector from .flow_related import RelatedFlowControlOperation +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .matillion_related import RelatedMatillionComponent from .model_related import RelatedModelAttribute, RelatedModelEntity @@ -99,6 +99,7 @@ class 
DbtColumnProcess(Asset): AST: ClassVar[Any] = None ADDITIONAL_ETL_CONTEXT: ClassVar[Any] = None AI_DATASET_TYPE: ClassVar[Any] = None + IS_PASS_THROUGH: ClassVar[Any] = None ADF_ACTIVITY: ClassVar[Any] = None AIRFLOW_TASKS: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None @@ -118,6 +119,7 @@ class DbtColumnProcess(Asset): FABRIC_ACTIVITIES: ClassVar[Any] = None FIVETRAN_CONNECTOR: ClassVar[Any] = None FLOW_ORCHESTRATED_BY: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_COMPONENT: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -144,6 +146,8 @@ class DbtColumnProcess(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtColumnProcess" + dbt_column_process_job_status: Union[str, None, UnsetType] = UNSET """Status of the dbt column process job.""" @@ -225,6 +229,9 @@ class DbtColumnProcess(Asset): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + adf_activity: Union[RelatedAdfActivity, None, UnsetType] = UNSET """ADF Activity that is associated with this lineage process.""" @@ -286,6 +293,11 @@ class DbtColumnProcess(Asset): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -370,66 
+382,6 @@ class DbtColumnProcess(Asset): def __post_init__(self) -> None: self.type_name = "DbtColumnProcess" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtColumnProcess instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtColumnProcess validation failed: {errors}") - - def minimize(self) -> "DbtColumnProcess": - """ - Return a minimal copy of this DbtColumnProcess with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtColumnProcess with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtColumnProcess instance with only the minimum required fields. - """ - self.validate() - return DbtColumnProcess(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtColumnProcess": - """ - Create a :class:`RelatedDbtColumnProcess` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtColumnProcess reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDbtColumnProcess(guid=self.guid) - return RelatedDbtColumnProcess(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -568,6 +520,9 @@ class DbtColumnProcessAttributes(AssetAttributes): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + class DbtColumnProcessRelationshipAttributes(AssetRelationshipAttributes): """DbtColumnProcess-specific relationship attributes for nested API format.""" @@ -633,6 +588,11 @@ class DbtColumnProcessRelationshipAttributes(AssetRelationshipAttributes): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -755,6 +715,7 @@ class DbtColumnProcessNested(AssetNested): "fabric_activities", "fivetran_connector", "flow_orchestrated_by", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_component", "mc_monitors", @@ -817,6 +778,7 @@ def 
_populate_dbt_column_process_attrs( attrs.ast = obj.ast attrs.additional_etl_context = obj.additional_etl_context attrs.ai_dataset_type = obj.ai_dataset_type + attrs.is_pass_through = obj.is_pass_through def _extract_dbt_column_process_attrs(attrs: DbtColumnProcessAttributes) -> dict: @@ -851,6 +813,7 @@ def _extract_dbt_column_process_attrs(attrs: DbtColumnProcessAttributes) -> dict result["ast"] = attrs.ast result["additional_etl_context"] = attrs.additional_etl_context result["ai_dataset_type"] = attrs.ai_dataset_type + result["is_pass_through"] = attrs.is_pass_through return result @@ -891,9 +854,6 @@ def _dbt_column_process_to_nested( is_incomplete=dbt_column_process.is_incomplete, provenance_type=dbt_column_process.provenance_type, home_id=dbt_column_process.home_id, - depth=dbt_column_process.depth, - immediate_upstream=dbt_column_process.immediate_upstream, - immediate_downstream=dbt_column_process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -927,6 +887,7 @@ def _dbt_column_process_from_nested(nested: DbtColumnProcessNested) -> DbtColumn updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -935,9 +896,6 @@ def _dbt_column_process_from_nested(nested: DbtColumnProcessNested) -> DbtColumn is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_column_process_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -963,6 +921,7 @@ def _dbt_column_process_from_nested_bytes( # Deferred field descriptor initialization # 
--------------------------------------------------------------------------- from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, KeywordField, NumericField, RelationField, @@ -1015,6 +974,7 @@ def _dbt_column_process_from_nested_bytes( "additionalEtlContext", "additionalEtlContext" ) DbtColumnProcess.AI_DATASET_TYPE = KeywordField("aiDatasetType", "aiDatasetType") +DbtColumnProcess.IS_PASS_THROUGH = BooleanField("isPassThrough", "isPassThrough") DbtColumnProcess.ADF_ACTIVITY = RelationField("adfActivity") DbtColumnProcess.AIRFLOW_TASKS = RelationField("airflowTasks") DbtColumnProcess.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") @@ -1038,6 +998,9 @@ def _dbt_column_process_from_nested_bytes( DbtColumnProcess.FABRIC_ACTIVITIES = RelationField("fabricActivities") DbtColumnProcess.FIVETRAN_CONNECTOR = RelationField("fivetranConnector") DbtColumnProcess.FLOW_ORCHESTRATED_BY = RelationField("flowOrchestratedBy") +DbtColumnProcess.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtColumnProcess.MEANINGS = RelationField("meanings") DbtColumnProcess.MATILLION_COMPONENT = RelationField("matillionComponent") DbtColumnProcess.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/dbt_dimension.py b/pyatlan_v9/model/assets/dbt_dimension.py index 5ff46d6c2..ca7e7333f 100644 --- a/pyatlan_v9/model/assets/dbt_dimension.py +++ b/pyatlan_v9/model/assets/dbt_dimension.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtDimension +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -107,6 
+107,7 @@ class DbtDimension(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -125,6 +126,8 @@ class DbtDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtDimension" + dbt_semantic_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dbt semantic model this dimension belongs to.""" @@ -258,6 +261,11 @@ class DbtDimension(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -316,66 +324,6 @@ class DbtDimension(Asset): def __post_init__(self) -> None: self.type_name = "DbtDimension" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtDimension validation failed: {errors}") - - def minimize(self) -> "DbtDimension": - """ - Return a minimal copy of this DbtDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtDimension instance with only the minimum required fields. - """ - self.validate() - return DbtDimension(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtDimension": - """ - Create a :class:`RelatedDbtDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtDimension reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtDimension(guid=self.guid) - return RelatedDbtDimension(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -568,6 +516,11 @@ class DbtDimensionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -659,6 +612,7 @@ class DbtDimensionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -787,9 +741,6 @@ def _dbt_dimension_to_nested(dbt_dimension: DbtDimension) -> DbtDimensionNested: is_incomplete=dbt_dimension.is_incomplete, provenance_type=dbt_dimension.provenance_type, home_id=dbt_dimension.home_id, - depth=dbt_dimension.depth, - immediate_upstream=dbt_dimension.immediate_upstream, - immediate_downstream=dbt_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -823,6 +774,7 @@ def _dbt_dimension_from_nested(nested: DbtDimensionNested) -> DbtDimension: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -831,9 +783,6 @@ def _dbt_dimension_from_nested(nested: DbtDimensionNested) -> DbtDimension: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -930,6 +879,9 @@ def _dbt_dimension_from_nested_bytes(data: bytes, serde: Serde) -> DbtDimension: DbtDimension.METRICS = RelationField("metrics") DbtDimension.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DbtDimension.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DbtDimension.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtDimension.MEANINGS = RelationField("meanings") DbtDimension.MC_MONITORS = RelationField("mcMonitors") DbtDimension.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_entity.py b/pyatlan_v9/model/assets/dbt_entity.py index d7b5a03c8..98389e221 100644 --- a/pyatlan_v9/model/assets/dbt_entity.py +++ b/pyatlan_v9/model/assets/dbt_entity.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtEntity +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -106,6 +106,7 @@ class DbtEntity(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -124,6 +125,8 @@ class DbtEntity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtEntity" + dbt_semantic_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dbt semantic model this entity belongs to.""" @@ -254,6 +257,11 @@ class DbtEntity(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -312,66 +320,6 @@ class DbtEntity(Asset): def __post_init__(self) -> None: self.type_name = "DbtEntity" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtEntity validation failed: {errors}") - - def minimize(self) -> "DbtEntity": - """ - Return a minimal copy of this DbtEntity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtEntity instance with only the minimum required fields. - """ - self.validate() - return DbtEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtEntity": - """ - Create a :class:`RelatedDbtEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtEntity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtEntity(guid=self.guid) - return RelatedDbtEntity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -561,6 +509,11 @@ class DbtEntityRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -650,6 +603,7 @@ class DbtEntityNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -772,9 +726,6 @@ def _dbt_entity_to_nested(dbt_entity: DbtEntity) -> DbtEntityNested: is_incomplete=dbt_entity.is_incomplete, provenance_type=dbt_entity.provenance_type, home_id=dbt_entity.home_id, - depth=dbt_entity.depth, - immediate_upstream=dbt_entity.immediate_upstream, - immediate_downstream=dbt_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -806,6 +757,7 @@ def _dbt_entity_from_nested(nested: DbtEntityNested) -> DbtEntity: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -814,9 +766,6 @@ def _dbt_entity_from_nested(nested: DbtEntityNested) -> DbtEntity: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_entity_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -906,6 +855,9 @@ def _dbt_entity_from_nested_bytes(data: bytes, serde: Serde) -> DbtEntity: DbtEntity.METRICS = RelationField("metrics") DbtEntity.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DbtEntity.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DbtEntity.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtEntity.MEANINGS = RelationField("meanings") DbtEntity.MC_MONITORS = RelationField("mcMonitors") DbtEntity.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_measure.py b/pyatlan_v9/model/assets/dbt_measure.py index c46c92519..d7e79c7cd 100644 --- a/pyatlan_v9/model/assets/dbt_measure.py +++ b/pyatlan_v9/model/assets/dbt_measure.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtMeasure +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -106,6 +106,7 @@ class DbtMeasure(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -124,6 +125,8 @@ class DbtMeasure(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] 
= "DbtMeasure" + dbt_semantic_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dbt semantic model this measure belongs to.""" @@ -254,6 +257,11 @@ class DbtMeasure(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -312,66 +320,6 @@ class DbtMeasure(Asset): def __post_init__(self) -> None: self.type_name = "DbtMeasure" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtMeasure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtMeasure validation failed: {errors}") - - def minimize(self) -> "DbtMeasure": - """ - Return a minimal copy of this DbtMeasure with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtMeasure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtMeasure instance with only the minimum required fields. - """ - self.validate() - return DbtMeasure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtMeasure": - """ - Create a :class:`RelatedDbtMeasure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtMeasure reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDbtMeasure(guid=self.guid) - return RelatedDbtMeasure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -561,6 +509,11 @@ class DbtMeasureRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -650,6 +603,7 @@ class DbtMeasureNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -772,9 +726,6 @@ def _dbt_measure_to_nested(dbt_measure: DbtMeasure) -> DbtMeasureNested: is_incomplete=dbt_measure.is_incomplete, provenance_type=dbt_measure.provenance_type, home_id=dbt_measure.home_id, - depth=dbt_measure.depth, - 
immediate_upstream=dbt_measure.immediate_upstream, - immediate_downstream=dbt_measure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -806,6 +757,7 @@ def _dbt_measure_from_nested(nested: DbtMeasureNested) -> DbtMeasure: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -814,9 +766,6 @@ def _dbt_measure_from_nested(nested: DbtMeasureNested) -> DbtMeasure: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_measure_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -908,6 +857,9 @@ def _dbt_measure_from_nested_bytes(data: bytes, serde: Serde) -> DbtMeasure: DbtMeasure.METRICS = RelationField("metrics") DbtMeasure.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DbtMeasure.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DbtMeasure.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtMeasure.MEANINGS = RelationField("meanings") DbtMeasure.MC_MONITORS = RelationField("mcMonitors") DbtMeasure.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_metric.py b/pyatlan_v9/model/assets/dbt_metric.py index fe8b881f9..472f94492 100644 --- a/pyatlan_v9/model/assets/dbt_metric.py +++ b/pyatlan_v9/model/assets/dbt_metric.py @@ -42,7 +42,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtMetric, RelatedDbtModel +from 
.dbt_related import RelatedDbtModel +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -115,6 +116,7 @@ class DbtMetric(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None DBT_MODEL: ClassVar[Any] = None DBT_METRIC_FILTER_COLUMNS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -132,6 +134,8 @@ class DbtMetric(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtMetric" + dbt_metric_filters: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Filters applied to the dbt metric.""" @@ -285,6 +289,11 @@ class DbtMetric(Asset): dbt_metric_filter_columns: Union[List[RelatedColumn], None, UnsetType] = UNSET """Model columns related to this metric.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -340,66 +349,6 @@ class DbtMetric(Asset): def __post_init__(self) -> None: self.type_name = "DbtMetric" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtMetric instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtMetric validation failed: {errors}") - - def minimize(self) -> "DbtMetric": - """ - Return a minimal copy of this DbtMetric with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtMetric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtMetric instance with only the minimum required fields. - """ - self.validate() - return DbtMetric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtMetric": - """ - Create a :class:`RelatedDbtMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtMetric reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtMetric(guid=self.guid) - return RelatedDbtMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -612,6 +561,11 @@ class DbtMetricRelationshipAttributes(AssetRelationshipAttributes): dbt_metric_filter_columns: Union[List[RelatedColumn], None, UnsetType] = UNSET """Model columns related to this metric.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -703,6 +657,7 @@ class DbtMetricNested(AssetNested): "dq_reference_dataset_rules", "dbt_model", "dbt_metric_filter_columns", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -828,9 +783,6 @@ def _dbt_metric_to_nested(dbt_metric: DbtMetric) -> DbtMetricNested: is_incomplete=dbt_metric.is_incomplete, provenance_type=dbt_metric.provenance_type, home_id=dbt_metric.home_id, - depth=dbt_metric.depth, - immediate_upstream=dbt_metric.immediate_upstream, - immediate_downstream=dbt_metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -862,6 +814,7 @@ def _dbt_metric_from_nested(nested: DbtMetricNested) -> DbtMetric: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -870,9 +823,6 @@ def _dbt_metric_from_nested(nested: DbtMetricNested) -> DbtMetric: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_metric_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -969,6 +919,9 @@ def _dbt_metric_from_nested_bytes(data: bytes, serde: Serde) -> DbtMetric: DbtMetric.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") DbtMetric.DBT_MODEL = RelationField("dbtModel") DbtMetric.DBT_METRIC_FILTER_COLUMNS = RelationField("dbtMetricFilterColumns") +DbtMetric.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtMetric.MEANINGS = RelationField("meanings") DbtMetric.MC_MONITORS = RelationField("mcMonitors") DbtMetric.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_model.py b/pyatlan_v9/model/assets/dbt_model.py index 5f65540f6..26062cc52 100644 --- a/pyatlan_v9/model/assets/dbt_model.py +++ b/pyatlan_v9/model/assets/dbt_model.py @@ -42,12 +42,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import ( - RelatedDbtMetric, - RelatedDbtModel, - RelatedDbtModelColumn, - RelatedDbtTest, -) +from .dbt_related import RelatedDbtMetric, RelatedDbtModelColumn, RelatedDbtTest +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -123,6 +119,7 @@ class DbtModel(Asset): DBT_TESTS: ClassVar[Any] = None DBT_METRICS: ClassVar[Any] = None DBT_MODEL_COLUMNS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: 
ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -140,6 +137,8 @@ class DbtModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtModel" + dbt_status: Union[str, None, UnsetType] = UNSET """Status of the dbt model.""" @@ -304,6 +303,11 @@ class DbtModel(Asset): dbt_model_columns: Union[List[RelatedDbtModelColumn], None, UnsetType] = UNSET """Columns that exist within this dbt model.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -365,72 +369,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dbt_model_sql_assets is UNSET: - errors.append("dbt_model_sql_assets is required for creation") - if errors: - raise ValueError(f"DbtModel validation failed: {errors}") - - def minimize(self) -> "DbtModel": - """ - Return a minimal copy of this DbtModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtModel instance with only the minimum required fields. - """ - self.validate() - return DbtModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtModel": - """ - Create a :class:`RelatedDbtModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtModel(guid=self.guid) - return RelatedDbtModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -654,6 +592,11 @@ class DbtModelRelationshipAttributes(AssetRelationshipAttributes): dbt_model_columns: Union[List[RelatedDbtModelColumn], None, UnsetType] = UNSET """Columns that exist within this dbt model.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -745,6 +688,7 @@ class DbtModelNested(AssetNested): "dbt_tests", "dbt_metrics", "dbt_model_columns", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -874,9 +818,6 @@ def _dbt_model_to_nested(dbt_model: DbtModel) -> DbtModelNested: is_incomplete=dbt_model.is_incomplete, provenance_type=dbt_model.provenance_type, home_id=dbt_model.home_id, - depth=dbt_model.depth, - immediate_upstream=dbt_model.immediate_upstream, - immediate_downstream=dbt_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -908,6 +849,7 @@ def _dbt_model_from_nested(nested: DbtModelNested) -> DbtModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -916,9 +858,6 @@ def _dbt_model_from_nested(nested: DbtModelNested) -> DbtModel: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1023,6 +962,9 @@ def _dbt_model_from_nested_bytes(data: bytes, serde: Serde) -> DbtModel: DbtModel.DBT_TESTS = RelationField("dbtTests") DbtModel.DBT_METRICS = RelationField("dbtMetrics") DbtModel.DBT_MODEL_COLUMNS = RelationField("dbtModelColumns") +DbtModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtModel.MEANINGS = RelationField("meanings") DbtModel.MC_MONITORS = RelationField("mcMonitors") DbtModel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_model_column.py b/pyatlan_v9/model/assets/dbt_model_column.py index c65fb473a..ada436aed 100644 --- a/pyatlan_v9/model/assets/dbt_model_column.py +++ b/pyatlan_v9/model/assets/dbt_model_column.py @@ -41,12 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import ( - RelatedDbtModel, - RelatedDbtModelColumn, - RelatedDbtSeed, - RelatedDbtTest, -) +from .dbt_related import RelatedDbtModel, RelatedDbtSeed, RelatedDbtTest +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -112,6 +108,7 @@ class DbtModelColumn(Asset): SQL_COLUMN: ClassVar[Any] = None DBT_MODEL_COLUMN_SQL_COLUMNS: ClassVar[Any] = None DBT_SEED: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -129,6 
+126,8 @@ class DbtModelColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtModelColumn" + dbt_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dbt model this column belongs to.""" @@ -259,6 +258,11 @@ class DbtModelColumn(Asset): dbt_seed: Union[RelatedDbtSeed, None, UnsetType] = UNSET """Seed in which this dbt column exists.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -322,74 +326,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtModelColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dbt_model is UNSET: - errors.append("dbt_model is required for creation") - if self.dbt_model_qualified_name is UNSET: - errors.append("dbt_model_qualified_name is required for creation") - if errors: - raise ValueError(f"DbtModelColumn validation failed: {errors}") - - def minimize(self) -> "DbtModelColumn": - """ - Return a minimal copy of this DbtModelColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtModelColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtModelColumn instance with only the minimum required fields. - """ - self.validate() - return DbtModelColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtModelColumn": - """ - Create a :class:`RelatedDbtModelColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtModelColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtModelColumn(guid=self.guid) - return RelatedDbtModelColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -579,6 +515,11 @@ class DbtModelColumnRelationshipAttributes(AssetRelationshipAttributes): dbt_seed: Union[RelatedDbtSeed, None, UnsetType] = UNSET """Seed in which this dbt column exists.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -672,6 +613,7 @@ class DbtModelColumnNested(AssetNested): "sql_column", "dbt_model_column_sql_columns", "dbt_seed", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -787,9 +729,6 @@ def _dbt_model_column_to_nested( is_incomplete=dbt_model_column.is_incomplete, provenance_type=dbt_model_column.provenance_type, home_id=dbt_model_column.home_id, - depth=dbt_model_column.depth, - immediate_upstream=dbt_model_column.immediate_upstream, - immediate_downstream=dbt_model_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -823,6 +762,7 @@ def _dbt_model_column_from_nested(nested: DbtModelColumnNested) -> DbtModelColum updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -831,9 +771,6 @@ def _dbt_model_column_from_nested(nested: DbtModelColumnNested) -> DbtModelColum 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_model_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -929,6 +866,9 @@ def _dbt_model_column_from_nested_bytes(data: bytes, serde: Serde) -> DbtModelCo DbtModelColumn.SQL_COLUMN = RelationField("sqlColumn") DbtModelColumn.DBT_MODEL_COLUMN_SQL_COLUMNS = RelationField("dbtModelColumnSqlColumns") DbtModelColumn.DBT_SEED = RelationField("dbtSeed") +DbtModelColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtModelColumn.MEANINGS = RelationField("meanings") DbtModelColumn.MC_MONITORS = RelationField("mcMonitors") DbtModelColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_process.py b/pyatlan_v9/model/assets/dbt_process.py index 82fbf9198..3089037d8 100644 --- a/pyatlan_v9/model/assets/dbt_process.py +++ b/pyatlan_v9/model/assets/dbt_process.py @@ -43,10 +43,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtProcess from .fabric_related import RelatedFabricActivity from .fivetran_related import RelatedFivetranConnector from .flow_related import RelatedFlowControlOperation +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .matillion_related import RelatedMatillionComponent from .model_related import RelatedModelAttribute, RelatedModelEntity @@ -100,6 +100,7 @@ class DbtProcess(Asset): AST: ClassVar[Any] = None ADDITIONAL_ETL_CONTEXT: ClassVar[Any] = None AI_DATASET_TYPE: ClassVar[Any] = None + IS_PASS_THROUGH: ClassVar[Any] = None ADF_ACTIVITY: ClassVar[Any] = None AIRFLOW_TASKS: 
ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None @@ -119,6 +120,7 @@ class DbtProcess(Asset): FABRIC_ACTIVITIES: ClassVar[Any] = None FIVETRAN_CONNECTOR: ClassVar[Any] = None FLOW_ORCHESTRATED_BY: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_COMPONENT: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -144,6 +146,8 @@ class DbtProcess(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtProcess" + dbt_process_job_status: Union[str, None, UnsetType] = UNSET """Status of the dbt process job.""" @@ -228,6 +232,9 @@ class DbtProcess(Asset): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + adf_activity: Union[RelatedAdfActivity, None, UnsetType] = UNSET """ADF Activity that is associated with this lineage process.""" @@ -289,6 +296,11 @@ class DbtProcess(Asset): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -370,66 +382,6 @@ class DbtProcess(Asset): def __post_init__(self) -> None: self.type_name = "DbtProcess" - # ========================================================================= - # SDK Methods - # 
========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtProcess instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtProcess validation failed: {errors}") - - def minimize(self) -> "DbtProcess": - """ - Return a minimal copy of this DbtProcess with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtProcess with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtProcess instance with only the minimum required fields. - """ - self.validate() - return DbtProcess(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtProcess": - """ - Create a :class:`RelatedDbtProcess` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtProcess reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtProcess(guid=self.guid) - return RelatedDbtProcess(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -569,6 +521,9 @@ class DbtProcessAttributes(AssetAttributes): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + class DbtProcessRelationshipAttributes(AssetRelationshipAttributes): """DbtProcess-specific relationship attributes for nested API format.""" @@ -634,6 +589,11 @@ class DbtProcessRelationshipAttributes(AssetRelationshipAttributes): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -751,6 +711,7 @@ class DbtProcessNested(AssetNested): "fabric_activities", "fivetran_connector", "flow_orchestrated_by", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_component", "mc_monitors", @@ -811,6 +772,7 @@ def _populate_dbt_process_attrs(attrs: DbtProcessAttributes, obj: DbtProcess) -> attrs.ast = obj.ast attrs.additional_etl_context = obj.additional_etl_context attrs.ai_dataset_type = obj.ai_dataset_type + attrs.is_pass_through = obj.is_pass_through def _extract_dbt_process_attrs(attrs: 
DbtProcessAttributes) -> dict: @@ -846,6 +808,7 @@ def _extract_dbt_process_attrs(attrs: DbtProcessAttributes) -> dict: result["ast"] = attrs.ast result["additional_etl_context"] = attrs.additional_etl_context result["ai_dataset_type"] = attrs.ai_dataset_type + result["is_pass_through"] = attrs.is_pass_through return result @@ -882,9 +845,6 @@ def _dbt_process_to_nested(dbt_process: DbtProcess) -> DbtProcessNested: is_incomplete=dbt_process.is_incomplete, provenance_type=dbt_process.provenance_type, home_id=dbt_process.home_id, - depth=dbt_process.depth, - immediate_upstream=dbt_process.immediate_upstream, - immediate_downstream=dbt_process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -916,6 +876,7 @@ def _dbt_process_from_nested(nested: DbtProcessNested) -> DbtProcess: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -924,9 +885,6 @@ def _dbt_process_from_nested(nested: DbtProcessNested) -> DbtProcess: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_process_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -948,6 +906,7 @@ def _dbt_process_from_nested_bytes(data: bytes, serde: Serde) -> DbtProcess: # Deferred field descriptor initialization # --------------------------------------------------------------------------- from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, KeywordField, NumericField, RelationField, @@ -1003,6 +962,7 @@ def _dbt_process_from_nested_bytes(data: bytes, serde: Serde) -> DbtProcess: "additionalEtlContext", 
"additionalEtlContext" ) DbtProcess.AI_DATASET_TYPE = KeywordField("aiDatasetType", "aiDatasetType") +DbtProcess.IS_PASS_THROUGH = BooleanField("isPassThrough", "isPassThrough") DbtProcess.ADF_ACTIVITY = RelationField("adfActivity") DbtProcess.AIRFLOW_TASKS = RelationField("airflowTasks") DbtProcess.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") @@ -1022,6 +982,9 @@ def _dbt_process_from_nested_bytes(data: bytes, serde: Serde) -> DbtProcess: DbtProcess.FABRIC_ACTIVITIES = RelationField("fabricActivities") DbtProcess.FIVETRAN_CONNECTOR = RelationField("fivetranConnector") DbtProcess.FLOW_ORCHESTRATED_BY = RelationField("flowOrchestratedBy") +DbtProcess.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtProcess.MEANINGS = RelationField("meanings") DbtProcess.MATILLION_COMPONENT = RelationField("matillionComponent") DbtProcess.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/dbt_seed.py b/pyatlan_v9/model/assets/dbt_seed.py index f88a8aafb..1e2abd088 100644 --- a/pyatlan_v9/model/assets/dbt_seed.py +++ b/pyatlan_v9/model/assets/dbt_seed.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtModelColumn, RelatedDbtSeed +from .dbt_related import RelatedDbtModelColumn +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -103,6 +104,7 @@ class DbtSeed(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None DBT_SEED_SQL_ASSETS: ClassVar[Any] = None DBT_MODEL_COLUMNS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: 
ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -120,6 +122,8 @@ class DbtSeed(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtSeed" + dbt_seed_file_path: Union[str, None, UnsetType] = UNSET """File path of the dbt seed.""" @@ -238,6 +242,11 @@ class DbtSeed(Asset): dbt_model_columns: Union[List[RelatedDbtModelColumn], None, UnsetType] = UNSET """Columns that exist within this dbt seed.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -299,72 +308,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtSeed instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dbt_seed_sql_assets is UNSET: - errors.append("dbt_seed_sql_assets is required for creation") - if errors: - raise ValueError(f"DbtSeed validation failed: {errors}") - - def minimize(self) -> "DbtSeed": - """ - Return a minimal copy of this DbtSeed with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtSeed with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtSeed instance with only the minimum required fields. - """ - self.validate() - return DbtSeed(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtSeed": - """ - Create a :class:`RelatedDbtSeed` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtSeed reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtSeed(guid=self.guid) - return RelatedDbtSeed(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -542,6 +485,11 @@ class DbtSeedRelationshipAttributes(AssetRelationshipAttributes): dbt_model_columns: Union[List[RelatedDbtModelColumn], None, UnsetType] = UNSET """Columns that exist within this dbt seed.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -630,6 +578,7 @@ class DbtSeedNested(AssetNested): "dq_reference_dataset_rules", "dbt_seed_sql_assets", "dbt_model_columns", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -737,9 +686,6 @@ def _dbt_seed_to_nested(dbt_seed: DbtSeed) -> DbtSeedNested: is_incomplete=dbt_seed.is_incomplete, provenance_type=dbt_seed.provenance_type, home_id=dbt_seed.home_id, - depth=dbt_seed.depth, - immediate_upstream=dbt_seed.immediate_upstream, - immediate_downstream=dbt_seed.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -769,6 +715,7 @@ def _dbt_seed_from_nested(nested: DbtSeedNested) -> DbtSeed: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -777,9 +724,6 @@ def _dbt_seed_from_nested(nested: DbtSeedNested) -> DbtSeed: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_seed_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -854,6 +798,9 @@ def _dbt_seed_from_nested_bytes(data: bytes, serde: Serde) -> DbtSeed: DbtSeed.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") DbtSeed.DBT_SEED_SQL_ASSETS = RelationField("dbtSeedSqlAssets") DbtSeed.DBT_MODEL_COLUMNS = RelationField("dbtModelColumns") +DbtSeed.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtSeed.MEANINGS = RelationField("meanings") DbtSeed.MC_MONITORS = RelationField("mcMonitors") DbtSeed.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_semantic_model.py b/pyatlan_v9/model/assets/dbt_semantic_model.py index ff7e1b3dc..4c5502afa 100644 --- a/pyatlan_v9/model/assets/dbt_semantic_model.py +++ b/pyatlan_v9/model/assets/dbt_semantic_model.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtSemanticModel +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -102,6 +102,7 @@ class DbtSemanticModel(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -122,6 +123,8 @@ class DbtSemanticModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtSemanticModel" + dbt_alias: Union[str, None, UnsetType] = UNSET """Alias of this asset in dbt.""" @@ -228,6 +231,11 @@ class DbtSemanticModel(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -292,66 +300,6 @@ class DbtSemanticModel(Asset): def __post_init__(self) -> None: self.type_name = "DbtSemanticModel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtSemanticModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtSemanticModel validation failed: {errors}") - - def minimize(self) -> "DbtSemanticModel": - """ - Return a minimal copy of this DbtSemanticModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtSemanticModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtSemanticModel instance with only the minimum required fields. - """ - self.validate() - return DbtSemanticModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtSemanticModel": - """ - Create a :class:`RelatedDbtSemanticModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtSemanticModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtSemanticModel(guid=self.guid) - return RelatedDbtSemanticModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -519,6 +467,11 @@ class DbtSemanticModelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -616,6 +569,7 @@ class DbtSemanticModelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -728,9 +682,6 @@ def _dbt_semantic_model_to_nested( is_incomplete=dbt_semantic_model.is_incomplete, provenance_type=dbt_semantic_model.provenance_type, home_id=dbt_semantic_model.home_id, - depth=dbt_semantic_model.depth, - immediate_upstream=dbt_semantic_model.immediate_upstream, - immediate_downstream=dbt_semantic_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -764,6 +715,7 @@ def _dbt_semantic_model_from_nested(nested: DbtSemanticModelNested) -> DbtSemant updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -772,9 +724,6 @@ def _dbt_semantic_model_from_nested(nested: DbtSemanticModelNested) -> DbtSemant 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_semantic_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -857,6 +806,9 @@ def _dbt_semantic_model_from_nested_bytes( DbtSemanticModel.METRICS = RelationField("metrics") DbtSemanticModel.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DbtSemanticModel.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DbtSemanticModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtSemanticModel.MEANINGS = RelationField("meanings") DbtSemanticModel.MC_MONITORS = RelationField("mcMonitors") DbtSemanticModel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_source.py b/pyatlan_v9/model/assets/dbt_source.py index cb2d5787f..4baed1ad1 100644 --- a/pyatlan_v9/model/assets/dbt_source.py +++ b/pyatlan_v9/model/assets/dbt_source.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtSource, RelatedDbtTest +from .dbt_related import RelatedDbtTest +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -104,6 +105,7 @@ class DbtSource(Asset): DBT_TESTS: ClassVar[Any] = None SQL_ASSET: ClassVar[Any] = None SQL_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -121,6 +123,8 @@ class 
DbtSource(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtSource" + dbt_state: Union[str, None, UnsetType] = UNSET """State of the dbt source.""" @@ -242,6 +246,11 @@ class DbtSource(Asset): sql_assets: Union[List[RelatedSQL], None, UnsetType] = UNSET """Assets related to this source.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -303,72 +312,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtSource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sql_assets is UNSET: - errors.append("sql_assets is required for creation") - if errors: - raise ValueError(f"DbtSource validation failed: {errors}") - - def minimize(self) -> "DbtSource": - """ - Return a minimal copy of this DbtSource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtSource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtSource instance with only the minimum required fields. - """ - self.validate() - return DbtSource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtSource": - """ - Create a :class:`RelatedDbtSource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtSource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtSource(guid=self.guid) - return RelatedDbtSource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -549,6 +492,11 @@ class DbtSourceRelationshipAttributes(AssetRelationshipAttributes): sql_assets: Union[List[RelatedSQL], None, UnsetType] = UNSET """Assets related to this source.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -638,6 +586,7 @@ class DbtSourceNested(AssetNested): "dbt_tests", "sql_asset", "sql_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -745,9 +694,6 @@ def _dbt_source_to_nested(dbt_source: DbtSource) -> DbtSourceNested: is_incomplete=dbt_source.is_incomplete, provenance_type=dbt_source.provenance_type, home_id=dbt_source.home_id, - depth=dbt_source.depth, - immediate_upstream=dbt_source.immediate_upstream, - immediate_downstream=dbt_source.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -779,6 +725,7 @@ def _dbt_source_from_nested(nested: DbtSourceNested) -> DbtSource: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -787,9 +734,6 @@ def _dbt_source_from_nested(nested: DbtSourceNested) -> DbtSource: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_source_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -871,6 +815,9 @@ def _dbt_source_from_nested_bytes(data: bytes, serde: Serde) -> DbtSource: DbtSource.DBT_TESTS = RelationField("dbtTests") DbtSource.SQL_ASSET = RelationField("sqlAsset") DbtSource.SQL_ASSETS = RelationField("sqlAssets") +DbtSource.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtSource.MEANINGS = RelationField("meanings") DbtSource.MC_MONITORS = RelationField("mcMonitors") DbtSource.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_tag.py b/pyatlan_v9/model/assets/dbt_tag.py index 94d529b88..2b6079d66 100644 --- a/pyatlan_v9/model/assets/dbt_tag.py +++ b/pyatlan_v9/model/assets/dbt_tag.py @@ -41,7 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import RelatedDbtTag +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -102,6 +102,7 @@ class DbtTag(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -119,6 +120,8 @@ class DbtTag(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DbtTag" + dbt_alias: Union[str, 
None, UnsetType] = UNSET """Alias of this asset in dbt.""" @@ -237,6 +240,11 @@ class DbtTag(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -300,74 +308,6 @@ def __post_init__(self) -> None: r"^.+/account/[^/]+/project/[^/]+/tag/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtTag instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.tag_id is UNSET: - errors.append("tag_id is required for creation") - if self.tag_allowed_values is UNSET: - errors.append("tag_allowed_values is required for creation") - if self.mapped_classification_name is UNSET: - errors.append("mapped_classification_name is required for creation") - if errors: - raise ValueError(f"DbtTag validation failed: {errors}") - - def minimize(self) -> "DbtTag": - """ - Return a minimal copy of this DbtTag with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtTag with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtTag instance with only the minimum required fields. - """ - self.validate() - return DbtTag(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtTag": - """ - Create a :class:`RelatedDbtTag` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtTag reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtTag(guid=self.guid) - return RelatedDbtTag(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -545,6 +485,11 @@ class DbtTagRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -631,6 +576,7 @@ class DbtTagNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -742,9 +688,6 @@ def _dbt_tag_to_nested(dbt_tag: DbtTag) -> DbtTagNested: is_incomplete=dbt_tag.is_incomplete, provenance_type=dbt_tag.provenance_type, home_id=dbt_tag.home_id, - depth=dbt_tag.depth, - immediate_upstream=dbt_tag.immediate_upstream, - immediate_downstream=dbt_tag.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -774,6 +717,7 @@ def _dbt_tag_from_nested(nested: DbtTagNested) -> DbtTag: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -782,9 +726,6 @@ def _dbt_tag_from_nested(nested: DbtTagNested) -> DbtTag: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_tag_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -864,6 +805,9 @@ def _dbt_tag_from_nested_bytes(data: bytes, serde: Serde) -> DbtTag: DbtTag.METRICS = RelationField("metrics") DbtTag.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DbtTag.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DbtTag.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtTag.MEANINGS = RelationField("meanings") DbtTag.MC_MONITORS = RelationField("mcMonitors") DbtTag.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dbt_test.py b/pyatlan_v9/model/assets/dbt_test.py index 70ab0a9d8..0457d5465 100644 --- a/pyatlan_v9/model/assets/dbt_test.py +++ b/pyatlan_v9/model/assets/dbt_test.py @@ -41,12 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dbt_related import ( - RelatedDbtModel, - RelatedDbtModelColumn, - RelatedDbtSource, - RelatedDbtTest, -) +from .dbt_related import RelatedDbtModel, RelatedDbtModelColumn, RelatedDbtSource +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -116,6 +112,7 @@ class DbtTest(Asset): DBT_SOURCES: ClassVar[Any] = None DBT_MODEL_COLUMNS: ClassVar[Any] = None SQL_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -133,6 +130,8 @@ class DbtTest(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = 
None + type_name: Union[str, UnsetType] = "DbtTest" + dbt_test_status: Union[str, None, UnsetType] = UNSET """Details of the results of the test. For errors, it reads "ERROR".""" @@ -279,6 +278,11 @@ class DbtTest(Asset): sql_assets: Union[List[RelatedSQL], None, UnsetType] = UNSET """Assets related to this test.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -334,66 +338,6 @@ class DbtTest(Asset): def __post_init__(self) -> None: self.type_name = "DbtTest" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DbtTest instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DbtTest validation failed: {errors}") - - def minimize(self) -> "DbtTest": - """ - Return a minimal copy of this DbtTest with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DbtTest with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DbtTest instance with only the minimum required fields. - """ - self.validate() - return DbtTest(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDbtTest": - """ - Create a :class:`RelatedDbtTest` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDbtTest reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDbtTest(guid=self.guid) - return RelatedDbtTest(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -599,6 +543,11 @@ class DbtTestRelationshipAttributes(AssetRelationshipAttributes): sql_assets: Union[List[RelatedSQL], None, UnsetType] = UNSET """Assets related to this test.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -689,6 +638,7 @@ class DbtTestNested(AssetNested): "dbt_sources", "dbt_model_columns", "sql_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -808,9 +758,6 @@ def _dbt_test_to_nested(dbt_test: DbtTest) -> DbtTestNested: is_incomplete=dbt_test.is_incomplete, provenance_type=dbt_test.provenance_type, home_id=dbt_test.home_id, - depth=dbt_test.depth, - immediate_upstream=dbt_test.immediate_upstream, - immediate_downstream=dbt_test.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -840,6 +787,7 @@ def _dbt_test_from_nested(nested: DbtTestNested) -> DbtTest: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -848,9 +796,6 @@ def _dbt_test_from_nested(nested: DbtTestNested) -> DbtTest: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dbt_test_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -935,6 +880,9 @@ def _dbt_test_from_nested_bytes(data: bytes, serde: Serde) -> DbtTest: DbtTest.DBT_SOURCES = RelationField("dbtSources") DbtTest.DBT_MODEL_COLUMNS = RelationField("dbtModelColumns") DbtTest.SQL_ASSETS = RelationField("sqlAssets") +DbtTest.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DbtTest.MEANINGS = RelationField("meanings") DbtTest.MC_MONITORS = RelationField("mcMonitors") DbtTest.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/document_db.py b/pyatlan_v9/model/assets/document_db.py index 372914edd..cbe23b5b8 100644 --- a/pyatlan_v9/model/assets/document_db.py +++ b/pyatlan_v9/model/assets/document_db.py @@ -41,7 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .document_db_related import RelatedDocumentDB +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +80,7 @@ class DocumentDB(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +98,8 @@ class DocumentDB(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DocumentDB" + no_sql_schema_definition: Union[str, None, UnsetType] = 
msgspec.field( default=UNSET, name="noSQLSchemaDefinition" ) @@ -151,6 +154,11 @@ class DocumentDB(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -206,66 +214,6 @@ class DocumentDB(Asset): def __post_init__(self) -> None: self.type_name = "DocumentDB" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DocumentDB instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DocumentDB validation failed: {errors}") - - def minimize(self) -> "DocumentDB": - """ - Return a minimal copy of this DocumentDB with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DocumentDB with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DocumentDB instance with only the minimum required fields. - """ - self.validate() - return DocumentDB(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDocumentDB": - """ - Create a :class:`RelatedDocumentDB` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDocumentDB reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDocumentDB(guid=self.guid) - return RelatedDocumentDB(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -379,6 +327,11 @@ class DocumentDBRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -465,6 +418,7 @@ class DocumentDBNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -532,9 +486,6 @@ def _document_db_to_nested(document_db: DocumentDB) -> DocumentDBNested: is_incomplete=document_db.is_incomplete, provenance_type=document_db.provenance_type, home_id=document_db.home_id, - depth=document_db.depth, - 
immediate_upstream=document_db.immediate_upstream, - immediate_downstream=document_db.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -566,6 +517,7 @@ def _document_db_from_nested(nested: DocumentDBNested) -> DocumentDB: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -574,9 +526,6 @@ def _document_db_from_nested(nested: DocumentDBNested) -> DocumentDB: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_document_db_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -619,6 +568,9 @@ def _document_db_from_nested_bytes(data: bytes, serde: Serde) -> DocumentDB: DocumentDB.METRICS = RelationField("metrics") DocumentDB.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DocumentDB.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DocumentDB.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DocumentDB.MEANINGS = RelationField("meanings") DocumentDB.MC_MONITORS = RelationField("mcMonitors") DocumentDB.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/document_db_collection.py b/pyatlan_v9/model/assets/document_db_collection.py index c2c2e1fe2..90ab35bc6 100644 --- a/pyatlan_v9/model/assets/document_db_collection.py +++ b/pyatlan_v9/model/assets/document_db_collection.py @@ -49,7 +49,8 @@ RelatedDbtSource, RelatedDbtTest, ) -from .document_db_related import RelatedDocumentDBCollection, RelatedDocumentDBDatabase +from .document_db_related import RelatedDocumentDBDatabase +from 
.gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -170,6 +171,7 @@ class DocumentDBCollection(Asset): SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None DOCUMENT_DB_DATABASE: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -197,6 +199,8 @@ class DocumentDBCollection(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DocumentDBCollection" + document_db_collection_subtype: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="documentDBCollectionSubtype" ) @@ -491,6 +495,11 @@ class DocumentDBCollection(Asset): ) """Database in which the collection exists.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -590,76 +599,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DocumentDBCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.document_db_database is UNSET: - errors.append("document_db_database is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"DocumentDBCollection validation failed: {errors}") - - def minimize(self) -> "DocumentDBCollection": - """ - Return a minimal copy of this DocumentDBCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DocumentDBCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DocumentDBCollection instance with only the minimum required fields. - """ - self.validate() - return DocumentDBCollection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDocumentDBCollection": - """ - Create a :class:`RelatedDocumentDBCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDocumentDBCollection reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDocumentDBCollection(guid=self.guid) - return RelatedDocumentDBCollection(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -1061,6 +1000,11 @@ class DocumentDBCollectionRelationshipAttributes(AssetRelationshipAttributes): ) """Database in which the collection exists.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1194,6 +1138,7 @@ class DocumentDBCollectionNested(AssetNested): "sql_dbt_sources", "dbt_seed_assets", "document_db_database", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1451,9 +1396,6 @@ def _document_db_collection_to_nested( is_incomplete=document_db_collection.is_incomplete, provenance_type=document_db_collection.provenance_type, home_id=document_db_collection.home_id, - depth=document_db_collection.depth, - immediate_upstream=document_db_collection.immediate_upstream, - immediate_downstream=document_db_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1489,6 +1431,7 @@ def _document_db_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1497,9 +1440,6 @@ def _document_db_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_document_db_collection_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1716,6 +1656,9 @@ def _document_db_collection_from_nested_bytes( DocumentDBCollection.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DocumentDBCollection.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") DocumentDBCollection.DOCUMENT_DB_DATABASE = RelationField("documentDBDatabase") +DocumentDBCollection.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DocumentDBCollection.MEANINGS = RelationField("meanings") DocumentDBCollection.MC_MONITORS = RelationField("mcMonitors") DocumentDBCollection.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/document_db_database.py b/pyatlan_v9/model/assets/document_db_database.py index adcb507d8..8779765d9 100644 --- a/pyatlan_v9/model/assets/document_db_database.py +++ b/pyatlan_v9/model/assets/document_db_database.py @@ -48,8 +48,9 @@ RelatedDbtSource, RelatedDbtTest, ) -from .document_db_related import RelatedDocumentDBCollection, RelatedDocumentDBDatabase +from .document_db_related import RelatedDocumentDBCollection from .fabric_related import RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -128,6 +129,7 @@ class DocumentDBDatabase(Asset): DBT_SEED_ASSETS: ClassVar[Any] = None DOCUMENT_DB_COLLECTIONS: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -150,6 +152,8 @@ class 
DocumentDBDatabase(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DocumentDBDatabase" + document_db_database_collection_count: Union[int, None, UnsetType] = msgspec.field( default=UNSET, name="documentDBDatabaseCollectionCount" ) @@ -314,6 +318,11 @@ class DocumentDBDatabase(Asset): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -392,66 +401,6 @@ class DocumentDBDatabase(Asset): def __post_init__(self) -> None: self.type_name = "DocumentDBDatabase" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DocumentDBDatabase instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DocumentDBDatabase validation failed: {errors}") - - def minimize(self) -> "DocumentDBDatabase": - """ - Return a minimal copy of this DocumentDBDatabase with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DocumentDBDatabase with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DocumentDBDatabase instance with only the minimum required fields. - """ - self.validate() - return DocumentDBDatabase(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDocumentDBDatabase": - """ - Create a :class:`RelatedDocumentDBDatabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDocumentDBDatabase reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDocumentDBDatabase(guid=self.guid) - return RelatedDocumentDBDatabase(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -711,6 +660,11 @@ class DocumentDBDatabaseRelationshipAttributes(AssetRelationshipAttributes): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -830,6 +784,7 @@ class DocumentDBDatabaseNested(AssetNested): "dbt_seed_assets", "document_db_collections", "fabric_workspace", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -978,9 +933,6 @@ def _document_db_database_to_nested( is_incomplete=document_db_database.is_incomplete, provenance_type=document_db_database.provenance_type, home_id=document_db_database.home_id, - depth=document_db_database.depth, - immediate_upstream=document_db_database.immediate_upstream, - immediate_downstream=document_db_database.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1016,6 +968,7 @@ def _document_db_database_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1024,9 +977,6 @@ def _document_db_database_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_document_db_database_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1149,6 +1099,9 @@ def _document_db_database_from_nested_bytes( DocumentDBDatabase.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") DocumentDBDatabase.DOCUMENT_DB_COLLECTIONS = RelationField("documentDBCollections") DocumentDBDatabase.FABRIC_WORKSPACE = RelationField("fabricWorkspace") +DocumentDBDatabase.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DocumentDBDatabase.MEANINGS = RelationField("meanings") DocumentDBDatabase.MC_MONITORS = RelationField("mcMonitors") DocumentDBDatabase.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/domo.py b/pyatlan_v9/model/assets/domo.py index d7e6642b5..3ee68aebb 100644 --- a/pyatlan_v9/model/assets/domo.py +++ b/pyatlan_v9/model/assets/domo.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .domo_related import RelatedDomo +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +80,7 @@ class Domo(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +98,8 @@ class Domo(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Domo" + domo_id: Union[str, None, UnsetType] = UNSET """Id of the Domo dataset.""" @@ -152,6 
+155,11 @@ class Domo(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -207,66 +215,6 @@ class Domo(Asset): def __post_init__(self) -> None: self.type_name = "Domo" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Domo instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Domo validation failed: {errors}") - - def minimize(self) -> "Domo": - """ - Return a minimal copy of this Domo with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Domo with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Domo instance with only the minimum required fields. - """ - self.validate() - return Domo(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomo": - """ - Create a :class:`RelatedDomo` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomo reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDomo(guid=self.guid) - return RelatedDomo(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -381,6 +329,11 @@ class DomoRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -463,6 +416,7 @@ class DomoNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -532,9 +486,6 @@ def _domo_to_nested(domo: Domo) -> DomoNested: is_incomplete=domo.is_incomplete, provenance_type=domo.provenance_type, home_id=domo.home_id, - depth=domo.depth, - immediate_upstream=domo.immediate_upstream, - immediate_downstream=domo.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -564,6 +515,7 @@ def _domo_from_nested(nested: DomoNested) -> Domo: updated_by=nested.updated_by, 
classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -572,9 +524,6 @@ def _domo_from_nested(nested: DomoNested) -> Domo: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -614,6 +563,9 @@ def _domo_from_nested_bytes(data: bytes, serde: Serde) -> Domo: Domo.METRICS = RelationField("metrics") Domo.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Domo.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Domo.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Domo.MEANINGS = RelationField("meanings") Domo.MC_MONITORS = RelationField("mcMonitors") Domo.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/domo_card.py b/pyatlan_v9/model/assets/domo_card.py index ad30cc137..99b5030fc 100644 --- a/pyatlan_v9/model/assets/domo_card.py +++ b/pyatlan_v9/model/assets/domo_card.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .domo_related import RelatedDomoCard, RelatedDomoDashboard, RelatedDomoDataset +from .domo_related import RelatedDomoDashboard, RelatedDomoDataset +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +87,7 @@ class DomoCard(Asset): DQ_REFERENCE_DATASET_RULES: 
ClassVar[Any] = None DOMO_DASHBOARDS: ClassVar[Any] = None DOMO_DATASET: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class DomoCard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DomoCard" + domo_card_type: Union[str, None, UnsetType] = UNSET """Type of the Domo Card.""" @@ -173,6 +177,11 @@ class DomoCard(Asset): domo_dataset: Union[RelatedDomoDataset, None, UnsetType] = UNSET """Domo Dataset that contains this Domo Card.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -234,72 +243,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DomoCard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.domo_dataset is UNSET: - errors.append("domo_dataset is required for creation") - if errors: - raise ValueError(f"DomoCard validation failed: {errors}") - - def minimize(self) -> "DomoCard": - """ - Return a minimal copy of this DomoCard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DomoCard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DomoCard instance with only the minimum required fields. - """ - self.validate() - return DomoCard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomoCard": - """ - Create a :class:`RelatedDomoCard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomoCard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDomoCard(guid=self.guid) - return RelatedDomoCard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -429,6 +372,11 @@ class DomoCardRelationshipAttributes(AssetRelationshipAttributes): domo_dataset: Union[RelatedDomoDataset, None, UnsetType] = UNSET """Domo Dataset that contains this Domo Card.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -517,6 +465,7 @@ class DomoCardNested(AssetNested): "dq_reference_dataset_rules", "domo_dashboards", "domo_dataset", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -592,9 +541,6 @@ def _domo_card_to_nested(domo_card: DomoCard) -> DomoCardNested: is_incomplete=domo_card.is_incomplete, provenance_type=domo_card.provenance_type, home_id=domo_card.home_id, - depth=domo_card.depth, - immediate_upstream=domo_card.immediate_upstream, - immediate_downstream=domo_card.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -626,6 +572,7 @@ def _domo_card_from_nested(nested: DomoCardNested) -> DomoCard: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -634,9 +581,6 @@ def _domo_card_from_nested(nested: DomoCardNested) -> DomoCard: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_card_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -687,6 +631,9 @@ def _domo_card_from_nested_bytes(data: bytes, serde: Serde) -> DomoCard: DomoCard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") DomoCard.DOMO_DASHBOARDS = RelationField("domoDashboards") DomoCard.DOMO_DATASET = RelationField("domoDataset") +DomoCard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DomoCard.MEANINGS = RelationField("meanings") DomoCard.MC_MONITORS = RelationField("mcMonitors") DomoCard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/domo_dashboard.py b/pyatlan_v9/model/assets/domo_dashboard.py index 403bb4c1c..ecc9cd9e6 100644 --- a/pyatlan_v9/model/assets/domo_dashboard.py +++ b/pyatlan_v9/model/assets/domo_dashboard.py @@ -42,6 +42,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .domo_related import RelatedDomoCard, RelatedDomoDashboard +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class DomoDashboard(Asset): DOMO_CARDS: ClassVar[Any] = None DOMO_DASHBOARD_CHILDREN: ClassVar[Any] = None DOMO_DASHBOARD_PARENT: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class DomoDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, 
UnsetType] = "DomoDashboard" + domo_dashboard_card_count: Union[int, None, UnsetType] = UNSET """Number of cards linked to this dashboard.""" @@ -169,6 +173,11 @@ class DomoDashboard(Asset): domo_dashboard_parent: Union[RelatedDomoDashboard, None, UnsetType] = UNSET """Parent Domo Dashboard that contains this child Domo Dashboard.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -230,70 +239,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DomoDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"DomoDashboard validation failed: {errors}") - - def minimize(self) -> "DomoDashboard": - """ - Return a minimal copy of this DomoDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DomoDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DomoDashboard instance with only the minimum required fields. - """ - self.validate() - return DomoDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomoDashboard": - """ - Create a :class:`RelatedDomoDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomoDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDomoDashboard(guid=self.guid) - return RelatedDomoDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -420,6 +365,11 @@ class DomoDashboardRelationshipAttributes(AssetRelationshipAttributes): domo_dashboard_parent: Union[RelatedDomoDashboard, None, UnsetType] = UNSET """Parent Domo Dashboard that contains this child Domo Dashboard.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -511,6 +461,7 @@ class DomoDashboardNested(AssetNested): "domo_cards", "domo_dashboard_children", "domo_dashboard_parent", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -584,9 +535,6 @@ def _domo_dashboard_to_nested(domo_dashboard: DomoDashboard) -> DomoDashboardNes is_incomplete=domo_dashboard.is_incomplete, provenance_type=domo_dashboard.provenance_type, home_id=domo_dashboard.home_id, - depth=domo_dashboard.depth, - immediate_upstream=domo_dashboard.immediate_upstream, - immediate_downstream=domo_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -620,6 +568,7 @@ def _domo_dashboard_from_nested(nested: DomoDashboardNested) -> DomoDashboard: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -628,9 +577,6 @@ 
def _domo_dashboard_from_nested(nested: DomoDashboardNested) -> DomoDashboard: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -686,6 +632,9 @@ def _domo_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> DomoDashboar DomoDashboard.DOMO_CARDS = RelationField("domoCards") DomoDashboard.DOMO_DASHBOARD_CHILDREN = RelationField("domoDashboardChildren") DomoDashboard.DOMO_DASHBOARD_PARENT = RelationField("domoDashboardParent") +DomoDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DomoDashboard.MEANINGS = RelationField("meanings") DomoDashboard.MC_MONITORS = RelationField("mcMonitors") DomoDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/domo_dataset.py b/pyatlan_v9/model/assets/domo_dataset.py index 930beb5f0..5e36c531a 100644 --- a/pyatlan_v9/model/assets/domo_dataset.py +++ b/pyatlan_v9/model/assets/domo_dataset.py @@ -40,7 +40,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .domo_related import RelatedDomoCard, RelatedDomoDataset, RelatedDomoDatasetColumn +from .domo_related import RelatedDomoCard, RelatedDomoDatasetColumn +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -87,6 +88,7 @@ class DomoDataset(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None DOMO_CARDS: ClassVar[Any] = None DOMO_DATASET_COLUMNS: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class DomoDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DomoDataset" + domo_dataset_row_count: Union[int, None, UnsetType] = UNSET """Number of rows in the Domo dataset.""" @@ -180,6 +184,11 @@ class DomoDataset(Asset): domo_dataset_columns: Union[List[RelatedDomoDatasetColumn], None, UnsetType] = UNSET """Domo Dataset Columns that are contained by this Domo Dataset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -235,66 +244,6 @@ class DomoDataset(Asset): def __post_init__(self) -> None: self.type_name = "DomoDataset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DomoDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DomoDataset validation failed: {errors}") - - def minimize(self) -> "DomoDataset": - """ - Return a minimal copy of this DomoDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DomoDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DomoDataset instance with only the minimum required fields. - """ - self.validate() - return DomoDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomoDataset": - """ - Create a :class:`RelatedDomoDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomoDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDomoDataset(guid=self.guid) - return RelatedDomoDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -430,6 +379,11 @@ class DomoDatasetRelationshipAttributes(AssetRelationshipAttributes): domo_dataset_columns: Union[List[RelatedDomoDatasetColumn], None, UnsetType] = UNSET """Domo Dataset Columns that are contained by this Domo Dataset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -518,6 +472,7 @@ class DomoDatasetNested(AssetNested): "dq_reference_dataset_rules", "domo_cards", "domo_dataset_columns", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -599,9 +554,6 @@ def _domo_dataset_to_nested(domo_dataset: DomoDataset) -> DomoDatasetNested: is_incomplete=domo_dataset.is_incomplete, provenance_type=domo_dataset.provenance_type, home_id=domo_dataset.home_id, - depth=domo_dataset.depth, - immediate_upstream=domo_dataset.immediate_upstream, - immediate_downstream=domo_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -633,6 +585,7 @@ def _domo_dataset_from_nested(nested: DomoDatasetNested) -> DomoDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -641,9 +594,6 @@ def 
_domo_dataset_from_nested(nested: DomoDatasetNested) -> DomoDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -709,6 +659,9 @@ def _domo_dataset_from_nested_bytes(data: bytes, serde: Serde) -> DomoDataset: DomoDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") DomoDataset.DOMO_CARDS = RelationField("domoCards") DomoDataset.DOMO_DATASET_COLUMNS = RelationField("domoDatasetColumns") +DomoDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DomoDataset.MEANINGS = RelationField("meanings") DomoDataset.MC_MONITORS = RelationField("mcMonitors") DomoDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/domo_dataset_column.py b/pyatlan_v9/model/assets/domo_dataset_column.py index bf39306b2..86cc30d2b 100644 --- a/pyatlan_v9/model/assets/domo_dataset_column.py +++ b/pyatlan_v9/model/assets/domo_dataset_column.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .domo_related import RelatedDomoDataset, RelatedDomoDatasetColumn +from .domo_related import RelatedDomoDataset +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +87,7 @@ class DomoDatasetColumn(Asset): DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None DOMO_DATASET: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: 
ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class DomoDatasetColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DomoDatasetColumn" + domo_dataset_column_type: Union[str, None, UnsetType] = UNSET """Type of Domo Dataset Column.""" @@ -173,6 +177,11 @@ class DomoDatasetColumn(Asset): domo_dataset: Union[RelatedDomoDataset, None, UnsetType] = UNSET """Domo Dataset that contains this Domo Dataset Column.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -234,74 +243,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DomoDatasetColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.domo_dataset is UNSET: - errors.append("domo_dataset is required for creation") - if self.domo_dataset_qualified_name is UNSET: - errors.append("domo_dataset_qualified_name is required for creation") - if errors: - raise ValueError(f"DomoDatasetColumn validation failed: {errors}") - - def minimize(self) -> "DomoDatasetColumn": - """ - Return a minimal copy of this DomoDatasetColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DomoDatasetColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DomoDatasetColumn instance with only the minimum required fields. - """ - self.validate() - return DomoDatasetColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDomoDatasetColumn": - """ - Create a :class:`RelatedDomoDatasetColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDomoDatasetColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDomoDatasetColumn(guid=self.guid) - return RelatedDomoDatasetColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -433,6 +374,11 @@ class DomoDatasetColumnRelationshipAttributes(AssetRelationshipAttributes): domo_dataset: Union[RelatedDomoDataset, None, UnsetType] = UNSET """Domo Dataset that contains this Domo Dataset Column.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -522,6 +468,7 @@ class DomoDatasetColumnNested(AssetNested): "dq_base_dataset_rules", "dq_reference_dataset_rules", "domo_dataset", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -607,9 +554,6 @@ def _domo_dataset_column_to_nested( is_incomplete=domo_dataset_column.is_incomplete, provenance_type=domo_dataset_column.provenance_type, home_id=domo_dataset_column.home_id, - depth=domo_dataset_column.depth, - immediate_upstream=domo_dataset_column.immediate_upstream, - immediate_downstream=domo_dataset_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -645,6 +589,7 @@ def _domo_dataset_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -653,9 +598,6 @@ def _domo_dataset_column_from_nested( 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_domo_dataset_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -722,6 +664,9 @@ def _domo_dataset_column_from_nested_bytes( DomoDatasetColumn.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DomoDatasetColumn.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") DomoDatasetColumn.DOMO_DATASET = RelationField("domoDataset") +DomoDatasetColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DomoDatasetColumn.MEANINGS = RelationField("meanings") DomoDatasetColumn.MC_MONITORS = RelationField("mcMonitors") DomoDatasetColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dremio.py b/pyatlan_v9/model/assets/dremio.py index a263b5e12..d2d5fe16e 100644 --- a/pyatlan_v9/model/assets/dremio.py +++ b/pyatlan_v9/model/assets/dremio.py @@ -47,7 +47,7 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import RelatedDremio +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -128,6 +128,7 @@ class Dremio(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -149,6 +150,8 @@ class Dremio(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Dremio" + dremio_id: Union[str, None, 
UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -316,6 +319,11 @@ class Dremio(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -391,66 +399,6 @@ class Dremio(Asset): def __post_init__(self) -> None: self.type_name = "Dremio" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Dremio instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Dremio validation failed: {errors}") - - def minimize(self) -> "Dremio": - """ - Return a minimal copy of this Dremio with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Dremio with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Dremio instance with only the minimum required fields. - """ - self.validate() - return Dremio(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremio": - """ - Create a :class:`RelatedDremio` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremio reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDremio(guid=self.guid) - return RelatedDremio(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -677,6 +625,11 @@ class DremioRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -789,6 +742,7 @@ class DremioNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -938,9 +892,6 @@ def _dremio_to_nested(dremio: Dremio) -> DremioNested: is_incomplete=dremio.is_incomplete, provenance_type=dremio.provenance_type, home_id=dremio.home_id, - depth=dremio.depth, - 
immediate_upstream=dremio.immediate_upstream, - immediate_downstream=dremio.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -970,6 +921,7 @@ def _dremio_from_nested(nested: DremioNested) -> Dremio: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -978,9 +930,6 @@ def _dremio_from_nested(nested: DremioNested) -> Dremio: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1092,6 +1041,9 @@ def _dremio_from_nested_bytes(data: bytes, serde: Serde) -> Dremio: Dremio.DBT_SOURCES = RelationField("dbtSources") Dremio.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Dremio.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Dremio.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Dremio.MEANINGS = RelationField("meanings") Dremio.MC_MONITORS = RelationField("mcMonitors") Dremio.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dremio_column.py b/pyatlan_v9/model/assets/dremio_column.py index 0941720b4..4d67a0748 100644 --- a/pyatlan_v9/model/assets/dremio_column.py +++ b/pyatlan_v9/model/assets/dremio_column.py @@ -50,7 +50,7 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import RelatedDremioColumn +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import RelatedMongoDBCollection @@ -155,6 
+155,7 @@ class DremioColumn(Asset): PARENT_COLUMN_NAME: ClassVar[Any] = None COLUMN_DISTINCT_VALUES_COUNT: ClassVar[Any] = None COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[Any] = None + COLUMN_DISTINCT_VALUES_PERCENTAGE: ClassVar[Any] = None COLUMN_HISTOGRAM: ClassVar[Any] = None COLUMN_MAX: ClassVar[Any] = None COLUMN_MIN: ClassVar[Any] = None @@ -226,6 +227,7 @@ class DremioColumn(Asset): DBT_MODEL_COLUMNS: ClassVar[Any] = None COLUMN_DBT_MODEL_COLUMNS: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MONGO_DB_COLLECTION: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -260,6 +262,8 @@ class DremioColumn(Asset): SQL_INSIGHT_FILTERS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioColumn" + dremio_id: Union[str, None, UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -451,6 +455,9 @@ class DremioColumn(Asset): column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" + column_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" @@ -678,6 +685,11 @@ class DremioColumn(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -796,69 +808,6 @@ class DremioColumn(Asset): def 
__post_init__(self) -> None: self.type_name = "DremioColumn" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.order is UNSET: - errors.append("order is required for creation") - if errors: - raise ValueError(f"DremioColumn validation failed: {errors}") - - def minimize(self) -> "DremioColumn": - """ - Return a minimal copy of this DremioColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioColumn instance with only the minimum required fields. - """ - self.validate() - return DremioColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioColumn": - """ - Create a :class:`RelatedDremioColumn` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDremioColumn(guid=self.guid) - return RelatedDremioColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1105,6 +1054,9 @@ class DremioColumnAttributes(AssetAttributes): column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" + column_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" @@ -1336,6 +1288,11 @@ class DremioColumnRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1501,6 +1458,7 @@ class DremioColumnNested(AssetNested): "dbt_model_columns", "column_dbt_model_columns", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mongo_db_collection", "mc_monitors", @@ -1609,6 +1567,7 @@ def _populate_dremio_column_attrs( attrs.parent_column_name = obj.parent_column_name attrs.column_distinct_values_count = 
obj.column_distinct_values_count attrs.column_distinct_values_count_long = obj.column_distinct_values_count_long + attrs.column_distinct_values_percentage = obj.column_distinct_values_percentage attrs.column_histogram = obj.column_histogram attrs.column_max = obj.column_max attrs.column_min = obj.column_min @@ -1736,6 +1695,9 @@ def _extract_dremio_column_attrs(attrs: DremioColumnAttributes) -> dict: result["column_distinct_values_count_long"] = ( attrs.column_distinct_values_count_long ) + result["column_distinct_values_percentage"] = ( + attrs.column_distinct_values_percentage + ) result["column_histogram"] = attrs.column_histogram result["column_max"] = attrs.column_max result["column_min"] = attrs.column_min @@ -1821,9 +1783,6 @@ def _dremio_column_to_nested(dremio_column: DremioColumn) -> DremioColumnNested: is_incomplete=dremio_column.is_incomplete, provenance_type=dremio_column.provenance_type, home_id=dremio_column.home_id, - depth=dremio_column.depth, - immediate_upstream=dremio_column.immediate_upstream, - immediate_downstream=dremio_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1857,6 +1816,7 @@ def _dremio_column_from_nested(nested: DremioColumnNested) -> DremioColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1865,9 +1825,6 @@ def _dremio_column_from_nested(nested: DremioColumnNested) -> DremioColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2010,6 +1967,9 @@ def 
_dremio_column_from_nested_bytes(data: bytes, serde: Serde) -> DremioColumn: DremioColumn.COLUMN_DISTINCT_VALUES_COUNT_LONG = NumericField( "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" ) +DremioColumn.COLUMN_DISTINCT_VALUES_PERCENTAGE = NumericField( + "columnDistinctValuesPercentage", "columnDistinctValuesPercentage" +) DremioColumn.COLUMN_HISTOGRAM = KeywordField("columnHistogram", "columnHistogram") DremioColumn.COLUMN_MAX = NumericField("columnMax", "columnMax") DremioColumn.COLUMN_MIN = NumericField("columnMin", "columnMin") @@ -2138,6 +2098,9 @@ def _dremio_column_from_nested_bytes(data: bytes, serde: Serde) -> DremioColumn: DremioColumn.DBT_MODEL_COLUMNS = RelationField("dbtModelColumns") DremioColumn.COLUMN_DBT_MODEL_COLUMNS = RelationField("columnDbtModelColumns") DremioColumn.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +DremioColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DremioColumn.MEANINGS = RelationField("meanings") DremioColumn.MONGO_DB_COLLECTION = RelationField("mongoDBCollection") DremioColumn.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/dremio_folder.py b/pyatlan_v9/model/assets/dremio_folder.py index 09b5f1dd3..0bf433fdf 100644 --- a/pyatlan_v9/model/assets/dremio_folder.py +++ b/pyatlan_v9/model/assets/dremio_folder.py @@ -55,6 +55,7 @@ RelatedDremioSpace, RelatedDremioVirtualDataset, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -142,6 +143,7 @@ class DremioFolder(Asset): DREMIO_PARENT_FOLDER: ClassVar[Any] = None DREMIO_PHYSICAL_DATASETS: ClassVar[Any] = None DREMIO_VIRTUAL_DATASETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: 
ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -163,6 +165,8 @@ class DremioFolder(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioFolder" + dremio_parent_asset_type: Union[str, None, UnsetType] = UNSET """Type of top level asset that contains this folder.""" @@ -355,6 +359,11 @@ class DremioFolder(Asset): ] = UNSET """Virtual datasets (views) contained within the Dremio Folder.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -436,76 +445,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dremio_source is UNSET: - errors.append("dremio_source is required for creation") - if self.dremio_source_name is UNSET: - errors.append("dremio_source_name is required for creation") - if self.dremio_source_qualified_name is UNSET: - errors.append("dremio_source_qualified_name is required for creation") - if errors: - raise ValueError(f"DremioFolder validation failed: {errors}") - - def minimize(self) -> "DremioFolder": - """ - Return a minimal copy of this DremioFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioFolder instance with only the minimum required fields. - """ - self.validate() - return DremioFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioFolder": - """ - Create a :class:`RelatedDremioFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioFolder reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDremioFolder(guid=self.guid) - return RelatedDremioFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -757,6 +696,11 @@ class DremioFolderRelationshipAttributes(AssetRelationshipAttributes): ] = UNSET """Virtual datasets (views) contained within the Dremio Folder.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -877,6 +821,7 @@ class DremioFolderNested(AssetNested): "dremio_parent_folder", "dremio_physical_datasets", "dremio_virtual_datasets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1030,9 +975,6 @@ def _dremio_folder_to_nested(dremio_folder: DremioFolder) -> DremioFolderNested: is_incomplete=dremio_folder.is_incomplete, provenance_type=dremio_folder.provenance_type, home_id=dremio_folder.home_id, - depth=dremio_folder.depth, - immediate_upstream=dremio_folder.immediate_upstream, - immediate_downstream=dremio_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1066,6 +1008,7 @@ def _dremio_folder_from_nested(nested: DremioFolderNested) -> DremioFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1074,9 +1017,6 @@ def _dremio_folder_from_nested(nested: DremioFolderNested) -> 
DremioFolder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_folder_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1205,6 +1145,9 @@ def _dremio_folder_from_nested_bytes(data: bytes, serde: Serde) -> DremioFolder: DremioFolder.DREMIO_PARENT_FOLDER = RelationField("dremioParentFolder") DremioFolder.DREMIO_PHYSICAL_DATASETS = RelationField("dremioPhysicalDatasets") DremioFolder.DREMIO_VIRTUAL_DATASETS = RelationField("dremioVirtualDatasets") +DremioFolder.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DremioFolder.MEANINGS = RelationField("meanings") DremioFolder.MC_MONITORS = RelationField("mcMonitors") DremioFolder.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dremio_physical_dataset.py b/pyatlan_v9/model/assets/dremio_physical_dataset.py index 70460cd79..b6a5eb97b 100644 --- a/pyatlan_v9/model/assets/dremio_physical_dataset.py +++ b/pyatlan_v9/model/assets/dremio_physical_dataset.py @@ -48,11 +48,8 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import ( - RelatedDremioFolder, - RelatedDremioPhysicalDataset, - RelatedDremioSource, -) +from .dremio_related import RelatedDremioFolder, RelatedDremioSource +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -169,6 +166,7 @@ class DremioPhysicalDataset(Asset): DBT_SEED_ASSETS: ClassVar[Any] = None DREMIO_SOURCE: ClassVar[Any] = None DREMIO_FOLDER: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None 
MC_INCIDENTS: ClassVar[Any] = None @@ -196,6 +194,8 @@ class DremioPhysicalDataset(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioPhysicalDataset" + dremio_id: Union[str, None, UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -450,6 +450,11 @@ class DremioPhysicalDataset(Asset): dremio_folder: Union[RelatedDremioFolder, None, UnsetType] = UNSET """Dremio Folder that contains the physical datasets (tables).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -549,76 +554,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioPhysicalDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dremio_source is UNSET: - errors.append("dremio_source is required for creation") - if self.dremio_source_name is UNSET: - errors.append("dremio_source_name is required for creation") - if self.dremio_source_qualified_name is UNSET: - errors.append("dremio_source_qualified_name is required for creation") - if errors: - raise ValueError(f"DremioPhysicalDataset validation failed: {errors}") - - def minimize(self) -> "DremioPhysicalDataset": - """ - Return a minimal copy of this DremioPhysicalDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioPhysicalDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioPhysicalDataset instance with only the minimum required fields. - """ - self.validate() - return DremioPhysicalDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioPhysicalDataset": - """ - Create a :class:`RelatedDremioPhysicalDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioPhysicalDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDremioPhysicalDataset(guid=self.guid) - return RelatedDremioPhysicalDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -934,6 +869,11 @@ class DremioPhysicalDatasetRelationshipAttributes(AssetRelationshipAttributes): dremio_folder: Union[RelatedDremioFolder, None, UnsetType] = UNSET """Dremio Folder that contains the physical datasets (tables).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1068,6 +1008,7 @@ class DremioPhysicalDatasetNested(AssetNested): "dbt_seed_assets", "dremio_source", "dremio_folder", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1285,9 +1226,6 @@ def _dremio_physical_dataset_to_nested( is_incomplete=dremio_physical_dataset.is_incomplete, provenance_type=dremio_physical_dataset.provenance_type, home_id=dremio_physical_dataset.home_id, - depth=dremio_physical_dataset.depth, - immediate_upstream=dremio_physical_dataset.immediate_upstream, - immediate_downstream=dremio_physical_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1323,6 +1261,7 @@ def _dremio_physical_dataset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1331,9 +1270,6 @@ def 
_dremio_physical_dataset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_physical_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1540,6 +1476,9 @@ def _dremio_physical_dataset_from_nested_bytes( DremioPhysicalDataset.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") DremioPhysicalDataset.DREMIO_SOURCE = RelationField("dremioSource") DremioPhysicalDataset.DREMIO_FOLDER = RelationField("dremioFolder") +DremioPhysicalDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DremioPhysicalDataset.MEANINGS = RelationField("meanings") DremioPhysicalDataset.MC_MONITORS = RelationField("mcMonitors") DremioPhysicalDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dremio_source.py b/pyatlan_v9/model/assets/dremio_source.py index 10a04fa48..4fc0d84c5 100644 --- a/pyatlan_v9/model/assets/dremio_source.py +++ b/pyatlan_v9/model/assets/dremio_source.py @@ -47,11 +47,8 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import ( - RelatedDremioFolder, - RelatedDremioPhysicalDataset, - RelatedDremioSource, -) +from .dremio_related import RelatedDremioFolder, RelatedDremioPhysicalDataset +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -140,6 +137,7 @@ class DremioSource(Asset): DBT_SEED_ASSETS: ClassVar[Any] = None DREMIO_FOLDERS: ClassVar[Any] = None DREMIO_PHYSICAL_DATASETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: 
ClassVar[Any] = None @@ -161,6 +159,8 @@ class DremioSource(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioSource" + dremio_source_type: Union[str, None, UnsetType] = UNSET """Type of external source.""" @@ -354,6 +354,11 @@ class DremioSource(Asset): ] = UNSET """Physical datasets (tables) directly contained within the Dremio Source.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -429,66 +434,6 @@ class DremioSource(Asset): def __post_init__(self) -> None: self.type_name = "DremioSource" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioSource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DremioSource validation failed: {errors}") - - def minimize(self) -> "DremioSource": - """ - Return a minimal copy of this DremioSource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioSource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioSource instance with only the minimum required fields. - """ - self.validate() - return DremioSource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioSource": - """ - Create a :class:`RelatedDremioSource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioSource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDremioSource(guid=self.guid) - return RelatedDremioSource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -741,6 +686,11 @@ class DremioSourceRelationshipAttributes(AssetRelationshipAttributes): ] = UNSET """Physical datasets (tables) directly contained within the Dremio Source.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -857,6 +807,7 @@ class DremioSourceNested(AssetNested): "dbt_seed_assets", "dremio_folders", "dremio_physical_datasets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1024,9 +975,6 @@ def _dremio_source_to_nested(dremio_source: DremioSource) -> DremioSourceNested: is_incomplete=dremio_source.is_incomplete, provenance_type=dremio_source.provenance_type, home_id=dremio_source.home_id, - depth=dremio_source.depth, - immediate_upstream=dremio_source.immediate_upstream, - immediate_downstream=dremio_source.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1060,6 +1008,7 @@ def _dremio_source_from_nested(nested: DremioSourceNested) -> DremioSource: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1068,9 +1017,6 @@ def _dremio_source_from_nested(nested: DremioSourceNested) -> 
DremioSource: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_source_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1208,6 +1154,9 @@ def _dremio_source_from_nested_bytes(data: bytes, serde: Serde) -> DremioSource: DremioSource.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") DremioSource.DREMIO_FOLDERS = RelationField("dremioFolders") DremioSource.DREMIO_PHYSICAL_DATASETS = RelationField("dremioPhysicalDatasets") +DremioSource.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DremioSource.MEANINGS = RelationField("meanings") DremioSource.MC_MONITORS = RelationField("mcMonitors") DremioSource.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dremio_space.py b/pyatlan_v9/model/assets/dremio_space.py index f019ac665..e27e2cf76 100644 --- a/pyatlan_v9/model/assets/dremio_space.py +++ b/pyatlan_v9/model/assets/dremio_space.py @@ -47,11 +47,8 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import ( - RelatedDremioFolder, - RelatedDremioSpace, - RelatedDremioVirtualDataset, -) +from .dremio_related import RelatedDremioFolder, RelatedDremioVirtualDataset +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -134,6 +131,7 @@ class DremioSpace(Asset): DBT_SEED_ASSETS: ClassVar[Any] = None DREMIO_FOLDERS: ClassVar[Any] = None DREMIO_VIRTUAL_DATASETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -155,6 +153,8 @@ class 
DremioSpace(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioSpace" + dremio_id: Union[str, None, UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -330,6 +330,11 @@ class DremioSpace(Asset): ] = UNSET """Virtual datasets (views) directly contained within the Dremio Space.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -405,66 +410,6 @@ class DremioSpace(Asset): def __post_init__(self) -> None: self.type_name = "DremioSpace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioSpace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DremioSpace validation failed: {errors}") - - def minimize(self) -> "DremioSpace": - """ - Return a minimal copy of this DremioSpace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioSpace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioSpace instance with only the minimum required fields. - """ - self.validate() - return DremioSpace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioSpace": - """ - Create a :class:`RelatedDremioSpace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioSpace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDremioSpace(guid=self.guid) - return RelatedDremioSpace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -699,6 +644,11 @@ class DremioSpaceRelationshipAttributes(AssetRelationshipAttributes): ] = UNSET """Virtual datasets (views) directly contained within the Dremio Space.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -813,6 +763,7 @@ class DremioSpaceNested(AssetNested): "dbt_seed_assets", "dremio_folders", "dremio_virtual_datasets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -964,9 +915,6 @@ def _dremio_space_to_nested(dremio_space: DremioSpace) -> DremioSpaceNested: is_incomplete=dremio_space.is_incomplete, provenance_type=dremio_space.provenance_type, home_id=dremio_space.home_id, - depth=dremio_space.depth, - immediate_upstream=dremio_space.immediate_upstream, - immediate_downstream=dremio_space.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -998,6 +946,7 @@ def _dremio_space_from_nested(nested: DremioSpaceNested) -> DremioSpace: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1006,9 +955,6 @@ def _dremio_space_from_nested(nested: DremioSpaceNested) -> DremioSpace: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_space_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1128,6 +1074,9 @@ def _dremio_space_from_nested_bytes(data: bytes, serde: Serde) -> DremioSpace: DremioSpace.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") DremioSpace.DREMIO_FOLDERS = RelationField("dremioFolders") DremioSpace.DREMIO_VIRTUAL_DATASETS = RelationField("dremioVirtualDatasets") +DremioSpace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DremioSpace.MEANINGS = RelationField("meanings") DremioSpace.MC_MONITORS = RelationField("mcMonitors") DremioSpace.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dremio_virtual_dataset.py b/pyatlan_v9/model/assets/dremio_virtual_dataset.py index 7e3279379..21cd2a3ab 100644 --- a/pyatlan_v9/model/assets/dremio_virtual_dataset.py +++ b/pyatlan_v9/model/assets/dremio_virtual_dataset.py @@ -48,11 +48,8 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dremio_related import ( - RelatedDremioFolder, - RelatedDremioSpace, - RelatedDremioVirtualDataset, -) +from .dremio_related import RelatedDremioFolder, RelatedDremioSpace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -144,6 +141,7 @@ class DremioVirtualDataset(Asset): DBT_SEED_ASSETS: ClassVar[Any] = None DREMIO_SPACE: ClassVar[Any] = None DREMIO_FOLDER: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -168,6 +166,8 @@ class 
DremioVirtualDataset(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DremioVirtualDataset" + dremio_id: Union[str, None, UnsetType] = UNSET """Source ID of this asset in Dremio.""" @@ -365,6 +365,11 @@ class DremioVirtualDataset(Asset): dremio_folder: Union[RelatedDremioFolder, None, UnsetType] = UNSET """Dremio Folder that contains the virtual datasets (views).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -455,76 +460,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DremioVirtualDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dremio_space is UNSET: - errors.append("dremio_space is required for creation") - if self.dremio_space_name is UNSET: - errors.append("dremio_space_name is required for creation") - if self.dremio_space_qualified_name is UNSET: - errors.append("dremio_space_qualified_name is required for creation") - if errors: - raise ValueError(f"DremioVirtualDataset validation failed: {errors}") - - def minimize(self) -> "DremioVirtualDataset": - """ - Return a minimal copy of this DremioVirtualDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DremioVirtualDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DremioVirtualDataset instance with only the minimum required fields. - """ - self.validate() - return DremioVirtualDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDremioVirtualDataset": - """ - Create a :class:`RelatedDremioVirtualDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDremioVirtualDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDremioVirtualDataset(guid=self.guid) - return RelatedDremioVirtualDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -783,6 +718,11 @@ class DremioVirtualDatasetRelationshipAttributes(AssetRelationshipAttributes): dremio_folder: Union[RelatedDremioFolder, None, UnsetType] = UNSET """Dremio Folder that contains the virtual datasets (views).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -908,6 +848,7 @@ class DremioVirtualDatasetNested(AssetNested): "dbt_seed_assets", "dremio_space", "dremio_folder", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1084,9 +1025,6 @@ def _dremio_virtual_dataset_to_nested( is_incomplete=dremio_virtual_dataset.is_incomplete, provenance_type=dremio_virtual_dataset.provenance_type, home_id=dremio_virtual_dataset.home_id, - depth=dremio_virtual_dataset.depth, - immediate_upstream=dremio_virtual_dataset.immediate_upstream, - immediate_downstream=dremio_virtual_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1122,6 +1060,7 @@ def _dremio_virtual_dataset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1130,9 +1069,6 @@ def 
_dremio_virtual_dataset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dremio_virtual_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1280,6 +1216,9 @@ def _dremio_virtual_dataset_from_nested_bytes( DremioVirtualDataset.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") DremioVirtualDataset.DREMIO_SPACE = RelationField("dremioSpace") DremioVirtualDataset.DREMIO_FOLDER = RelationField("dremioFolder") +DremioVirtualDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DremioVirtualDataset.MEANINGS = RelationField("meanings") DremioVirtualDataset.MC_MONITORS = RelationField("mcMonitors") DremioVirtualDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dynamo_db.py b/pyatlan_v9/model/assets/dynamo_db.py index 4c3f057d2..5f5252633 100644 --- a/pyatlan_v9/model/assets/dynamo_db.py +++ b/pyatlan_v9/model/assets/dynamo_db.py @@ -41,7 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .dynamo_db_related import RelatedDynamoDB +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +85,7 @@ class DynamoDB(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +103,8 @@ class 
DynamoDB(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DynamoDB" + dynamo_db_status: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="dynamoDBStatus" ) @@ -181,6 +184,11 @@ class DynamoDB(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -236,66 +244,6 @@ class DynamoDB(Asset): def __post_init__(self) -> None: self.type_name = "DynamoDB" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DynamoDB instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DynamoDB validation failed: {errors}") - - def minimize(self) -> "DynamoDB": - """ - Return a minimal copy of this DynamoDB with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DynamoDB with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DynamoDB instance with only the minimum required fields. - """ - self.validate() - return DynamoDB(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDynamoDB": - """ - Create a :class:`RelatedDynamoDB` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDynamoDB reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDynamoDB(guid=self.guid) - return RelatedDynamoDB(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -434,6 +382,11 @@ class DynamoDBRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -520,6 +473,7 @@ class DynamoDBNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -597,9 +551,6 @@ def _dynamo_db_to_nested(dynamo_db: DynamoDB) -> DynamoDBNested: is_incomplete=dynamo_db.is_incomplete, provenance_type=dynamo_db.provenance_type, home_id=dynamo_db.home_id, - depth=dynamo_db.depth, - immediate_upstream=dynamo_db.immediate_upstream, - immediate_downstream=dynamo_db.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -631,6 +582,7 @@ def _dynamo_db_from_nested(nested: DynamoDBNested) -> DynamoDB: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -639,9 +591,6 @@ def _dynamo_db_from_nested(nested: DynamoDBNested) -> DynamoDB: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dynamo_db_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -697,6 +646,9 @@ def _dynamo_db_from_nested_bytes(data: bytes, serde: Serde) -> DynamoDB: DynamoDB.METRICS = RelationField("metrics") DynamoDB.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") DynamoDB.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +DynamoDB.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DynamoDB.MEANINGS = RelationField("meanings") DynamoDB.MC_MONITORS = RelationField("mcMonitors") DynamoDB.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dynamo_db_attribute.py b/pyatlan_v9/model/assets/dynamo_db_attribute.py index c61f4ef4a..49ba0d606 100644 --- a/pyatlan_v9/model/assets/dynamo_db_attribute.py +++ b/pyatlan_v9/model/assets/dynamo_db_attribute.py @@ -51,7 +51,8 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dynamo_db_related import RelatedDynamoDBAttribute, RelatedDynamoDBTable +from .dynamo_db_related import RelatedDynamoDBTable +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import RelatedMongoDBCollection @@ -130,6 +131,7 @@ class DynamoDBAttribute(Asset): PARENT_COLUMN_NAME: ClassVar[Any] = None COLUMN_DISTINCT_VALUES_COUNT: ClassVar[Any] = None COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[Any] = None + COLUMN_DISTINCT_VALUES_PERCENTAGE: ClassVar[Any] = None COLUMN_HISTOGRAM: ClassVar[Any] = None COLUMN_MAX: ClassVar[Any] = None COLUMN_MIN: ClassVar[Any] = None @@ -226,6 +228,7 @@ class DynamoDBAttribute(Asset): COLUMN_DBT_MODEL_COLUMNS: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None DYNAMO_DB_TABLE: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MONGO_DB_COLLECTION: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -260,6 +263,8 @@ class DynamoDBAttribute(Asset): SQL_INSIGHT_FILTERS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "DynamoDBAttribute" + dynamo_db_status: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="dynamoDBStatus" ) @@ -383,6 +388,9 @@ class DynamoDBAttribute(Asset): column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" + column_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" @@ -689,6 +697,11 @@ class DynamoDBAttribute(Asset): ) """DynamoDB table in which this attribute exists.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -813,78 +826,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DynamoDBAttribute instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.table is UNSET: - errors.append("table is required for creation") - if self.table_name is UNSET: - errors.append("table_name is required for creation") - if self.table_qualified_name is UNSET: - errors.append("table_qualified_name is required for creation") - if self.order is UNSET: - errors.append("order is required for creation") - if errors: - raise ValueError(f"DynamoDBAttribute validation failed: {errors}") - - def minimize(self) -> "DynamoDBAttribute": - """ - Return a minimal copy of this DynamoDBAttribute with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DynamoDBAttribute with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DynamoDBAttribute instance with only the minimum required fields. 
- """ - self.validate() - return DynamoDBAttribute(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDynamoDBAttribute": - """ - Create a :class:`RelatedDynamoDBAttribute` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDynamoDBAttribute reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDynamoDBAttribute(guid=self.guid) - return RelatedDynamoDBAttribute(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1065,6 +1006,9 @@ class DynamoDBAttributeAttributes(AssetAttributes): column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" + column_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" @@ -1375,6 +1319,11 @@ class DynamoDBAttributeRelationshipAttributes(AssetRelationshipAttributes): ) """DynamoDB table in which this attribute exists.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1541,6 +1490,7 @@ class DynamoDBAttributeNested(AssetNested): "column_dbt_model_columns", "dbt_seed_assets", "dynamo_db_table", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", 
"mongo_db_collection", "mc_monitors", @@ -1619,6 +1569,7 @@ def _populate_dynamo_db_attribute_attrs( attrs.parent_column_name = obj.parent_column_name attrs.column_distinct_values_count = obj.column_distinct_values_count attrs.column_distinct_values_count_long = obj.column_distinct_values_count_long + attrs.column_distinct_values_percentage = obj.column_distinct_values_percentage attrs.column_histogram = obj.column_histogram attrs.column_max = obj.column_max attrs.column_min = obj.column_min @@ -1736,6 +1687,9 @@ def _extract_dynamo_db_attribute_attrs(attrs: DynamoDBAttributeAttributes) -> di result["column_distinct_values_count_long"] = ( attrs.column_distinct_values_count_long ) + result["column_distinct_values_percentage"] = ( + attrs.column_distinct_values_percentage + ) result["column_histogram"] = attrs.column_histogram result["column_max"] = attrs.column_max result["column_min"] = attrs.column_min @@ -1859,9 +1813,6 @@ def _dynamo_db_attribute_to_nested( is_incomplete=dynamo_db_attribute.is_incomplete, provenance_type=dynamo_db_attribute.provenance_type, home_id=dynamo_db_attribute.home_id, - depth=dynamo_db_attribute.depth, - immediate_upstream=dynamo_db_attribute.immediate_upstream, - immediate_downstream=dynamo_db_attribute.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1897,6 +1848,7 @@ def _dynamo_db_attribute_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1905,9 +1857,6 @@ def _dynamo_db_attribute_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_dynamo_db_attribute_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2007,6 +1956,9 @@ def _dynamo_db_attribute_from_nested_bytes( DynamoDBAttribute.COLUMN_DISTINCT_VALUES_COUNT_LONG = NumericField( "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" ) +DynamoDBAttribute.COLUMN_DISTINCT_VALUES_PERCENTAGE = NumericField( + "columnDistinctValuesPercentage", "columnDistinctValuesPercentage" +) DynamoDBAttribute.COLUMN_HISTOGRAM = KeywordField("columnHistogram", "columnHistogram") DynamoDBAttribute.COLUMN_MAX = NumericField("columnMax", "columnMax") DynamoDBAttribute.COLUMN_MIN = NumericField("columnMin", "columnMin") @@ -2193,6 +2145,9 @@ def _dynamo_db_attribute_from_nested_bytes( DynamoDBAttribute.COLUMN_DBT_MODEL_COLUMNS = RelationField("columnDbtModelColumns") DynamoDBAttribute.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") DynamoDBAttribute.DYNAMO_DB_TABLE = RelationField("dynamoDBTable") +DynamoDBAttribute.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DynamoDBAttribute.MEANINGS = RelationField("meanings") DynamoDBAttribute.MONGO_DB_COLLECTION = RelationField("mongoDBCollection") DynamoDBAttribute.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/dynamo_db_related.py b/pyatlan_v9/model/assets/dynamo_db_related.py index 1fffc76a1..cd953bb54 100644 --- a/pyatlan_v9/model/assets/dynamo_db_related.py +++ b/pyatlan_v9/model/assets/dynamo_db_related.py @@ -94,13 +94,13 @@ class RelatedDynamoDBTable(RelatedDynamoDB): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DynamoDBTable" so it serializes correctly - dynamo_db_table_gsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableGSICount" + dynamo_dbgsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBGSICount" ) """Represents the number of global secondary indexes on the table.""" - 
dynamo_db_table_lsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableLSICount" + dynamo_dblsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBLSICount" ) """Represents the number of local secondary indexes on the table.""" @@ -119,8 +119,8 @@ class RelatedDynamoDBSecondaryIndex(RelatedDynamoDB): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "DynamoDBSecondaryIndex" so it serializes correctly - dynamo_db_secondary_index_projection_type: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="dynamoDBSecondaryIndexProjectionType") + dynamo_db_projection_type: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBProjectionType" ) """Specifies attributes that are projected from the DynamoDB table into the index.""" diff --git a/pyatlan_v9/model/assets/dynamo_db_secondary_index.py b/pyatlan_v9/model/assets/dynamo_db_secondary_index.py index c9949acee..be65938e5 100644 --- a/pyatlan_v9/model/assets/dynamo_db_secondary_index.py +++ b/pyatlan_v9/model/assets/dynamo_db_secondary_index.py @@ -47,7 +47,7 @@ RelatedDbtSource, RelatedDbtTest, ) -from .dynamo_db_related import RelatedDynamoDBSecondaryIndex +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -82,7 +82,7 @@ class DynamoDBSecondaryIndex(Asset): Represents a DynamoDB secondary index asset in Atlan. 
""" - DYNAMO_DB_SECONDARY_INDEX_PROJECTION_TYPE: ClassVar[Any] = None + DYNAMO_DB_PROJECTION_TYPE: ClassVar[Any] = None DYNAMO_DB_STATUS: ClassVar[Any] = None DYNAMO_DB_PARTITION_KEY: ClassVar[Any] = None DYNAMO_DB_SORT_KEY: ClassVar[Any] = None @@ -161,6 +161,7 @@ class DynamoDBSecondaryIndex(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -188,8 +189,10 @@ class DynamoDBSecondaryIndex(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - dynamo_db_secondary_index_projection_type: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="dynamoDBSecondaryIndexProjectionType") + type_name: Union[str, UnsetType] = "DynamoDBSecondaryIndex" + + dynamo_db_projection_type: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBProjectionType" ) """Specifies attributes that are projected from the DynamoDB table into the index.""" @@ -447,6 +450,11 @@ class DynamoDBSecondaryIndex(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -540,68 +548,6 @@ class DynamoDBSecondaryIndex(Asset): def __post_init__(self) -> None: self.type_name = "DynamoDBSecondaryIndex" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, 
for_creation: bool = False) -> None: - """ - Dry-run validation of this DynamoDBSecondaryIndex instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DynamoDBSecondaryIndex validation failed: {errors}") - - def minimize(self) -> "DynamoDBSecondaryIndex": - """ - Return a minimal copy of this DynamoDBSecondaryIndex with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DynamoDBSecondaryIndex with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DynamoDBSecondaryIndex instance with only the minimum required fields. - """ - self.validate() - return DynamoDBSecondaryIndex( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedDynamoDBSecondaryIndex": - """ - Create a :class:`RelatedDynamoDBSecondaryIndex` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDynamoDBSecondaryIndex reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedDynamoDBSecondaryIndex(guid=self.guid) - return RelatedDynamoDBSecondaryIndex(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -659,8 +605,8 @@ def from_json( class DynamoDBSecondaryIndexAttributes(AssetAttributes): """DynamoDBSecondaryIndex-specific attributes for nested API format.""" - dynamo_db_secondary_index_projection_type: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="dynamoDBSecondaryIndexProjectionType") + dynamo_db_projection_type: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBProjectionType" ) """Specifies attributes that are projected from the DynamoDB table into the index.""" @@ -922,6 +868,11 @@ class DynamoDBSecondaryIndexRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1054,6 +1005,7 @@ class DynamoDBSecondaryIndexNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1088,9 +1040,7 @@ def _populate_dynamo_db_secondary_index_attrs( ) -> None: """Populate DynamoDBSecondaryIndex-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.dynamo_db_secondary_index_projection_type = ( - obj.dynamo_db_secondary_index_projection_type - ) + 
attrs.dynamo_db_projection_type = obj.dynamo_db_projection_type attrs.dynamo_db_status = obj.dynamo_db_status attrs.dynamo_db_partition_key = obj.dynamo_db_partition_key attrs.dynamo_db_sort_key = obj.dynamo_db_sort_key @@ -1160,9 +1110,7 @@ def _extract_dynamo_db_secondary_index_attrs( ) -> dict: """Extract all DynamoDBSecondaryIndex attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["dynamo_db_secondary_index_projection_type"] = ( - attrs.dynamo_db_secondary_index_projection_type - ) + result["dynamo_db_projection_type"] = attrs.dynamo_db_projection_type result["dynamo_db_status"] = attrs.dynamo_db_status result["dynamo_db_partition_key"] = attrs.dynamo_db_partition_key result["dynamo_db_sort_key"] = attrs.dynamo_db_sort_key @@ -1271,9 +1219,6 @@ def _dynamo_db_secondary_index_to_nested( is_incomplete=dynamo_db_secondary_index.is_incomplete, provenance_type=dynamo_db_secondary_index.provenance_type, home_id=dynamo_db_secondary_index.home_id, - depth=dynamo_db_secondary_index.depth, - immediate_upstream=dynamo_db_secondary_index.immediate_upstream, - immediate_downstream=dynamo_db_secondary_index.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1309,6 +1254,7 @@ def _dynamo_db_secondary_index_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1317,9 +1263,6 @@ def _dynamo_db_secondary_index_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dynamo_db_secondary_index_attrs(attrs), # Merged relationship attributes 
**merged_rels, @@ -1351,8 +1294,8 @@ def _dynamo_db_secondary_index_from_nested_bytes( RelationField, ) -DynamoDBSecondaryIndex.DYNAMO_DB_SECONDARY_INDEX_PROJECTION_TYPE = KeywordField( - "dynamoDBSecondaryIndexProjectionType", "dynamoDBSecondaryIndexProjectionType" +DynamoDBSecondaryIndex.DYNAMO_DB_PROJECTION_TYPE = KeywordField( + "dynamoDBProjectionType", "dynamoDBProjectionType" ) DynamoDBSecondaryIndex.DYNAMO_DB_STATUS = KeywordField( "dynamoDBStatus", "dynamoDBStatus" @@ -1527,6 +1470,9 @@ def _dynamo_db_secondary_index_from_nested_bytes( DynamoDBSecondaryIndex.DBT_SOURCES = RelationField("dbtSources") DynamoDBSecondaryIndex.SQL_DBT_SOURCES = RelationField("sqlDBTSources") DynamoDBSecondaryIndex.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +DynamoDBSecondaryIndex.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DynamoDBSecondaryIndex.MEANINGS = RelationField("meanings") DynamoDBSecondaryIndex.MC_MONITORS = RelationField("mcMonitors") DynamoDBSecondaryIndex.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/dynamo_db_table.py b/pyatlan_v9/model/assets/dynamo_db_table.py index 6ee510b97..c1a66d66c 100644 --- a/pyatlan_v9/model/assets/dynamo_db_table.py +++ b/pyatlan_v9/model/assets/dynamo_db_table.py @@ -51,8 +51,8 @@ RelatedDynamoDBAttribute, RelatedDynamoDBGlobalSecondaryIndex, RelatedDynamoDBLocalSecondaryIndex, - RelatedDynamoDBTable, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -87,8 +87,8 @@ class DynamoDBTable(Asset): Represents a DynamoDB table asset in Atlan. 
""" - DYNAMO_DB_TABLE_GSI_COUNT: ClassVar[Any] = None - DYNAMO_DB_TABLE_LSI_COUNT: ClassVar[Any] = None + DYNAMO_DBGSI_COUNT: ClassVar[Any] = None + DYNAMO_DBLSI_COUNT: ClassVar[Any] = None DYNAMO_DB_STATUS: ClassVar[Any] = None DYNAMO_DB_PARTITION_KEY: ClassVar[Any] = None DYNAMO_DB_SORT_KEY: ClassVar[Any] = None @@ -170,6 +170,7 @@ class DynamoDBTable(Asset): DYNAMO_DB_GLOBAL_SECONDARY_INDEXES: ClassVar[Any] = None DYNAMO_DB_LOCAL_SECONDARY_INDEXES: ClassVar[Any] = None DYNAMO_DB_COLUMNS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -197,13 +198,15 @@ class DynamoDBTable(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - dynamo_db_table_gsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableGSICount" + type_name: Union[str, UnsetType] = "DynamoDBTable" + + dynamo_dbgsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBGSICount" ) """Represents the number of global secondary indexes on the table.""" - dynamo_db_table_lsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableLSICount" + dynamo_dblsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBLSICount" ) """Represents the number of local secondary indexes on the table.""" @@ -476,6 +479,11 @@ class DynamoDBTable(Asset): ) """Columns (attributes) that exist within this DynamoDB table.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -569,66 +577,6 @@ class DynamoDBTable(Asset): def 
__post_init__(self) -> None: self.type_name = "DynamoDBTable" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this DynamoDBTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"DynamoDBTable validation failed: {errors}") - - def minimize(self) -> "DynamoDBTable": - """ - Return a minimal copy of this DynamoDBTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new DynamoDBTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new DynamoDBTable instance with only the minimum required fields. - """ - self.validate() - return DynamoDBTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedDynamoDBTable": - """ - Create a :class:`RelatedDynamoDBTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedDynamoDBTable reference to this asset. - """ - if self.guid is not UNSET: - return RelatedDynamoDBTable(guid=self.guid) - return RelatedDynamoDBTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -684,13 +632,13 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> DynamoDBTab class DynamoDBTableAttributes(AssetAttributes): """DynamoDBTable-specific attributes for nested API format.""" - dynamo_db_table_gsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableGSICount" + dynamo_dbgsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBGSICount" ) """Represents the number of global secondary indexes on the table.""" - dynamo_db_table_lsi_count: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="dynamoDBTableLSICount" + dynamo_dblsi_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="dynamoDBLSICount" ) """Represents the number of local secondary indexes on the table.""" @@ -967,6 +915,11 @@ class DynamoDBTableRelationshipAttributes(AssetRelationshipAttributes): ) """Columns (attributes) that exist within this DynamoDB table.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1102,6 +1055,7 @@ class DynamoDBTableNested(AssetNested): "dynamo_db_global_secondary_indexes", "dynamo_db_local_secondary_indexes", "dynamo_db_columns", + 
"gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1136,8 +1090,8 @@ def _populate_dynamo_db_table_attrs( ) -> None: """Populate DynamoDBTable-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.dynamo_db_table_gsi_count = obj.dynamo_db_table_gsi_count - attrs.dynamo_db_table_lsi_count = obj.dynamo_db_table_lsi_count + attrs.dynamo_dbgsi_count = obj.dynamo_dbgsi_count + attrs.dynamo_dblsi_count = obj.dynamo_dblsi_count attrs.dynamo_db_status = obj.dynamo_db_status attrs.dynamo_db_partition_key = obj.dynamo_db_partition_key attrs.dynamo_db_sort_key = obj.dynamo_db_sort_key @@ -1205,8 +1159,8 @@ def _populate_dynamo_db_table_attrs( def _extract_dynamo_db_table_attrs(attrs: DynamoDBTableAttributes) -> dict: """Extract all DynamoDBTable attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["dynamo_db_table_gsi_count"] = attrs.dynamo_db_table_gsi_count - result["dynamo_db_table_lsi_count"] = attrs.dynamo_db_table_lsi_count + result["dynamo_dbgsi_count"] = attrs.dynamo_dbgsi_count + result["dynamo_dblsi_count"] = attrs.dynamo_dblsi_count result["dynamo_db_status"] = attrs.dynamo_db_status result["dynamo_db_partition_key"] = attrs.dynamo_db_partition_key result["dynamo_db_sort_key"] = attrs.dynamo_db_sort_key @@ -1313,9 +1267,6 @@ def _dynamo_db_table_to_nested(dynamo_db_table: DynamoDBTable) -> DynamoDBTableN is_incomplete=dynamo_db_table.is_incomplete, provenance_type=dynamo_db_table.provenance_type, home_id=dynamo_db_table.home_id, - depth=dynamo_db_table.depth, - immediate_upstream=dynamo_db_table.immediate_upstream, - immediate_downstream=dynamo_db_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1349,6 +1300,7 @@ def _dynamo_db_table_from_nested(nested: DynamoDBTableNested) -> DynamoDBTable: updated_by=nested.updated_by, classifications=nested.classifications, 
classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1357,9 +1309,6 @@ def _dynamo_db_table_from_nested(nested: DynamoDBTableNested) -> DynamoDBTable: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_dynamo_db_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1389,12 +1338,8 @@ def _dynamo_db_table_from_nested_bytes(data: bytes, serde: Serde) -> DynamoDBTab RelationField, ) -DynamoDBTable.DYNAMO_DB_TABLE_GSI_COUNT = NumericField( - "dynamoDBTableGSICount", "dynamoDBTableGSICount" -) -DynamoDBTable.DYNAMO_DB_TABLE_LSI_COUNT = NumericField( - "dynamoDBTableLSICount", "dynamoDBTableLSICount" -) +DynamoDBTable.DYNAMO_DBGSI_COUNT = NumericField("dynamoDBGSICount", "dynamoDBGSICount") +DynamoDBTable.DYNAMO_DBLSI_COUNT = NumericField("dynamoDBLSICount", "dynamoDBLSICount") DynamoDBTable.DYNAMO_DB_STATUS = KeywordField("dynamoDBStatus", "dynamoDBStatus") DynamoDBTable.DYNAMO_DB_PARTITION_KEY = KeywordField( "dynamoDBPartitionKey", "dynamoDBPartitionKey" @@ -1543,6 +1488,9 @@ def _dynamo_db_table_from_nested_bytes(data: bytes, serde: Serde) -> DynamoDBTab "dynamoDBLocalSecondaryIndexes" ) DynamoDBTable.DYNAMO_DB_COLUMNS = RelationField("dynamoDBColumns") +DynamoDBTable.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) DynamoDBTable.MEANINGS = RelationField("meanings") DynamoDBTable.MC_MONITORS = RelationField("mcMonitors") DynamoDBTable.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/entity.py b/pyatlan_v9/model/assets/entity.py index d7c5ba58f..7427d75c4 100644 --- a/pyatlan_v9/model/assets/entity.py +++ b/pyatlan_v9/model/assets/entity.py @@ 
-18,8 +18,6 @@ import msgspec from msgspec import UNSET, UnsetType -from .related_entity import SaveSemantic - class AtlasClassification( msgspec.Struct, kw_only=True, omit_defaults=True, rename="camel" @@ -175,18 +173,3 @@ class Entity(msgspec.Struct, kw_only=True, omit_defaults=True, rename="camel"): home_id: Union[str, UnsetType] = UNSET """Home identifier for distributed Atlas systems.""" - - # Lineage-specific fields (only populated in lineage API responses) - depth: Union[int, None, UnsetType] = UNSET - """Depth of this asset within lineage. Only available in assets retrieved via lineage.""" - - immediate_upstream: Union[List[Any], None, UnsetType] = UNSET - """Assets immediately upstream of this asset within lineage.""" - - immediate_downstream: Union[List[Any], None, UnsetType] = UNSET - """Assets immediately downstream of this asset within lineage.""" - - # Internal SDK fields (not sent to API) - semantic: Union[SaveSemantic, None, UnsetType] = UNSET - """Save semantic for relationship operations (REPLACE, APPEND, REMOVE). 
- Not serialized to JSON - used internally by ref_by_guid/ref_by_qualified_name.""" diff --git a/pyatlan_v9/model/assets/event_store.py b/pyatlan_v9/model/assets/event_store.py index f58cb9cda..90e3a70bb 100644 --- a/pyatlan_v9/model/assets/event_store.py +++ b/pyatlan_v9/model/assets/event_store.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedEventStore from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +78,7 @@ class EventStore(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class EventStore(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "EventStore" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class EventStore(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class EventStore(Asset): def __post_init__(self) -> None: self.type_name = 
"EventStore" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this EventStore instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"EventStore validation failed: {errors}") - - def minimize(self) -> "EventStore": - """ - Return a minimal copy of this EventStore with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new EventStore with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new EventStore instance with only the minimum required fields. - """ - self.validate() - return EventStore(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedEventStore": - """ - Create a :class:`RelatedEventStore` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedEventStore reference to this asset. - """ - if self.guid is not UNSET: - return RelatedEventStore(guid=self.guid) - return RelatedEventStore(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class EventStoreRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class EventStoreNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _event_store_to_nested(event_store: EventStore) -> EventStoreNested: is_incomplete=event_store.is_incomplete, provenance_type=event_store.provenance_type, home_id=event_store.home_id, - depth=event_store.depth, - immediate_upstream=event_store.immediate_upstream, - immediate_downstream=event_store.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -552,6 +503,7 @@ def _event_store_from_nested(nested: EventStoreNested) -> EventStore: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -560,9 +512,6 @@ def _event_store_from_nested(nested: EventStoreNested) -> EventStore: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_event_store_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -602,6 +551,9 @@ def _event_store_from_nested_bytes(data: bytes, serde: Serde) -> EventStore: EventStore.METRICS = RelationField("metrics") EventStore.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") EventStore.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +EventStore.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) EventStore.MEANINGS = RelationField("meanings") EventStore.MC_MONITORS = RelationField("mcMonitors") EventStore.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric.py b/pyatlan_v9/model/assets/fabric.py index acaf81f0b..87c291e40 100644 --- a/pyatlan_v9/model/assets/fabric.py +++ b/pyatlan_v9/model/assets/fabric.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -81,6 +81,7 @@ class Fabric(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -98,6 +99,8 @@ class Fabric(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: 
ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Fabric" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -156,6 +159,11 @@ class Fabric(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -211,66 +219,6 @@ class Fabric(Asset): def __post_init__(self) -> None: self.type_name = "Fabric" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Fabric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Fabric validation failed: {errors}") - - def minimize(self) -> "Fabric": - """ - Return a minimal copy of this Fabric with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Fabric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Fabric instance with only the minimum required fields. - """ - self.validate() - return Fabric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabric": - """ - Create a :class:`RelatedFabric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabric reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFabric(guid=self.guid) - return RelatedFabric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -388,6 +336,11 @@ class FabricRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -474,6 +427,7 @@ class FabricNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -545,9 +499,6 @@ def _fabric_to_nested(fabric: Fabric) -> FabricNested: is_incomplete=fabric.is_incomplete, provenance_type=fabric.provenance_type, home_id=fabric.home_id, - depth=fabric.depth, - immediate_upstream=fabric.immediate_upstream, - 
immediate_downstream=fabric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -577,6 +528,7 @@ def _fabric_from_nested(nested: FabricNested) -> Fabric: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -585,9 +537,6 @@ def _fabric_from_nested(nested: FabricNested) -> Fabric: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -632,6 +581,9 @@ def _fabric_from_nested_bytes(data: bytes, serde: Serde) -> Fabric: Fabric.METRICS = RelationField("metrics") Fabric.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Fabric.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Fabric.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Fabric.MEANINGS = RelationField("meanings") Fabric.MC_MONITORS = RelationField("mcMonitors") Fabric.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_activity.py b/pyatlan_v9/model/assets/fabric_activity.py index 296e2839e..b1ccce3a9 100644 --- a/pyatlan_v9/model/assets/fabric_activity.py +++ b/pyatlan_v9/model/assets/fabric_activity.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabricActivity, RelatedFabricDataPipeline +from .fabric_related import RelatedFabricDataPipeline +from .gcp_dataplex_related import 
RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +87,7 @@ class FabricActivity(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_DATA_PIPELINE: ClassVar[Any] = None FABRIC_PROCESS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class FabricActivity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricActivity" + fabric_data_pipeline_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric data pipeline that contains this asset.""" @@ -173,6 +177,11 @@ class FabricActivity(Asset): fabric_process: Union[RelatedProcess, None, UnsetType] = UNSET """Process containing the Fabric activity.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -236,76 +245,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricActivity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_data_pipeline is UNSET: - errors.append("fabric_data_pipeline is required for creation") - if self.fabric_data_pipeline_qualified_name is UNSET: - errors.append( - "fabric_data_pipeline_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FabricActivity validation failed: {errors}") - - def minimize(self) -> "FabricActivity": - """ - Return a minimal copy of this FabricActivity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricActivity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricActivity instance with only the minimum required fields. - """ - self.validate() - return FabricActivity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricActivity": - """ - Create a :class:`RelatedFabricActivity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricActivity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricActivity(guid=self.guid) - return RelatedFabricActivity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -435,6 +374,11 @@ class FabricActivityRelationshipAttributes(AssetRelationshipAttributes): fabric_process: Union[RelatedProcess, None, UnsetType] = UNSET """Process containing the Fabric activity.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -525,6 +469,7 @@ class FabricActivityNested(AssetNested): "dq_reference_dataset_rules", "fabric_data_pipeline", "fabric_process", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -606,9 +551,6 @@ def _fabric_activity_to_nested(fabric_activity: FabricActivity) -> FabricActivit is_incomplete=fabric_activity.is_incomplete, provenance_type=fabric_activity.provenance_type, home_id=fabric_activity.home_id, - depth=fabric_activity.depth, - immediate_upstream=fabric_activity.immediate_upstream, - immediate_downstream=fabric_activity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -642,6 +584,7 @@ def _fabric_activity_from_nested(nested: FabricActivityNested) -> FabricActivity updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -650,9 +593,6 @@ def 
_fabric_activity_from_nested(nested: FabricActivityNested) -> FabricActivity is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_activity_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -715,6 +655,9 @@ def _fabric_activity_from_nested_bytes(data: bytes, serde: Serde) -> FabricActiv FabricActivity.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") FabricActivity.FABRIC_DATA_PIPELINE = RelationField("fabricDataPipeline") FabricActivity.FABRIC_PROCESS = RelationField("fabricProcess") +FabricActivity.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricActivity.MEANINGS = RelationField("meanings") FabricActivity.MC_MONITORS = RelationField("mcMonitors") FabricActivity.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_dashboard.py b/pyatlan_v9/model/assets/fabric_dashboard.py index 58e749b88..bc1122f2a 100644 --- a/pyatlan_v9/model/assets/fabric_dashboard.py +++ b/pyatlan_v9/model/assets/fabric_dashboard.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabricDashboard, RelatedFabricWorkspace +from .fabric_related import RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -83,6 +84,7 @@ class FabricDashboard(Asset): DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +102,8 @@ class FabricDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricDashboard" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -161,6 +165,11 @@ class FabricDashboard(Asset): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the dashboard.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -222,72 +231,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricDashboard validation failed: {errors}") - - def minimize(self) -> "FabricDashboard": - """ - Return a minimal copy of this FabricDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricDashboard instance with only the minimum required fields. - """ - self.validate() - return FabricDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricDashboard": - """ - Create a :class:`RelatedFabricDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricDashboard(guid=self.guid) - return RelatedFabricDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -410,6 +353,11 @@ class FabricDashboardRelationshipAttributes(AssetRelationshipAttributes): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the dashboard.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -499,6 +447,7 @@ class FabricDashboardNested(AssetNested): "dq_base_dataset_rules", "dq_reference_dataset_rules", "fabric_workspace", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -576,9 +525,6 @@ def _fabric_dashboard_to_nested( is_incomplete=fabric_dashboard.is_incomplete, provenance_type=fabric_dashboard.provenance_type, home_id=fabric_dashboard.home_id, - depth=fabric_dashboard.depth, - immediate_upstream=fabric_dashboard.immediate_upstream, - immediate_downstream=fabric_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -612,6 +558,7 @@ def _fabric_dashboard_from_nested(nested: FabricDashboardNested) -> FabricDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -620,9 +567,6 @@ def _fabric_dashboard_from_nested(nested: 
FabricDashboardNested) -> FabricDashbo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -678,6 +622,9 @@ def _fabric_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> FabricDash FabricDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") FabricDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") FabricDashboard.FABRIC_WORKSPACE = RelationField("fabricWorkspace") +FabricDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricDashboard.MEANINGS = RelationField("meanings") FabricDashboard.MC_MONITORS = RelationField("mcMonitors") FabricDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_data_pipeline.py b/pyatlan_v9/model/assets/fabric_data_pipeline.py index 7fe8001cb..6f50afbce 100644 --- a/pyatlan_v9/model/assets/fabric_data_pipeline.py +++ b/pyatlan_v9/model/assets/fabric_data_pipeline.py @@ -41,11 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import ( - RelatedFabricActivity, - RelatedFabricDataPipeline, - RelatedFabricWorkspace, -) +from .fabric_related import RelatedFabricActivity, RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -88,6 +85,7 @@ class FabricDataPipeline(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = 
None FABRIC_ACTIVITIES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +103,8 @@ class FabricDataPipeline(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricDataPipeline" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -169,6 +169,11 @@ class FabricDataPipeline(Asset): fabric_activities: Union[List[RelatedFabricActivity], None, UnsetType] = UNSET """Individual activities contained in the data pipeline.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -230,72 +235,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricDataPipeline instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricDataPipeline validation failed: {errors}") - - def minimize(self) -> "FabricDataPipeline": - """ - Return a minimal copy of this FabricDataPipeline with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricDataPipeline with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricDataPipeline instance with only the minimum required fields. - """ - self.validate() - return FabricDataPipeline(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricDataPipeline": - """ - Create a :class:`RelatedFabricDataPipeline` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricDataPipeline reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricDataPipeline(guid=self.guid) - return RelatedFabricDataPipeline(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -421,6 +360,11 @@ class FabricDataPipelineRelationshipAttributes(AssetRelationshipAttributes): fabric_activities: Union[List[RelatedFabricActivity], None, UnsetType] = UNSET """Individual activities contained in the data pipeline.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -511,6 +455,7 @@ class FabricDataPipelineNested(AssetNested): "dq_reference_dataset_rules", "fabric_workspace", "fabric_activities", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -588,9 +533,6 @@ def _fabric_data_pipeline_to_nested( is_incomplete=fabric_data_pipeline.is_incomplete, provenance_type=fabric_data_pipeline.provenance_type, home_id=fabric_data_pipeline.home_id, - depth=fabric_data_pipeline.depth, - immediate_upstream=fabric_data_pipeline.immediate_upstream, - immediate_downstream=fabric_data_pipeline.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -626,6 +568,7 @@ def _fabric_data_pipeline_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -634,9 +577,6 @@ def 
_fabric_data_pipeline_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_data_pipeline_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -697,6 +637,9 @@ def _fabric_data_pipeline_from_nested_bytes( FabricDataPipeline.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") FabricDataPipeline.FABRIC_WORKSPACE = RelationField("fabricWorkspace") FabricDataPipeline.FABRIC_ACTIVITIES = RelationField("fabricActivities") +FabricDataPipeline.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricDataPipeline.MEANINGS = RelationField("meanings") FabricDataPipeline.MC_MONITORS = RelationField("mcMonitors") FabricDataPipeline.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_dataflow.py b/pyatlan_v9/model/assets/fabric_dataflow.py index f99715f1d..aefc97510 100644 --- a/pyatlan_v9/model/assets/fabric_dataflow.py +++ b/pyatlan_v9/model/assets/fabric_dataflow.py @@ -41,11 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import ( - RelatedFabricDataflow, - RelatedFabricDataflowEntityColumn, - RelatedFabricWorkspace, -) +from .fabric_related import RelatedFabricDataflowEntityColumn, RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -88,6 +85,7 @@ class FabricDataflow(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = None 
FABRIC_DATAFLOW_ENTITY_COLUMNS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +103,8 @@ class FabricDataflow(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricDataflow" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -171,6 +171,11 @@ class FabricDataflow(Asset): ] = UNSET """Individual dataflow entity columns contained in the dataflow.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -232,72 +237,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricDataflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricDataflow validation failed: {errors}") - - def minimize(self) -> "FabricDataflow": - """ - Return a minimal copy of this FabricDataflow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricDataflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricDataflow instance with only the minimum required fields. - """ - self.validate() - return FabricDataflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricDataflow": - """ - Create a :class:`RelatedFabricDataflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricDataflow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricDataflow(guid=self.guid) - return RelatedFabricDataflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -423,6 +362,11 @@ class FabricDataflowRelationshipAttributes(AssetRelationshipAttributes): ] = UNSET """Individual dataflow entity columns contained in the dataflow.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -513,6 +457,7 @@ class FabricDataflowNested(AssetNested): "dq_reference_dataset_rules", "fabric_workspace", "fabric_dataflow_entity_columns", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -588,9 +533,6 @@ def _fabric_dataflow_to_nested(fabric_dataflow: FabricDataflow) -> FabricDataflo is_incomplete=fabric_dataflow.is_incomplete, provenance_type=fabric_dataflow.provenance_type, home_id=fabric_dataflow.home_id, - depth=fabric_dataflow.depth, - immediate_upstream=fabric_dataflow.immediate_upstream, - immediate_downstream=fabric_dataflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -624,6 +566,7 @@ def _fabric_dataflow_from_nested(nested: FabricDataflowNested) -> FabricDataflow updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -632,9 +575,6 @@ def 
_fabric_dataflow_from_nested(nested: FabricDataflowNested) -> FabricDataflow is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_dataflow_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -693,6 +633,9 @@ def _fabric_dataflow_from_nested_bytes(data: bytes, serde: Serde) -> FabricDataf FabricDataflow.FABRIC_DATAFLOW_ENTITY_COLUMNS = RelationField( "fabricDataflowEntityColumns" ) +FabricDataflow.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricDataflow.MEANINGS = RelationField("meanings") FabricDataflow.MC_MONITORS = RelationField("mcMonitors") FabricDataflow.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_dataflow_entity_column.py b/pyatlan_v9/model/assets/fabric_dataflow_entity_column.py index 651baa078..fceb17b29 100644 --- a/pyatlan_v9/model/assets/fabric_dataflow_entity_column.py +++ b/pyatlan_v9/model/assets/fabric_dataflow_entity_column.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabricDataflow, RelatedFabricDataflowEntityColumn +from .fabric_related import RelatedFabricDataflow +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class FabricDataflowEntityColumn(Asset): DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_DATAFLOW: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = 
None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class FabricDataflowEntityColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricDataflowEntityColumn" + fabric_dataflow_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric dataflow that contains this asset.""" @@ -169,6 +173,11 @@ class FabricDataflowEntityColumn(Asset): fabric_dataflow: Union[RelatedFabricDataflow, None, UnsetType] = UNSET """Dataflow containing the columns.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -232,78 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricDataflowEntityColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_dataflow is UNSET: - errors.append("fabric_dataflow is required for creation") - if self.fabric_dataflow_name is UNSET: - errors.append("fabric_dataflow_name is required for creation") - if self.fabric_dataflow_qualified_name is UNSET: - errors.append("fabric_dataflow_qualified_name is required for creation") - if errors: - raise ValueError(f"FabricDataflowEntityColumn validation failed: {errors}") - - def minimize(self) -> "FabricDataflowEntityColumn": - """ - Return a minimal copy of this FabricDataflowEntityColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricDataflowEntityColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricDataflowEntityColumn instance with only the minimum required fields. - """ - self.validate() - return FabricDataflowEntityColumn( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedFabricDataflowEntityColumn": - """ - Create a :class:`RelatedFabricDataflowEntityColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedFabricDataflowEntityColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFabricDataflowEntityColumn(guid=self.guid) - return RelatedFabricDataflowEntityColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -432,6 +369,11 @@ class FabricDataflowEntityColumnRelationshipAttributes(AssetRelationshipAttribut fabric_dataflow: Union[RelatedFabricDataflow, None, UnsetType] = UNSET """Dataflow containing the columns.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -521,6 +463,7 @@ class FabricDataflowEntityColumnNested(AssetNested): "dq_base_dataset_rules", "dq_reference_dataset_rules", "fabric_dataflow", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -604,9 +547,6 @@ def _fabric_dataflow_entity_column_to_nested( is_incomplete=fabric_dataflow_entity_column.is_incomplete, provenance_type=fabric_dataflow_entity_column.provenance_type, home_id=fabric_dataflow_entity_column.home_id, - depth=fabric_dataflow_entity_column.depth, - immediate_upstream=fabric_dataflow_entity_column.immediate_upstream, - immediate_downstream=fabric_dataflow_entity_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -642,6 +582,7 @@ def _fabric_dataflow_entity_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, 
labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -650,9 +591,6 @@ def _fabric_dataflow_entity_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_dataflow_entity_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -732,6 +670,9 @@ def _fabric_dataflow_entity_column_from_nested_bytes( "dqReferenceDatasetRules" ) FabricDataflowEntityColumn.FABRIC_DATAFLOW = RelationField("fabricDataflow") +FabricDataflowEntityColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricDataflowEntityColumn.MEANINGS = RelationField("meanings") FabricDataflowEntityColumn.MC_MONITORS = RelationField("mcMonitors") FabricDataflowEntityColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_page.py b/pyatlan_v9/model/assets/fabric_page.py index 591b71fe3..ac85f178e 100644 --- a/pyatlan_v9/model/assets/fabric_page.py +++ b/pyatlan_v9/model/assets/fabric_page.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabricPage, RelatedFabricReport, RelatedFabricVisual +from .fabric_related import RelatedFabricReport, RelatedFabricVisual +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class FabricPage(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_REPORT: ClassVar[Any] = None FABRIC_VISUALS: 
ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class FabricPage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricPage" + fabric_report_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric report that contains this asset.""" @@ -169,6 +173,11 @@ class FabricPage(Asset): fabric_visuals: Union[List[RelatedFabricVisual], None, UnsetType] = UNSET """Individual visuals contained in the page.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -232,74 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricPage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_report is UNSET: - errors.append("fabric_report is required for creation") - if self.fabric_report_qualified_name is UNSET: - errors.append("fabric_report_qualified_name is required for creation") - if errors: - raise ValueError(f"FabricPage validation failed: {errors}") - - def minimize(self) -> "FabricPage": - """ - Return a minimal copy of this FabricPage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricPage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricPage instance with only the minimum required fields. - """ - self.validate() - return FabricPage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricPage": - """ - Create a :class:`RelatedFabricPage` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricPage reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricPage(guid=self.guid) - return RelatedFabricPage(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -426,6 +367,11 @@ class FabricPageRelationshipAttributes(AssetRelationshipAttributes): fabric_visuals: Union[List[RelatedFabricVisual], None, UnsetType] = UNSET """Individual visuals contained in the page.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -514,6 +460,7 @@ class FabricPageNested(AssetNested): "dq_reference_dataset_rules", "fabric_report", "fabric_visuals", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -587,9 +534,6 @@ def _fabric_page_to_nested(fabric_page: FabricPage) -> FabricPageNested: is_incomplete=fabric_page.is_incomplete, provenance_type=fabric_page.provenance_type, home_id=fabric_page.home_id, - depth=fabric_page.depth, - immediate_upstream=fabric_page.immediate_upstream, - immediate_downstream=fabric_page.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -621,6 +565,7 @@ def _fabric_page_from_nested(nested: FabricPageNested) -> FabricPage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -629,9 +574,6 @@ def _fabric_page_from_nested(nested: FabricPageNested) -> FabricPage: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_page_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -683,6 +625,9 @@ def _fabric_page_from_nested_bytes(data: bytes, serde: Serde) -> FabricPage: FabricPage.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") FabricPage.FABRIC_REPORT = RelationField("fabricReport") FabricPage.FABRIC_VISUALS = RelationField("fabricVisuals") +FabricPage.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricPage.MEANINGS = RelationField("meanings") FabricPage.MC_MONITORS = RelationField("mcMonitors") FabricPage.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_report.py b/pyatlan_v9/model/assets/fabric_report.py index cfc629220..f6b513e18 100644 --- a/pyatlan_v9/model/assets/fabric_report.py +++ b/pyatlan_v9/model/assets/fabric_report.py @@ -41,11 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import ( - RelatedFabricPage, - RelatedFabricReport, - RelatedFabricWorkspace, -) +from .fabric_related import RelatedFabricPage, RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -88,6 +85,7 @@ class FabricReport(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = None FABRIC_PAGES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None 
MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +103,8 @@ class FabricReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricReport" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -169,6 +169,11 @@ class FabricReport(Asset): fabric_pages: Union[List[RelatedFabricPage], None, UnsetType] = UNSET """Individual pages contained in the report.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -230,72 +235,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricReport validation failed: {errors}") - - def minimize(self) -> "FabricReport": - """ - Return a minimal copy of this FabricReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricReport instance with only the minimum required fields. - """ - self.validate() - return FabricReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricReport": - """ - Create a :class:`RelatedFabricReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricReport(guid=self.guid) - return RelatedFabricReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -419,6 +358,11 @@ class FabricReportRelationshipAttributes(AssetRelationshipAttributes): fabric_pages: Union[List[RelatedFabricPage], None, UnsetType] = UNSET """Individual pages contained in the report.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -509,6 +453,7 @@ class FabricReportNested(AssetNested): "dq_reference_dataset_rules", "fabric_workspace", "fabric_pages", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -582,9 +527,6 @@ def _fabric_report_to_nested(fabric_report: FabricReport) -> FabricReportNested: is_incomplete=fabric_report.is_incomplete, provenance_type=fabric_report.provenance_type, home_id=fabric_report.home_id, - depth=fabric_report.depth, - immediate_upstream=fabric_report.immediate_upstream, - immediate_downstream=fabric_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -618,6 +560,7 @@ def _fabric_report_from_nested(nested: FabricReportNested) -> FabricReport: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -626,9 +569,6 @@ def _fabric_report_from_nested(nested: 
FabricReportNested) -> FabricReport: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_report_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -681,6 +621,9 @@ def _fabric_report_from_nested_bytes(data: bytes, serde: Serde) -> FabricReport: FabricReport.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") FabricReport.FABRIC_WORKSPACE = RelationField("fabricWorkspace") FabricReport.FABRIC_PAGES = RelationField("fabricPages") +FabricReport.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricReport.MEANINGS = RelationField("meanings") FabricReport.MC_MONITORS = RelationField("mcMonitors") FabricReport.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_semantic_model.py b/pyatlan_v9/model/assets/fabric_semantic_model.py index 6f9a91a7b..338b45f01 100644 --- a/pyatlan_v9/model/assets/fabric_semantic_model.py +++ b/pyatlan_v9/model/assets/fabric_semantic_model.py @@ -41,11 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import ( - RelatedFabricSemanticModel, - RelatedFabricSemanticModelTable, - RelatedFabricWorkspace, -) +from .fabric_related import RelatedFabricSemanticModelTable, RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -88,6 +85,7 @@ class FabricSemanticModel(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = None 
FABRIC_SEMANTIC_MODEL_TABLES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +103,8 @@ class FabricSemanticModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricSemanticModel" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -171,6 +171,11 @@ class FabricSemanticModel(Asset): ] = UNSET """Individual semantic model tables contained in the semantic model.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -232,72 +237,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricSemanticModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_workspace is UNSET: - errors.append("fabric_workspace is required for creation") - if errors: - raise ValueError(f"FabricSemanticModel validation failed: {errors}") - - def minimize(self) -> "FabricSemanticModel": - """ - Return a minimal copy of this FabricSemanticModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricSemanticModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricSemanticModel instance with only the minimum required fields. - """ - self.validate() - return FabricSemanticModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricSemanticModel": - """ - Create a :class:`RelatedFabricSemanticModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricSemanticModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricSemanticModel(guid=self.guid) - return RelatedFabricSemanticModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -425,6 +364,11 @@ class FabricSemanticModelRelationshipAttributes(AssetRelationshipAttributes): ] = UNSET """Individual semantic model tables contained in the semantic model.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -515,6 +459,7 @@ class FabricSemanticModelNested(AssetNested): "dq_reference_dataset_rules", "fabric_workspace", "fabric_semantic_model_tables", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -592,9 +537,6 @@ def _fabric_semantic_model_to_nested( is_incomplete=fabric_semantic_model.is_incomplete, provenance_type=fabric_semantic_model.provenance_type, home_id=fabric_semantic_model.home_id, - depth=fabric_semantic_model.depth, - immediate_upstream=fabric_semantic_model.immediate_upstream, - immediate_downstream=fabric_semantic_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -630,6 +572,7 @@ def _fabric_semantic_model_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -638,9 +581,6 @@ def _fabric_semantic_model_from_nested( 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_semantic_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -705,6 +645,9 @@ def _fabric_semantic_model_from_nested_bytes( FabricSemanticModel.FABRIC_SEMANTIC_MODEL_TABLES = RelationField( "fabricSemanticModelTables" ) +FabricSemanticModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricSemanticModel.MEANINGS = RelationField("meanings") FabricSemanticModel.MC_MONITORS = RelationField("mcMonitors") FabricSemanticModel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_semantic_model_table.py b/pyatlan_v9/model/assets/fabric_semantic_model_table.py index b4417d7ea..bf1d4ee1d 100644 --- a/pyatlan_v9/model/assets/fabric_semantic_model_table.py +++ b/pyatlan_v9/model/assets/fabric_semantic_model_table.py @@ -43,9 +43,9 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .fabric_related import ( RelatedFabricSemanticModel, - RelatedFabricSemanticModelTable, RelatedFabricSemanticModelTableColumn, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -89,6 +89,7 @@ class FabricSemanticModelTable(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_SEMANTIC_MODEL: ClassVar[Any] = None FABRIC_SEMANTIC_MODEL_TABLE_COLUMNS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +107,8 @@ class FabricSemanticModelTable(Asset): INPUT_TO_SPARK_JOBS: 
ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricSemanticModelTable" + fabric_semantic_model_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric semantic model that contains this asset.""" @@ -175,6 +178,11 @@ class FabricSemanticModelTable(Asset): ] = UNSET """Individual semantic model table columns contained in the semantic model table.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -238,78 +246,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricSemanticModelTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_semantic_model is UNSET: - errors.append("fabric_semantic_model is required for creation") - if self.fabric_semantic_model_qualified_name is UNSET: - errors.append( - "fabric_semantic_model_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FabricSemanticModelTable validation failed: {errors}") - - def minimize(self) -> "FabricSemanticModelTable": - """ - Return a minimal copy of this FabricSemanticModelTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricSemanticModelTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricSemanticModelTable instance with only the minimum required fields. - """ - self.validate() - return FabricSemanticModelTable( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedFabricSemanticModelTable": - """ - Create a :class:`RelatedFabricSemanticModelTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricSemanticModelTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricSemanticModelTable(guid=self.guid) - return RelatedFabricSemanticModelTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -440,6 +376,11 @@ class FabricSemanticModelTableRelationshipAttributes(AssetRelationshipAttributes ] = UNSET """Individual semantic model table columns contained in the semantic model table.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -530,6 +471,7 @@ class FabricSemanticModelTableNested(AssetNested): "dq_reference_dataset_rules", "fabric_semantic_model", "fabric_semantic_model_table_columns", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -615,9 +557,6 @@ def _fabric_semantic_model_table_to_nested( is_incomplete=fabric_semantic_model_table.is_incomplete, provenance_type=fabric_semantic_model_table.provenance_type, home_id=fabric_semantic_model_table.home_id, - depth=fabric_semantic_model_table.depth, - immediate_upstream=fabric_semantic_model_table.immediate_upstream, - immediate_downstream=fabric_semantic_model_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -653,6 +592,7 @@ def _fabric_semantic_model_table_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -661,9 +601,6 @@ def _fabric_semantic_model_table_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_semantic_model_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -741,6 +678,9 @@ def _fabric_semantic_model_table_from_nested_bytes( FabricSemanticModelTable.FABRIC_SEMANTIC_MODEL_TABLE_COLUMNS = RelationField( "fabricSemanticModelTableColumns" ) +FabricSemanticModelTable.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricSemanticModelTable.MEANINGS = RelationField("meanings") FabricSemanticModelTable.MC_MONITORS = RelationField("mcMonitors") FabricSemanticModelTable.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_semantic_model_table_column.py b/pyatlan_v9/model/assets/fabric_semantic_model_table_column.py index 571762514..37cb5604d 100644 --- a/pyatlan_v9/model/assets/fabric_semantic_model_table_column.py +++ b/pyatlan_v9/model/assets/fabric_semantic_model_table_column.py @@ -41,10 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import ( - RelatedFabricSemanticModelTable, - RelatedFabricSemanticModelTableColumn, -) +from .fabric_related import RelatedFabricSemanticModelTable +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -88,6 +86,7 @@ class FabricSemanticModelTableColumn(Asset): DQ_BASE_DATASET_RULES: ClassVar[Any] = None 
DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_SEMANTIC_MODEL_TABLE: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +104,8 @@ class FabricSemanticModelTableColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricSemanticModelTableColumn" + fabric_semantic_model_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric semantic model table that contains this asset.""" @@ -174,6 +175,11 @@ class FabricSemanticModelTableColumn(Asset): ] = UNSET """Semantic model table containing the column.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -237,84 +243,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricSemanticModelTableColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_semantic_model_table is UNSET: - errors.append("fabric_semantic_model_table is required for creation") - if self.fabric_semantic_model_table_name is UNSET: - errors.append( - "fabric_semantic_model_table_name is required for creation" - ) - if self.fabric_semantic_model_table_qualified_name is UNSET: - errors.append( - "fabric_semantic_model_table_qualified_name is required for creation" - ) - if errors: - raise ValueError( - f"FabricSemanticModelTableColumn validation failed: {errors}" - ) - - def minimize(self) -> "FabricSemanticModelTableColumn": - """ - Return a minimal copy of this FabricSemanticModelTableColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricSemanticModelTableColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricSemanticModelTableColumn instance with only the minimum required fields. - """ - self.validate() - return FabricSemanticModelTableColumn( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedFabricSemanticModelTableColumn": - """ - Create a :class:`RelatedFabricSemanticModelTableColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricSemanticModelTableColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFabricSemanticModelTableColumn(guid=self.guid) - return RelatedFabricSemanticModelTableColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -445,6 +373,11 @@ class FabricSemanticModelTableColumnRelationshipAttributes(AssetRelationshipAttr ] = UNSET """Semantic model table containing the column.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -534,6 +467,7 @@ class FabricSemanticModelTableColumnNested(AssetNested): "dq_base_dataset_rules", "dq_reference_dataset_rules", "fabric_semantic_model_table", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -623,9 +557,6 @@ def _fabric_semantic_model_table_column_to_nested( is_incomplete=fabric_semantic_model_table_column.is_incomplete, provenance_type=fabric_semantic_model_table_column.provenance_type, home_id=fabric_semantic_model_table_column.home_id, - depth=fabric_semantic_model_table_column.depth, - immediate_upstream=fabric_semantic_model_table_column.immediate_upstream, - immediate_downstream=fabric_semantic_model_table_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -661,6 +592,7 @@ def _fabric_semantic_model_table_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, 
classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -669,9 +601,6 @@ def _fabric_semantic_model_table_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_semantic_model_table_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -763,6 +692,9 @@ def _fabric_semantic_model_table_column_from_nested_bytes( FabricSemanticModelTableColumn.FABRIC_SEMANTIC_MODEL_TABLE = RelationField( "fabricSemanticModelTable" ) +FabricSemanticModelTableColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = ( + RelationField("gcpDataplexAspectTypeMetadataEntities") +) FabricSemanticModelTableColumn.MEANINGS = RelationField("meanings") FabricSemanticModelTableColumn.MC_MONITORS = RelationField("mcMonitors") FabricSemanticModelTableColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_visual.py b/pyatlan_v9/model/assets/fabric_visual.py index 99c7bc28b..8e2984c65 100644 --- a/pyatlan_v9/model/assets/fabric_visual.py +++ b/pyatlan_v9/model/assets/fabric_visual.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fabric_related import RelatedFabricPage, RelatedFabricVisual +from .fabric_related import RelatedFabricPage +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +87,7 @@ class FabricVisual(Asset): DQ_BASE_DATASET_RULES: 
ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FABRIC_PAGE: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class FabricVisual(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricVisual" + fabric_page_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the Fabric page that contains this asset.""" @@ -173,6 +177,11 @@ class FabricVisual(Asset): fabric_page: Union[RelatedFabricPage, None, UnsetType] = UNSET """Page containing the visual.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -236,76 +245,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricVisual instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.fabric_page is UNSET: - errors.append("fabric_page is required for creation") - if self.fabric_page_name is UNSET: - errors.append("fabric_page_name is required for creation") - if self.fabric_page_qualified_name is UNSET: - errors.append("fabric_page_qualified_name is required for creation") - if errors: - raise ValueError(f"FabricVisual validation failed: {errors}") - - def minimize(self) -> "FabricVisual": - """ - Return a minimal copy of this FabricVisual with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricVisual with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricVisual instance with only the minimum required fields. - """ - self.validate() - return FabricVisual(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricVisual": - """ - Create a :class:`RelatedFabricVisual` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricVisual reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricVisual(guid=self.guid) - return RelatedFabricVisual(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -435,6 +374,11 @@ class FabricVisualRelationshipAttributes(AssetRelationshipAttributes): fabric_page: Union[RelatedFabricPage, None, UnsetType] = UNSET """Page containing the visual.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -524,6 +468,7 @@ class FabricVisualNested(AssetNested): "dq_base_dataset_rules", "dq_reference_dataset_rules", "fabric_page", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -603,9 +548,6 @@ def _fabric_visual_to_nested(fabric_visual: FabricVisual) -> FabricVisualNested: is_incomplete=fabric_visual.is_incomplete, provenance_type=fabric_visual.provenance_type, home_id=fabric_visual.home_id, - depth=fabric_visual.depth, - immediate_upstream=fabric_visual.immediate_upstream, - immediate_downstream=fabric_visual.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -639,6 +581,7 @@ def _fabric_visual_from_nested(nested: FabricVisualNested) -> FabricVisual: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -647,9 +590,6 @@ def _fabric_visual_from_nested(nested: FabricVisualNested) 
-> FabricVisual: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_visual_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -706,6 +646,9 @@ def _fabric_visual_from_nested_bytes(data: bytes, serde: Serde) -> FabricVisual: FabricVisual.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") FabricVisual.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") FabricVisual.FABRIC_PAGE = RelationField("fabricPage") +FabricVisual.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricVisual.MEANINGS = RelationField("meanings") FabricVisual.MC_MONITORS = RelationField("mcMonitors") FabricVisual.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fabric_workspace.py b/pyatlan_v9/model/assets/fabric_workspace.py index e99aac2da..e7ed4e69a 100644 --- a/pyatlan_v9/model/assets/fabric_workspace.py +++ b/pyatlan_v9/model/assets/fabric_workspace.py @@ -46,8 +46,8 @@ RelatedFabricDataPipeline, RelatedFabricReport, RelatedFabricSemanticModel, - RelatedFabricWorkspace, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -95,6 +95,7 @@ class FabricWorkspace(Asset): FABRIC_DATA_PIPELINES: ClassVar[Any] = None FABRIC_REPORTS: ClassVar[Any] = None FABRIC_SEMANTIC_MODELS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +113,8 @@ class FabricWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: 
ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FabricWorkspace" + fabric_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this asset.""" @@ -192,6 +195,11 @@ class FabricWorkspace(Asset): ) """Individual semantic models contained in the workspace.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -247,66 +255,6 @@ class FabricWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "FabricWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FabricWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FabricWorkspace validation failed: {errors}") - - def minimize(self) -> "FabricWorkspace": - """ - Return a minimal copy of this FabricWorkspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FabricWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FabricWorkspace instance with only the minimum required fields. - """ - self.validate() - return FabricWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFabricWorkspace": - """ - Create a :class:`RelatedFabricWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFabricWorkspace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFabricWorkspace(guid=self.guid) - return RelatedFabricWorkspace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -448,6 +396,11 @@ class FabricWorkspaceRelationshipAttributes(AssetRelationshipAttributes): ) """Individual semantic models contained in the workspace.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -542,6 +495,7 @@ class FabricWorkspaceNested(AssetNested): "fabric_data_pipelines", "fabric_reports", "fabric_semantic_models", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -619,9 +573,6 @@ def _fabric_workspace_to_nested( is_incomplete=fabric_workspace.is_incomplete, provenance_type=fabric_workspace.provenance_type, home_id=fabric_workspace.home_id, - depth=fabric_workspace.depth, - immediate_upstream=fabric_workspace.immediate_upstream, - immediate_downstream=fabric_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -655,6 +606,7 @@ def _fabric_workspace_from_nested(nested: FabricWorkspaceNested) -> FabricWorksp updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -663,9 +615,6 @@ def _fabric_workspace_from_nested(nested: FabricWorkspaceNested) -> FabricWorksp 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fabric_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -726,6 +675,9 @@ def _fabric_workspace_from_nested_bytes(data: bytes, serde: Serde) -> FabricWork FabricWorkspace.FABRIC_DATA_PIPELINES = RelationField("fabricDataPipelines") FabricWorkspace.FABRIC_REPORTS = RelationField("fabricReports") FabricWorkspace.FABRIC_SEMANTIC_MODELS = RelationField("fabricSemanticModels") +FabricWorkspace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FabricWorkspace.MEANINGS = RelationField("meanings") FabricWorkspace.MC_MONITORS = RelationField("mcMonitors") FabricWorkspace.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/file.py b/pyatlan_v9/model/assets/file.py index c4c5a9418..88960349e 100644 --- a/pyatlan_v9/model/assets/file.py +++ b/pyatlan_v9/model/assets/file.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class File(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class File(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: 
ClassVar[Any] = None + type_name: Union[str, UnsetType] = "File" + file_type: Union[str, None, UnsetType] = UNSET """Type (extension) of the file.""" @@ -170,6 +174,11 @@ class File(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -228,69 +237,6 @@ class File(Asset): def __post_init__(self) -> None: self.type_name = "File" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this File instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.file_type is UNSET: - errors.append("file_type is required for creation") - if errors: - raise ValueError(f"File validation failed: {errors}") - - def minimize(self) -> "File": - """ - Return a minimal copy of this File with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new File with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new File instance with only the minimum required fields. - """ - self.validate() - return File(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFile": - """ - Create a :class:`RelatedFile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFile(guid=self.guid) - return RelatedFile(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -503,6 +449,11 @@ class FileRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -588,6 +539,7 @@ class FileNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -666,9 +618,6 @@ def _file_to_nested(file: File) -> FileNested: is_incomplete=file.is_incomplete, provenance_type=file.provenance_type, home_id=file.home_id, - depth=file.depth, - immediate_upstream=file.immediate_upstream, - immediate_downstream=file.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -698,6 +647,7 @@ def _file_from_nested(nested: FileNested) -> File: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -706,9 +656,6 @@ def _file_from_nested(nested: FileNested) -> File: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_file_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -756,6 +703,9 @@ def _file_from_nested_bytes(data: bytes, serde: Serde) -> 
File: File.METRICS = RelationField("metrics") File.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") File.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +File.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) File.MEANINGS = RelationField("meanings") File.MC_MONITORS = RelationField("mcMonitors") File.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fivetran.py b/pyatlan_v9/model/assets/fivetran.py index e623b2e00..6c6205a91 100644 --- a/pyatlan_v9/model/assets/fivetran.py +++ b/pyatlan_v9/model/assets/fivetran.py @@ -40,7 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fivetran_related import RelatedFivetran +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -81,6 +81,7 @@ class Fivetran(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -98,6 +99,8 @@ class Fivetran(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Fivetran" + fivetran_workflow_name: Union[str, None, UnsetType] = UNSET """Name of the atlan fivetran workflow that updated this asset""" @@ -156,6 +159,11 @@ class Fivetran(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex 
entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -211,66 +219,6 @@ class Fivetran(Asset): def __post_init__(self) -> None: self.type_name = "Fivetran" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Fivetran instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Fivetran validation failed: {errors}") - - def minimize(self) -> "Fivetran": - """ - Return a minimal copy of this Fivetran with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Fivetran with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Fivetran instance with only the minimum required fields. 
- """ - self.validate() - return Fivetran(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFivetran": - """ - Create a :class:`RelatedFivetran` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFivetran reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFivetran(guid=self.guid) - return RelatedFivetran(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -388,6 +336,11 @@ class FivetranRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -474,6 +427,7 @@ class FivetranNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -547,9 +501,6 @@ def _fivetran_to_nested(fivetran: Fivetran) -> FivetranNested: is_incomplete=fivetran.is_incomplete, provenance_type=fivetran.provenance_type, home_id=fivetran.home_id, - depth=fivetran.depth, - immediate_upstream=fivetran.immediate_upstream, - immediate_downstream=fivetran.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -581,6 +532,7 @@ def _fivetran_from_nested(nested: FivetranNested) -> Fivetran: updated_by=nested.updated_by, 
classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -589,9 +541,6 @@ def _fivetran_from_nested(nested: FivetranNested) -> Fivetran: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_fivetran_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -642,6 +591,9 @@ def _fivetran_from_nested_bytes(data: bytes, serde: Serde) -> Fivetran: Fivetran.METRICS = RelationField("metrics") Fivetran.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Fivetran.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Fivetran.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Fivetran.MEANINGS = RelationField("meanings") Fivetran.MC_MONITORS = RelationField("mcMonitors") Fivetran.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fivetran_connector.py b/pyatlan_v9/model/assets/fivetran_connector.py index 242a503e2..d2b15ebdd 100644 --- a/pyatlan_v9/model/assets/fivetran_connector.py +++ b/pyatlan_v9/model/assets/fivetran_connector.py @@ -41,7 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .fivetran_related import RelatedFivetranConnector +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -64,50 +64,44 @@ class FivetranConnector(Asset): Instance of a Fivetran 
connector asset in Atlan. """ - FIVETRAN_CONNECTOR_LAST_SYNC_ID: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_STARTED_AT: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_FINISHED_AT: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_REASON: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_TASK_TYPE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_RESCHEDULED_AT: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_TABLES_SYNCED: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_EXTRACT_TIME_SECONDS: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_EXTRACT_VOLUME_MEGABYTES: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_LOAD_TIME_SECONDS: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_LOAD_VOLUME_MEGABYTES: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_PROCESS_TIME_SECONDS: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_PROCESS_VOLUME_MEGABYTES: ClassVar[Any] = None - FIVETRAN_CONNECTOR_LAST_SYNC_TOTAL_TIME_SECONDS: ClassVar[Any] = None - FIVETRAN_CONNECTOR_NAME: ClassVar[Any] = None - FIVETRAN_CONNECTOR_TYPE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_URL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_DESTINATION_NAME: ClassVar[Any] = None - FIVETRAN_CONNECTOR_DESTINATION_TYPE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_DESTINATION_URL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_SETUP_ON: ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_FREQUENCY: ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_PAUSED: ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_SETUP_USER_FULL_NAME: ClassVar[Any] = None - FIVETRAN_CONNECTOR_SYNC_SETUP_USER_EMAIL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_FREE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_PAID: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_TOTAL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_FREE: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_PAID: ClassVar[Any] = None - 
FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_TOTAL: ClassVar[Any] = None - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_FREE_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = ( - None - ) - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_PAID_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = ( - None - ) - FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_TOTAL_PERCENTAGE_OF_ACCOUNT: ClassVar[ - Any - ] = None - FIVETRAN_CONNECTOR_TOTAL_TABLES_SYNCED: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_ID: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_STARTED_AT: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_FINISHED_AT: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_REASON: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_TASK_TYPE: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_RESCHEDULED_AT: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_TABLES_SYNCED: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_EXTRACT_TIME_SECONDS: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_EXTRACT_VOLUME_MEGABYTES: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_LOAD_TIME_SECONDS: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_LOAD_VOLUME_MEGABYTES: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_PROCESS_TIME_SECONDS: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_PROCESS_VOLUME_MEGABYTES: ClassVar[Any] = None + FIVETRAN_LAST_SYNC_TOTAL_TIME_SECONDS: ClassVar[Any] = None + FIVETRAN_NAME: ClassVar[Any] = None + FIVETRAN_TYPE: ClassVar[Any] = None + FIVETRAN_URL: ClassVar[Any] = None + FIVETRAN_DESTINATION_NAME: ClassVar[Any] = None + FIVETRAN_DESTINATION_TYPE: ClassVar[Any] = None + FIVETRAN_DESTINATION_URL: ClassVar[Any] = None + FIVETRAN_SYNC_SETUP_ON: ClassVar[Any] = None + FIVETRAN_SYNC_FREQUENCY: ClassVar[Any] = None + FIVETRAN_SYNC_PAUSED: ClassVar[Any] = None + FIVETRAN_SYNC_SETUP_USER_FULL_NAME: ClassVar[Any] = None + FIVETRAN_SYNC_SETUP_USER_EMAIL: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_FREE: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_PAID: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_TOTAL: ClassVar[Any] = None + 
FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_FREE: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_PAID: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_TOTAL: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_FREE_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_PAID_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = None + FIVETRAN_MONTHLY_ACTIVE_ROWS_TOTAL_PERCENTAGE_OF_ACCOUNT: ClassVar[Any] = None + FIVETRAN_TOTAL_TABLES_SYNCED: ClassVar[Any] = None FIVETRAN_CONNECTOR_TOP_TABLES_BY_MAR: ClassVar[Any] = None - FIVETRAN_CONNECTOR_USAGE_COST: ClassVar[Any] = None - FIVETRAN_CONNECTOR_CREDITS_USED: ClassVar[Any] = None + FIVETRAN_USAGE_COST: ClassVar[Any] = None + FIVETRAN_CREDITS_USED: ClassVar[Any] = None FIVETRAN_WORKFLOW_NAME: ClassVar[Any] = None FIVETRAN_LAST_SYNC_STATUS: ClassVar[Any] = None FIVETRAN_LAST_SYNC_RECORDS_UPDATED: ClassVar[Any] = None @@ -127,6 +121,7 @@ class FivetranConnector(Asset): DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None PROCESSES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -144,139 +139,127 @@ class FivetranConnector(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - fivetran_connector_last_sync_id: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "FivetranConnector" + + fivetran_last_sync_id: Union[str, None, UnsetType] = UNSET """ID of the latest sync""" - fivetran_connector_last_sync_started_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_started_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync started on Fivetran, in milliseconds""" - fivetran_connector_last_sync_finished_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_finished_at: Union[int, None, 
UnsetType] = UNSET """Timestamp (epoch) when the latest sync finished on Fivetran, in milliseconds""" - fivetran_connector_last_sync_reason: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_reason: Union[str, None, UnsetType] = UNSET """Failure reason for the latest sync on Fivetran. If status is FAILURE, this is the description of the reason why the sync failed. If status is FAILURE_WITH_TASK, this is the description of the Error. If status is RESCHEDULED, this is the description of the reason why the sync is rescheduled.""" - fivetran_connector_last_sync_task_type: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_task_type: Union[str, None, UnsetType] = UNSET """Failure task type for the latest sync on Fivetran. If status is FAILURE_WITH_TASK or RESCHEDULED, this field displays the type of the Error that caused the failure or rescheduling, respectively, e.g., reconnect, update_service_account, etc.""" - fivetran_connector_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) at which the latest sync is rescheduled at on Fivetran""" - fivetran_connector_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET """Number of tables synced in the latest sync on Fivetran""" - fivetran_connector_last_sync_extract_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_extract_time_seconds: Union[float, None, UnsetType] = UNSET """Extract time in seconds in the latest sync on fivetran""" - fivetran_connector_last_sync_extract_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_extract_volume_megabytes: Union[float, None, UnsetType] = UNSET """Extracted data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + 
fivetran_last_sync_load_time_seconds: Union[float, None, UnsetType] = UNSET """Load time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_load_volume_megabytes: Union[float, None, UnsetType] = UNSET """Loaded data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_process_time_seconds: Union[float, None, UnsetType] = UNSET """Process time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_process_volume_megabytes: Union[float, None, UnsetType] = UNSET """Process volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_total_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_total_time_seconds: Union[float, None, UnsetType] = UNSET """Total sync time in seconds in the latest sync on Fivetran""" - fivetran_connector_name: Union[str, None, UnsetType] = UNSET + fivetran_name: Union[str, None, UnsetType] = UNSET """Connector name added by the user on Fivetran""" - fivetran_connector_type: Union[str, None, UnsetType] = UNSET + fivetran_type: Union[str, None, UnsetType] = UNSET """Type of connector on Fivetran. 
Eg: snowflake, google_analytics, notion etc.""" - fivetran_connector_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorURL" + fivetran_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranURL" ) """URL to open the connector details on Fivetran""" - fivetran_connector_destination_name: Union[str, None, UnsetType] = UNSET + fivetran_destination_name: Union[str, None, UnsetType] = UNSET """Destination name added by the user on Fivetran""" - fivetran_connector_destination_type: Union[str, None, UnsetType] = UNSET + fivetran_destination_type: Union[str, None, UnsetType] = UNSET """Type of destination on Fivetran. Eg: redshift, bigquery etc.""" - fivetran_connector_destination_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorDestinationURL" + fivetran_destination_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranDestinationURL" ) """URL to open the destination details on Fivetran""" - fivetran_connector_sync_setup_on: Union[int, None, UnsetType] = UNSET + fivetran_sync_setup_on: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) on which the connector was setup on Fivetran, in milliseconds""" - fivetran_connector_sync_frequency: Union[str, None, UnsetType] = UNSET + fivetran_sync_frequency: Union[str, None, UnsetType] = UNSET """Sync frequency for the connector in number of hours. 
Eg: Every 6 hours""" - fivetran_connector_sync_paused: Union[bool, None, UnsetType] = UNSET + fivetran_sync_paused: Union[bool, None, UnsetType] = UNSET """Boolean to indicate whether the sync for this connector is paused or not""" - fivetran_connector_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET """Full name of the user who setup the connector on Fivetran""" - fivetran_connector_sync_setup_user_email: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_email: Union[str, None, UnsetType] = UNSET """Email ID of the user who setpu the connector on Fivetran""" - fivetran_connector_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET """Free Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET """Paid Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET """Total Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_change_percentage_free: Union[ + fivetran_monthly_active_rows_change_percentage_free: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of free MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_paid: Union[ + fivetran_monthly_active_rows_change_percentage_paid: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of paid MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_total: Union[ + fivetran_monthly_active_rows_change_percentage_total: Union[ float, None, UnsetType ] = 
UNSET """Increase in the percentage of total MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_free_percentage_of_account: Union[ + fivetran_monthly_active_rows_free_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total free MAR used by this connector""" - fivetran_connector_monthly_active_rows_paid_percentage_of_account: Union[ + fivetran_monthly_active_rows_paid_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total paid MAR used by this connector""" - fivetran_connector_monthly_active_rows_total_percentage_of_account: Union[ + fivetran_monthly_active_rows_total_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total MAR used by this connector""" - fivetran_connector_total_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_total_tables_synced: Union[int, None, UnsetType] = UNSET """Total number of tables synced by this connector""" fivetran_connector_top_tables_by_mar: Union[str, None, UnsetType] = msgspec.field( @@ -284,10 +267,10 @@ class FivetranConnector(Asset): ) """Total five tables sorted by MAR synced by this connector""" - fivetran_connector_usage_cost: Union[float, None, UnsetType] = UNSET + fivetran_usage_cost: Union[float, None, UnsetType] = UNSET """Total usage cost by this destination""" - fivetran_connector_credits_used: Union[float, None, UnsetType] = UNSET + fivetran_credits_used: Union[float, None, UnsetType] = UNSET """Total credits used by this destination""" fivetran_workflow_name: Union[str, None, UnsetType] = UNSET @@ -351,6 +334,11 @@ class FivetranConnector(Asset): processes: Union[List[RelatedProcess], None, UnsetType] = UNSET """Processes related to this Fivetran connector""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type 
attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -406,66 +394,6 @@ class FivetranConnector(Asset): def __post_init__(self) -> None: self.type_name = "FivetranConnector" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FivetranConnector instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FivetranConnector validation failed: {errors}") - - def minimize(self) -> "FivetranConnector": - """ - Return a minimal copy of this FivetranConnector with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FivetranConnector with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FivetranConnector instance with only the minimum required fields. 
- """ - self.validate() - return FivetranConnector(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFivetranConnector": - """ - Create a :class:`RelatedFivetranConnector` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFivetranConnector reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFivetranConnector(guid=self.guid) - return RelatedFivetranConnector(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -523,139 +451,125 @@ def from_json( class FivetranConnectorAttributes(AssetAttributes): """FivetranConnector-specific attributes for nested API format.""" - fivetran_connector_last_sync_id: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_id: Union[str, None, UnsetType] = UNSET """ID of the latest sync""" - fivetran_connector_last_sync_started_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_started_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync started on Fivetran, in milliseconds""" - fivetran_connector_last_sync_finished_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_finished_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync finished on Fivetran, in milliseconds""" - fivetran_connector_last_sync_reason: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_reason: Union[str, None, UnsetType] = UNSET """Failure reason for the latest sync on Fivetran. If status is FAILURE, this is the description of the reason why the sync failed. If status is FAILURE_WITH_TASK, this is the description of the Error. 
If status is RESCHEDULED, this is the description of the reason why the sync is rescheduled.""" - fivetran_connector_last_sync_task_type: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_task_type: Union[str, None, UnsetType] = UNSET """Failure task type for the latest sync on Fivetran. If status is FAILURE_WITH_TASK or RESCHEDULED, this field displays the type of the Error that caused the failure or rescheduling, respectively, e.g., reconnect, update_service_account, etc.""" - fivetran_connector_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) at which the latest sync is rescheduled at on Fivetran""" - fivetran_connector_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET """Number of tables synced in the latest sync on Fivetran""" - fivetran_connector_last_sync_extract_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_extract_time_seconds: Union[float, None, UnsetType] = UNSET """Extract time in seconds in the latest sync on fivetran""" - fivetran_connector_last_sync_extract_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_extract_volume_megabytes: Union[float, None, UnsetType] = UNSET """Extracted data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_load_time_seconds: Union[float, None, UnsetType] = UNSET """Load time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_load_volume_megabytes: Union[float, None, UnsetType] = UNSET """Loaded data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_time_seconds: Union[float, None, 
UnsetType] = ( - UNSET - ) + fivetran_last_sync_process_time_seconds: Union[float, None, UnsetType] = UNSET """Process time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_process_volume_megabytes: Union[float, None, UnsetType] = UNSET """Process volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_total_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_total_time_seconds: Union[float, None, UnsetType] = UNSET """Total sync time in seconds in the latest sync on Fivetran""" - fivetran_connector_name: Union[str, None, UnsetType] = UNSET + fivetran_name: Union[str, None, UnsetType] = UNSET """Connector name added by the user on Fivetran""" - fivetran_connector_type: Union[str, None, UnsetType] = UNSET + fivetran_type: Union[str, None, UnsetType] = UNSET """Type of connector on Fivetran. Eg: snowflake, google_analytics, notion etc.""" - fivetran_connector_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorURL" + fivetran_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranURL" ) """URL to open the connector details on Fivetran""" - fivetran_connector_destination_name: Union[str, None, UnsetType] = UNSET + fivetran_destination_name: Union[str, None, UnsetType] = UNSET """Destination name added by the user on Fivetran""" - fivetran_connector_destination_type: Union[str, None, UnsetType] = UNSET + fivetran_destination_type: Union[str, None, UnsetType] = UNSET """Type of destination on Fivetran. 
Eg: redshift, bigquery etc.""" - fivetran_connector_destination_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorDestinationURL" + fivetran_destination_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranDestinationURL" ) """URL to open the destination details on Fivetran""" - fivetran_connector_sync_setup_on: Union[int, None, UnsetType] = UNSET + fivetran_sync_setup_on: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) on which the connector was setup on Fivetran, in milliseconds""" - fivetran_connector_sync_frequency: Union[str, None, UnsetType] = UNSET + fivetran_sync_frequency: Union[str, None, UnsetType] = UNSET """Sync frequency for the connector in number of hours. Eg: Every 6 hours""" - fivetran_connector_sync_paused: Union[bool, None, UnsetType] = UNSET + fivetran_sync_paused: Union[bool, None, UnsetType] = UNSET """Boolean to indicate whether the sync for this connector is paused or not""" - fivetran_connector_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET """Full name of the user who setup the connector on Fivetran""" - fivetran_connector_sync_setup_user_email: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_email: Union[str, None, UnsetType] = UNSET """Email ID of the user who setpu the connector on Fivetran""" - fivetran_connector_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET """Free Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET """Paid Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET + 
fivetran_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET """Total Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_change_percentage_free: Union[ + fivetran_monthly_active_rows_change_percentage_free: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of free MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_paid: Union[ + fivetran_monthly_active_rows_change_percentage_paid: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of paid MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_total: Union[ + fivetran_monthly_active_rows_change_percentage_total: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of total MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_free_percentage_of_account: Union[ + fivetran_monthly_active_rows_free_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total free MAR used by this connector""" - fivetran_connector_monthly_active_rows_paid_percentage_of_account: Union[ + fivetran_monthly_active_rows_paid_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total paid MAR used by this connector""" - fivetran_connector_monthly_active_rows_total_percentage_of_account: Union[ + fivetran_monthly_active_rows_total_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total MAR used by this connector""" - fivetran_connector_total_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_total_tables_synced: Union[int, None, UnsetType] = UNSET """Total number of tables synced by this connector""" fivetran_connector_top_tables_by_mar: Union[str, None, UnsetType] = msgspec.field( @@ -663,10 +577,10 @@ class FivetranConnectorAttributes(AssetAttributes): ) """Total five tables 
sorted by MAR synced by this connector""" - fivetran_connector_usage_cost: Union[float, None, UnsetType] = UNSET + fivetran_usage_cost: Union[float, None, UnsetType] = UNSET """Total usage cost by this destination""" - fivetran_connector_credits_used: Union[float, None, UnsetType] = UNSET + fivetran_credits_used: Union[float, None, UnsetType] = UNSET """Total credits used by this destination""" fivetran_workflow_name: Union[str, None, UnsetType] = UNSET @@ -734,6 +648,11 @@ class FivetranConnectorRelationshipAttributes(AssetRelationshipAttributes): processes: Union[List[RelatedProcess], None, UnsetType] = UNSET """Processes related to this Fivetran connector""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -823,6 +742,7 @@ class FivetranConnectorNested(AssetNested): "dq_base_dataset_rules", "dq_reference_dataset_rules", "processes", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -847,94 +767,72 @@ def _populate_fivetran_connector_attrs( ) -> None: """Populate FivetranConnector-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.fivetran_connector_last_sync_id = obj.fivetran_connector_last_sync_id - attrs.fivetran_connector_last_sync_started_at = ( - obj.fivetran_connector_last_sync_started_at - ) - attrs.fivetran_connector_last_sync_finished_at = ( - obj.fivetran_connector_last_sync_finished_at - ) - attrs.fivetran_connector_last_sync_reason = obj.fivetran_connector_last_sync_reason - attrs.fivetran_connector_last_sync_task_type = ( - obj.fivetran_connector_last_sync_task_type - ) - attrs.fivetran_connector_last_sync_rescheduled_at = ( - obj.fivetran_connector_last_sync_rescheduled_at - ) - 
attrs.fivetran_connector_last_sync_tables_synced = ( - obj.fivetran_connector_last_sync_tables_synced - ) - attrs.fivetran_connector_last_sync_extract_time_seconds = ( - obj.fivetran_connector_last_sync_extract_time_seconds - ) - attrs.fivetran_connector_last_sync_extract_volume_megabytes = ( - obj.fivetran_connector_last_sync_extract_volume_megabytes - ) - attrs.fivetran_connector_last_sync_load_time_seconds = ( - obj.fivetran_connector_last_sync_load_time_seconds - ) - attrs.fivetran_connector_last_sync_load_volume_megabytes = ( - obj.fivetran_connector_last_sync_load_volume_megabytes - ) - attrs.fivetran_connector_last_sync_process_time_seconds = ( - obj.fivetran_connector_last_sync_process_time_seconds - ) - attrs.fivetran_connector_last_sync_process_volume_megabytes = ( - obj.fivetran_connector_last_sync_process_volume_megabytes - ) - attrs.fivetran_connector_last_sync_total_time_seconds = ( - obj.fivetran_connector_last_sync_total_time_seconds - ) - attrs.fivetran_connector_name = obj.fivetran_connector_name - attrs.fivetran_connector_type = obj.fivetran_connector_type - attrs.fivetran_connector_url = obj.fivetran_connector_url - attrs.fivetran_connector_destination_name = obj.fivetran_connector_destination_name - attrs.fivetran_connector_destination_type = obj.fivetran_connector_destination_type - attrs.fivetran_connector_destination_url = obj.fivetran_connector_destination_url - attrs.fivetran_connector_sync_setup_on = obj.fivetran_connector_sync_setup_on - attrs.fivetran_connector_sync_frequency = obj.fivetran_connector_sync_frequency - attrs.fivetran_connector_sync_paused = obj.fivetran_connector_sync_paused - attrs.fivetran_connector_sync_setup_user_full_name = ( - obj.fivetran_connector_sync_setup_user_full_name - ) - attrs.fivetran_connector_sync_setup_user_email = ( - obj.fivetran_connector_sync_setup_user_email - ) - attrs.fivetran_connector_monthly_active_rows_free = ( - obj.fivetran_connector_monthly_active_rows_free - ) - 
attrs.fivetran_connector_monthly_active_rows_paid = ( - obj.fivetran_connector_monthly_active_rows_paid - ) - attrs.fivetran_connector_monthly_active_rows_total = ( - obj.fivetran_connector_monthly_active_rows_total - ) - attrs.fivetran_connector_monthly_active_rows_change_percentage_free = ( - obj.fivetran_connector_monthly_active_rows_change_percentage_free - ) - attrs.fivetran_connector_monthly_active_rows_change_percentage_paid = ( - obj.fivetran_connector_monthly_active_rows_change_percentage_paid - ) - attrs.fivetran_connector_monthly_active_rows_change_percentage_total = ( - obj.fivetran_connector_monthly_active_rows_change_percentage_total - ) - attrs.fivetran_connector_monthly_active_rows_free_percentage_of_account = ( - obj.fivetran_connector_monthly_active_rows_free_percentage_of_account - ) - attrs.fivetran_connector_monthly_active_rows_paid_percentage_of_account = ( - obj.fivetran_connector_monthly_active_rows_paid_percentage_of_account - ) - attrs.fivetran_connector_monthly_active_rows_total_percentage_of_account = ( - obj.fivetran_connector_monthly_active_rows_total_percentage_of_account - ) - attrs.fivetran_connector_total_tables_synced = ( - obj.fivetran_connector_total_tables_synced - ) + attrs.fivetran_last_sync_id = obj.fivetran_last_sync_id + attrs.fivetran_last_sync_started_at = obj.fivetran_last_sync_started_at + attrs.fivetran_last_sync_finished_at = obj.fivetran_last_sync_finished_at + attrs.fivetran_last_sync_reason = obj.fivetran_last_sync_reason + attrs.fivetran_last_sync_task_type = obj.fivetran_last_sync_task_type + attrs.fivetran_last_sync_rescheduled_at = obj.fivetran_last_sync_rescheduled_at + attrs.fivetran_last_sync_tables_synced = obj.fivetran_last_sync_tables_synced + attrs.fivetran_last_sync_extract_time_seconds = ( + obj.fivetran_last_sync_extract_time_seconds + ) + attrs.fivetran_last_sync_extract_volume_megabytes = ( + obj.fivetran_last_sync_extract_volume_megabytes + ) + attrs.fivetran_last_sync_load_time_seconds = ( + 
obj.fivetran_last_sync_load_time_seconds + ) + attrs.fivetran_last_sync_load_volume_megabytes = ( + obj.fivetran_last_sync_load_volume_megabytes + ) + attrs.fivetran_last_sync_process_time_seconds = ( + obj.fivetran_last_sync_process_time_seconds + ) + attrs.fivetran_last_sync_process_volume_megabytes = ( + obj.fivetran_last_sync_process_volume_megabytes + ) + attrs.fivetran_last_sync_total_time_seconds = ( + obj.fivetran_last_sync_total_time_seconds + ) + attrs.fivetran_name = obj.fivetran_name + attrs.fivetran_type = obj.fivetran_type + attrs.fivetran_url = obj.fivetran_url + attrs.fivetran_destination_name = obj.fivetran_destination_name + attrs.fivetran_destination_type = obj.fivetran_destination_type + attrs.fivetran_destination_url = obj.fivetran_destination_url + attrs.fivetran_sync_setup_on = obj.fivetran_sync_setup_on + attrs.fivetran_sync_frequency = obj.fivetran_sync_frequency + attrs.fivetran_sync_paused = obj.fivetran_sync_paused + attrs.fivetran_sync_setup_user_full_name = obj.fivetran_sync_setup_user_full_name + attrs.fivetran_sync_setup_user_email = obj.fivetran_sync_setup_user_email + attrs.fivetran_monthly_active_rows_free = obj.fivetran_monthly_active_rows_free + attrs.fivetran_monthly_active_rows_paid = obj.fivetran_monthly_active_rows_paid + attrs.fivetran_monthly_active_rows_total = obj.fivetran_monthly_active_rows_total + attrs.fivetran_monthly_active_rows_change_percentage_free = ( + obj.fivetran_monthly_active_rows_change_percentage_free + ) + attrs.fivetran_monthly_active_rows_change_percentage_paid = ( + obj.fivetran_monthly_active_rows_change_percentage_paid + ) + attrs.fivetran_monthly_active_rows_change_percentage_total = ( + obj.fivetran_monthly_active_rows_change_percentage_total + ) + attrs.fivetran_monthly_active_rows_free_percentage_of_account = ( + obj.fivetran_monthly_active_rows_free_percentage_of_account + ) + attrs.fivetran_monthly_active_rows_paid_percentage_of_account = ( + 
obj.fivetran_monthly_active_rows_paid_percentage_of_account + ) + attrs.fivetran_monthly_active_rows_total_percentage_of_account = ( + obj.fivetran_monthly_active_rows_total_percentage_of_account + ) + attrs.fivetran_total_tables_synced = obj.fivetran_total_tables_synced attrs.fivetran_connector_top_tables_by_mar = ( obj.fivetran_connector_top_tables_by_mar ) - attrs.fivetran_connector_usage_cost = obj.fivetran_connector_usage_cost - attrs.fivetran_connector_credits_used = obj.fivetran_connector_credits_used + attrs.fivetran_usage_cost = obj.fivetran_usage_cost + attrs.fivetran_credits_used = obj.fivetran_credits_used attrs.fivetran_workflow_name = obj.fivetran_workflow_name attrs.fivetran_last_sync_status = obj.fivetran_last_sync_status attrs.fivetran_last_sync_records_updated = obj.fivetran_last_sync_records_updated @@ -944,104 +842,82 @@ def _populate_fivetran_connector_attrs( def _extract_fivetran_connector_attrs(attrs: FivetranConnectorAttributes) -> dict: """Extract all FivetranConnector attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["fivetran_connector_last_sync_id"] = attrs.fivetran_connector_last_sync_id - result["fivetran_connector_last_sync_started_at"] = ( - attrs.fivetran_connector_last_sync_started_at - ) - result["fivetran_connector_last_sync_finished_at"] = ( - attrs.fivetran_connector_last_sync_finished_at - ) - result["fivetran_connector_last_sync_reason"] = ( - attrs.fivetran_connector_last_sync_reason - ) - result["fivetran_connector_last_sync_task_type"] = ( - attrs.fivetran_connector_last_sync_task_type - ) - result["fivetran_connector_last_sync_rescheduled_at"] = ( - attrs.fivetran_connector_last_sync_rescheduled_at - ) - result["fivetran_connector_last_sync_tables_synced"] = ( - attrs.fivetran_connector_last_sync_tables_synced - ) - result["fivetran_connector_last_sync_extract_time_seconds"] = ( - attrs.fivetran_connector_last_sync_extract_time_seconds - ) - 
result["fivetran_connector_last_sync_extract_volume_megabytes"] = ( - attrs.fivetran_connector_last_sync_extract_volume_megabytes - ) - result["fivetran_connector_last_sync_load_time_seconds"] = ( - attrs.fivetran_connector_last_sync_load_time_seconds - ) - result["fivetran_connector_last_sync_load_volume_megabytes"] = ( - attrs.fivetran_connector_last_sync_load_volume_megabytes + result["fivetran_last_sync_id"] = attrs.fivetran_last_sync_id + result["fivetran_last_sync_started_at"] = attrs.fivetran_last_sync_started_at + result["fivetran_last_sync_finished_at"] = attrs.fivetran_last_sync_finished_at + result["fivetran_last_sync_reason"] = attrs.fivetran_last_sync_reason + result["fivetran_last_sync_task_type"] = attrs.fivetran_last_sync_task_type + result["fivetran_last_sync_rescheduled_at"] = ( + attrs.fivetran_last_sync_rescheduled_at ) - result["fivetran_connector_last_sync_process_time_seconds"] = ( - attrs.fivetran_connector_last_sync_process_time_seconds + result["fivetran_last_sync_tables_synced"] = attrs.fivetran_last_sync_tables_synced + result["fivetran_last_sync_extract_time_seconds"] = ( + attrs.fivetran_last_sync_extract_time_seconds ) - result["fivetran_connector_last_sync_process_volume_megabytes"] = ( - attrs.fivetran_connector_last_sync_process_volume_megabytes + result["fivetran_last_sync_extract_volume_megabytes"] = ( + attrs.fivetran_last_sync_extract_volume_megabytes ) - result["fivetran_connector_last_sync_total_time_seconds"] = ( - attrs.fivetran_connector_last_sync_total_time_seconds + result["fivetran_last_sync_load_time_seconds"] = ( + attrs.fivetran_last_sync_load_time_seconds ) - result["fivetran_connector_name"] = attrs.fivetran_connector_name - result["fivetran_connector_type"] = attrs.fivetran_connector_type - result["fivetran_connector_url"] = attrs.fivetran_connector_url - result["fivetran_connector_destination_name"] = ( - attrs.fivetran_connector_destination_name + result["fivetran_last_sync_load_volume_megabytes"] = ( + 
attrs.fivetran_last_sync_load_volume_megabytes ) - result["fivetran_connector_destination_type"] = ( - attrs.fivetran_connector_destination_type + result["fivetran_last_sync_process_time_seconds"] = ( + attrs.fivetran_last_sync_process_time_seconds ) - result["fivetran_connector_destination_url"] = ( - attrs.fivetran_connector_destination_url + result["fivetran_last_sync_process_volume_megabytes"] = ( + attrs.fivetran_last_sync_process_volume_megabytes ) - result["fivetran_connector_sync_setup_on"] = attrs.fivetran_connector_sync_setup_on - result["fivetran_connector_sync_frequency"] = ( - attrs.fivetran_connector_sync_frequency + result["fivetran_last_sync_total_time_seconds"] = ( + attrs.fivetran_last_sync_total_time_seconds ) - result["fivetran_connector_sync_paused"] = attrs.fivetran_connector_sync_paused - result["fivetran_connector_sync_setup_user_full_name"] = ( - attrs.fivetran_connector_sync_setup_user_full_name + result["fivetran_name"] = attrs.fivetran_name + result["fivetran_type"] = attrs.fivetran_type + result["fivetran_url"] = attrs.fivetran_url + result["fivetran_destination_name"] = attrs.fivetran_destination_name + result["fivetran_destination_type"] = attrs.fivetran_destination_type + result["fivetran_destination_url"] = attrs.fivetran_destination_url + result["fivetran_sync_setup_on"] = attrs.fivetran_sync_setup_on + result["fivetran_sync_frequency"] = attrs.fivetran_sync_frequency + result["fivetran_sync_paused"] = attrs.fivetran_sync_paused + result["fivetran_sync_setup_user_full_name"] = ( + attrs.fivetran_sync_setup_user_full_name ) - result["fivetran_connector_sync_setup_user_email"] = ( - attrs.fivetran_connector_sync_setup_user_email + result["fivetran_sync_setup_user_email"] = attrs.fivetran_sync_setup_user_email + result["fivetran_monthly_active_rows_free"] = ( + attrs.fivetran_monthly_active_rows_free ) - result["fivetran_connector_monthly_active_rows_free"] = ( - attrs.fivetran_connector_monthly_active_rows_free + 
result["fivetran_monthly_active_rows_paid"] = ( + attrs.fivetran_monthly_active_rows_paid ) - result["fivetran_connector_monthly_active_rows_paid"] = ( - attrs.fivetran_connector_monthly_active_rows_paid + result["fivetran_monthly_active_rows_total"] = ( + attrs.fivetran_monthly_active_rows_total ) - result["fivetran_connector_monthly_active_rows_total"] = ( - attrs.fivetran_connector_monthly_active_rows_total + result["fivetran_monthly_active_rows_change_percentage_free"] = ( + attrs.fivetran_monthly_active_rows_change_percentage_free ) - result["fivetran_connector_monthly_active_rows_change_percentage_free"] = ( - attrs.fivetran_connector_monthly_active_rows_change_percentage_free + result["fivetran_monthly_active_rows_change_percentage_paid"] = ( + attrs.fivetran_monthly_active_rows_change_percentage_paid ) - result["fivetran_connector_monthly_active_rows_change_percentage_paid"] = ( - attrs.fivetran_connector_monthly_active_rows_change_percentage_paid + result["fivetran_monthly_active_rows_change_percentage_total"] = ( + attrs.fivetran_monthly_active_rows_change_percentage_total ) - result["fivetran_connector_monthly_active_rows_change_percentage_total"] = ( - attrs.fivetran_connector_monthly_active_rows_change_percentage_total + result["fivetran_monthly_active_rows_free_percentage_of_account"] = ( + attrs.fivetran_monthly_active_rows_free_percentage_of_account ) - result["fivetran_connector_monthly_active_rows_free_percentage_of_account"] = ( - attrs.fivetran_connector_monthly_active_rows_free_percentage_of_account + result["fivetran_monthly_active_rows_paid_percentage_of_account"] = ( + attrs.fivetran_monthly_active_rows_paid_percentage_of_account ) - result["fivetran_connector_monthly_active_rows_paid_percentage_of_account"] = ( - attrs.fivetran_connector_monthly_active_rows_paid_percentage_of_account - ) - result["fivetran_connector_monthly_active_rows_total_percentage_of_account"] = ( - 
attrs.fivetran_connector_monthly_active_rows_total_percentage_of_account - ) - result["fivetran_connector_total_tables_synced"] = ( - attrs.fivetran_connector_total_tables_synced + result["fivetran_monthly_active_rows_total_percentage_of_account"] = ( + attrs.fivetran_monthly_active_rows_total_percentage_of_account ) + result["fivetran_total_tables_synced"] = attrs.fivetran_total_tables_synced result["fivetran_connector_top_tables_by_mar"] = ( attrs.fivetran_connector_top_tables_by_mar ) - result["fivetran_connector_usage_cost"] = attrs.fivetran_connector_usage_cost - result["fivetran_connector_credits_used"] = attrs.fivetran_connector_credits_used + result["fivetran_usage_cost"] = attrs.fivetran_usage_cost + result["fivetran_credits_used"] = attrs.fivetran_credits_used result["fivetran_workflow_name"] = attrs.fivetran_workflow_name result["fivetran_last_sync_status"] = attrs.fivetran_last_sync_status result["fivetran_last_sync_records_updated"] = ( @@ -1088,9 +964,6 @@ def _fivetran_connector_to_nested( is_incomplete=fivetran_connector.is_incomplete, provenance_type=fivetran_connector.provenance_type, home_id=fivetran_connector.home_id, - depth=fivetran_connector.depth, - immediate_upstream=fivetran_connector.immediate_upstream, - immediate_downstream=fivetran_connector.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1126,6 +999,7 @@ def _fivetran_connector_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1134,9 +1008,6 @@ def _fivetran_connector_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_fivetran_connector_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1169,146 +1040,125 @@ def _fivetran_connector_from_nested_bytes( RelationField, ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_ID = KeywordField( - "fivetranConnectorLastSyncId", "fivetranConnectorLastSyncId" -) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_STARTED_AT = NumericField( - "fivetranConnectorLastSyncStartedAt", "fivetranConnectorLastSyncStartedAt" +FivetranConnector.FIVETRAN_LAST_SYNC_ID = KeywordField( + "fivetranLastSyncId", "fivetranLastSyncId" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_FINISHED_AT = NumericField( - "fivetranConnectorLastSyncFinishedAt", "fivetranConnectorLastSyncFinishedAt" +FivetranConnector.FIVETRAN_LAST_SYNC_STARTED_AT = NumericField( + "fivetranLastSyncStartedAt", "fivetranLastSyncStartedAt" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_REASON = KeywordTextField( - "fivetranConnectorLastSyncReason", - "fivetranConnectorLastSyncReason", - "fivetranConnectorLastSyncReason.text", +FivetranConnector.FIVETRAN_LAST_SYNC_FINISHED_AT = NumericField( + "fivetranLastSyncFinishedAt", "fivetranLastSyncFinishedAt" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_TASK_TYPE = KeywordField( - "fivetranConnectorLastSyncTaskType", "fivetranConnectorLastSyncTaskType" +FivetranConnector.FIVETRAN_LAST_SYNC_REASON = KeywordTextField( + "fivetranLastSyncReason", "fivetranLastSyncReason", "fivetranLastSyncReason.text" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_RESCHEDULED_AT = NumericField( - "fivetranConnectorLastSyncRescheduledAt", "fivetranConnectorLastSyncRescheduledAt" +FivetranConnector.FIVETRAN_LAST_SYNC_TASK_TYPE = KeywordField( + "fivetranLastSyncTaskType", "fivetranLastSyncTaskType" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_TABLES_SYNCED = NumericField( - "fivetranConnectorLastSyncTablesSynced", "fivetranConnectorLastSyncTablesSynced" 
+FivetranConnector.FIVETRAN_LAST_SYNC_RESCHEDULED_AT = NumericField( + "fivetranLastSyncRescheduledAt", "fivetranLastSyncRescheduledAt" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_EXTRACT_TIME_SECONDS = NumericField( - "fivetranConnectorLastSyncExtractTimeSeconds", - "fivetranConnectorLastSyncExtractTimeSeconds", +FivetranConnector.FIVETRAN_LAST_SYNC_TABLES_SYNCED = NumericField( + "fivetranLastSyncTablesSynced", "fivetranLastSyncTablesSynced" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_EXTRACT_VOLUME_MEGABYTES = NumericField( - "fivetranConnectorLastSyncExtractVolumeMegabytes", - "fivetranConnectorLastSyncExtractVolumeMegabytes", +FivetranConnector.FIVETRAN_LAST_SYNC_EXTRACT_TIME_SECONDS = NumericField( + "fivetranLastSyncExtractTimeSeconds", "fivetranLastSyncExtractTimeSeconds" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_LOAD_TIME_SECONDS = NumericField( - "fivetranConnectorLastSyncLoadTimeSeconds", - "fivetranConnectorLastSyncLoadTimeSeconds", +FivetranConnector.FIVETRAN_LAST_SYNC_EXTRACT_VOLUME_MEGABYTES = NumericField( + "fivetranLastSyncExtractVolumeMegabytes", "fivetranLastSyncExtractVolumeMegabytes" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_LOAD_VOLUME_MEGABYTES = NumericField( - "fivetranConnectorLastSyncLoadVolumeMegabytes", - "fivetranConnectorLastSyncLoadVolumeMegabytes", +FivetranConnector.FIVETRAN_LAST_SYNC_LOAD_TIME_SECONDS = NumericField( + "fivetranLastSyncLoadTimeSeconds", "fivetranLastSyncLoadTimeSeconds" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_PROCESS_TIME_SECONDS = NumericField( - "fivetranConnectorLastSyncProcessTimeSeconds", - "fivetranConnectorLastSyncProcessTimeSeconds", +FivetranConnector.FIVETRAN_LAST_SYNC_LOAD_VOLUME_MEGABYTES = NumericField( + "fivetranLastSyncLoadVolumeMegabytes", "fivetranLastSyncLoadVolumeMegabytes" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_PROCESS_VOLUME_MEGABYTES = NumericField( - "fivetranConnectorLastSyncProcessVolumeMegabytes", - 
"fivetranConnectorLastSyncProcessVolumeMegabytes", +FivetranConnector.FIVETRAN_LAST_SYNC_PROCESS_TIME_SECONDS = NumericField( + "fivetranLastSyncProcessTimeSeconds", "fivetranLastSyncProcessTimeSeconds" ) -FivetranConnector.FIVETRAN_CONNECTOR_LAST_SYNC_TOTAL_TIME_SECONDS = NumericField( - "fivetranConnectorLastSyncTotalTimeSeconds", - "fivetranConnectorLastSyncTotalTimeSeconds", +FivetranConnector.FIVETRAN_LAST_SYNC_PROCESS_VOLUME_MEGABYTES = NumericField( + "fivetranLastSyncProcessVolumeMegabytes", "fivetranLastSyncProcessVolumeMegabytes" ) -FivetranConnector.FIVETRAN_CONNECTOR_NAME = KeywordField( - "fivetranConnectorName", "fivetranConnectorName" +FivetranConnector.FIVETRAN_LAST_SYNC_TOTAL_TIME_SECONDS = NumericField( + "fivetranLastSyncTotalTimeSeconds", "fivetranLastSyncTotalTimeSeconds" ) -FivetranConnector.FIVETRAN_CONNECTOR_TYPE = KeywordField( - "fivetranConnectorType", "fivetranConnectorType" +FivetranConnector.FIVETRAN_NAME = KeywordField("fivetranName", "fivetranName") +FivetranConnector.FIVETRAN_TYPE = KeywordField("fivetranType", "fivetranType") +FivetranConnector.FIVETRAN_URL = KeywordField("fivetranURL", "fivetranURL") +FivetranConnector.FIVETRAN_DESTINATION_NAME = KeywordField( + "fivetranDestinationName", "fivetranDestinationName" ) -FivetranConnector.FIVETRAN_CONNECTOR_URL = KeywordField( - "fivetranConnectorURL", "fivetranConnectorURL" +FivetranConnector.FIVETRAN_DESTINATION_TYPE = KeywordField( + "fivetranDestinationType", "fivetranDestinationType" ) -FivetranConnector.FIVETRAN_CONNECTOR_DESTINATION_NAME = KeywordField( - "fivetranConnectorDestinationName", "fivetranConnectorDestinationName" +FivetranConnector.FIVETRAN_DESTINATION_URL = KeywordField( + "fivetranDestinationURL", "fivetranDestinationURL" ) -FivetranConnector.FIVETRAN_CONNECTOR_DESTINATION_TYPE = KeywordField( - "fivetranConnectorDestinationType", "fivetranConnectorDestinationType" +FivetranConnector.FIVETRAN_SYNC_SETUP_ON = NumericField( + "fivetranSyncSetupOn", 
"fivetranSyncSetupOn" ) -FivetranConnector.FIVETRAN_CONNECTOR_DESTINATION_URL = KeywordField( - "fivetranConnectorDestinationURL", "fivetranConnectorDestinationURL" +FivetranConnector.FIVETRAN_SYNC_FREQUENCY = KeywordField( + "fivetranSyncFrequency", "fivetranSyncFrequency" ) -FivetranConnector.FIVETRAN_CONNECTOR_SYNC_SETUP_ON = NumericField( - "fivetranConnectorSyncSetupOn", "fivetranConnectorSyncSetupOn" +FivetranConnector.FIVETRAN_SYNC_PAUSED = BooleanField( + "fivetranSyncPaused", "fivetranSyncPaused" ) -FivetranConnector.FIVETRAN_CONNECTOR_SYNC_FREQUENCY = KeywordField( - "fivetranConnectorSyncFrequency", "fivetranConnectorSyncFrequency" +FivetranConnector.FIVETRAN_SYNC_SETUP_USER_FULL_NAME = KeywordField( + "fivetranSyncSetupUserFullName", "fivetranSyncSetupUserFullName" ) -FivetranConnector.FIVETRAN_CONNECTOR_SYNC_PAUSED = BooleanField( - "fivetranConnectorSyncPaused", "fivetranConnectorSyncPaused" +FivetranConnector.FIVETRAN_SYNC_SETUP_USER_EMAIL = KeywordField( + "fivetranSyncSetupUserEmail", "fivetranSyncSetupUserEmail" ) -FivetranConnector.FIVETRAN_CONNECTOR_SYNC_SETUP_USER_FULL_NAME = KeywordField( - "fivetranConnectorSyncSetupUserFullName", "fivetranConnectorSyncSetupUserFullName" +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_FREE = NumericField( + "fivetranMonthlyActiveRowsFree", "fivetranMonthlyActiveRowsFree" ) -FivetranConnector.FIVETRAN_CONNECTOR_SYNC_SETUP_USER_EMAIL = KeywordField( - "fivetranConnectorSyncSetupUserEmail", "fivetranConnectorSyncSetupUserEmail" +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_PAID = NumericField( + "fivetranMonthlyActiveRowsPaid", "fivetranMonthlyActiveRowsPaid" ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_FREE = NumericField( - "fivetranConnectorMonthlyActiveRowsFree", "fivetranConnectorMonthlyActiveRowsFree" +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_TOTAL = NumericField( + "fivetranMonthlyActiveRowsTotal", "fivetranMonthlyActiveRowsTotal" ) 
-FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_PAID = NumericField( - "fivetranConnectorMonthlyActiveRowsPaid", "fivetranConnectorMonthlyActiveRowsPaid" -) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_TOTAL = NumericField( - "fivetranConnectorMonthlyActiveRowsTotal", "fivetranConnectorMonthlyActiveRowsTotal" -) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_FREE = ( - NumericField( - "fivetranConnectorMonthlyActiveRowsChangePercentageFree", - "fivetranConnectorMonthlyActiveRowsChangePercentageFree", - ) +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_FREE = NumericField( + "fivetranMonthlyActiveRowsChangePercentageFree", + "fivetranMonthlyActiveRowsChangePercentageFree", ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_PAID = ( - NumericField( - "fivetranConnectorMonthlyActiveRowsChangePercentagePaid", - "fivetranConnectorMonthlyActiveRowsChangePercentagePaid", - ) +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_PAID = NumericField( + "fivetranMonthlyActiveRowsChangePercentagePaid", + "fivetranMonthlyActiveRowsChangePercentagePaid", ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_TOTAL = ( - NumericField( - "fivetranConnectorMonthlyActiveRowsChangePercentageTotal", - "fivetranConnectorMonthlyActiveRowsChangePercentageTotal", - ) +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_CHANGE_PERCENTAGE_TOTAL = NumericField( + "fivetranMonthlyActiveRowsChangePercentageTotal", + "fivetranMonthlyActiveRowsChangePercentageTotal", ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_FREE_PERCENTAGE_OF_ACCOUNT = ( +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_FREE_PERCENTAGE_OF_ACCOUNT = ( NumericField( - "fivetranConnectorMonthlyActiveRowsFreePercentageOfAccount", - "fivetranConnectorMonthlyActiveRowsFreePercentageOfAccount", + "fivetranMonthlyActiveRowsFreePercentageOfAccount", + 
"fivetranMonthlyActiveRowsFreePercentageOfAccount", ) ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_PAID_PERCENTAGE_OF_ACCOUNT = ( +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_PAID_PERCENTAGE_OF_ACCOUNT = ( NumericField( - "fivetranConnectorMonthlyActiveRowsPaidPercentageOfAccount", - "fivetranConnectorMonthlyActiveRowsPaidPercentageOfAccount", + "fivetranMonthlyActiveRowsPaidPercentageOfAccount", + "fivetranMonthlyActiveRowsPaidPercentageOfAccount", ) ) -FivetranConnector.FIVETRAN_CONNECTOR_MONTHLY_ACTIVE_ROWS_TOTAL_PERCENTAGE_OF_ACCOUNT = ( +FivetranConnector.FIVETRAN_MONTHLY_ACTIVE_ROWS_TOTAL_PERCENTAGE_OF_ACCOUNT = ( NumericField( - "fivetranConnectorMonthlyActiveRowsTotalPercentageOfAccount", - "fivetranConnectorMonthlyActiveRowsTotalPercentageOfAccount", + "fivetranMonthlyActiveRowsTotalPercentageOfAccount", + "fivetranMonthlyActiveRowsTotalPercentageOfAccount", ) ) -FivetranConnector.FIVETRAN_CONNECTOR_TOTAL_TABLES_SYNCED = NumericField( - "fivetranConnectorTotalTablesSynced", "fivetranConnectorTotalTablesSynced" +FivetranConnector.FIVETRAN_TOTAL_TABLES_SYNCED = NumericField( + "fivetranTotalTablesSynced", "fivetranTotalTablesSynced" ) FivetranConnector.FIVETRAN_CONNECTOR_TOP_TABLES_BY_MAR = KeywordField( "fivetranConnectorTopTablesByMAR", "fivetranConnectorTopTablesByMAR" ) -FivetranConnector.FIVETRAN_CONNECTOR_USAGE_COST = NumericField( - "fivetranConnectorUsageCost", "fivetranConnectorUsageCost" +FivetranConnector.FIVETRAN_USAGE_COST = NumericField( + "fivetranUsageCost", "fivetranUsageCost" ) -FivetranConnector.FIVETRAN_CONNECTOR_CREDITS_USED = NumericField( - "fivetranConnectorCreditsUsed", "fivetranConnectorCreditsUsed" +FivetranConnector.FIVETRAN_CREDITS_USED = NumericField( + "fivetranCreditsUsed", "fivetranCreditsUsed" ) FivetranConnector.FIVETRAN_WORKFLOW_NAME = KeywordField( "fivetranWorkflowName", "fivetranWorkflowName" @@ -1341,6 +1191,9 @@ def _fivetran_connector_from_nested_bytes( FivetranConnector.DQ_BASE_DATASET_RULES = 
RelationField("dqBaseDatasetRules") FivetranConnector.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") FivetranConnector.PROCESSES = RelationField("processes") +FivetranConnector.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FivetranConnector.MEANINGS = RelationField("meanings") FivetranConnector.MC_MONITORS = RelationField("mcMonitors") FivetranConnector.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/fivetran_related.py b/pyatlan_v9/model/assets/fivetran_related.py index 3ee933554..16f104819 100644 --- a/pyatlan_v9/model/assets/fivetran_related.py +++ b/pyatlan_v9/model/assets/fivetran_related.py @@ -59,139 +59,125 @@ class RelatedFivetranConnector(RelatedFivetran): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "FivetranConnector" so it serializes correctly - fivetran_connector_last_sync_id: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_id: Union[str, None, UnsetType] = UNSET """ID of the latest sync""" - fivetran_connector_last_sync_started_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_started_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync started on Fivetran, in milliseconds""" - fivetran_connector_last_sync_finished_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_finished_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) when the latest sync finished on Fivetran, in milliseconds""" - fivetran_connector_last_sync_reason: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_reason: Union[str, None, UnsetType] = UNSET """Failure reason for the latest sync on Fivetran. If status is FAILURE, this is the description of the reason why the sync failed. If status is FAILURE_WITH_TASK, this is the description of the Error. 
If status is RESCHEDULED, this is the description of the reason why the sync is rescheduled.""" - fivetran_connector_last_sync_task_type: Union[str, None, UnsetType] = UNSET + fivetran_last_sync_task_type: Union[str, None, UnsetType] = UNSET """Failure task type for the latest sync on Fivetran. If status is FAILURE_WITH_TASK or RESCHEDULED, this field displays the type of the Error that caused the failure or rescheduling, respectively, e.g., reconnect, update_service_account, etc.""" - fivetran_connector_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_rescheduled_at: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) at which the latest sync is rescheduled at on Fivetran""" - fivetran_connector_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_last_sync_tables_synced: Union[int, None, UnsetType] = UNSET """Number of tables synced in the latest sync on Fivetran""" - fivetran_connector_last_sync_extract_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_extract_time_seconds: Union[float, None, UnsetType] = UNSET """Extract time in seconds in the latest sync on fivetran""" - fivetran_connector_last_sync_extract_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_extract_volume_megabytes: Union[float, None, UnsetType] = UNSET """Extracted data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_load_time_seconds: Union[float, None, UnsetType] = UNSET """Load time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_load_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_load_volume_megabytes: Union[float, None, UnsetType] = UNSET """Loaded data volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_time_seconds: Union[float, None, 
UnsetType] = ( - UNSET - ) + fivetran_last_sync_process_time_seconds: Union[float, None, UnsetType] = UNSET """Process time in seconds in the latest sync on Fivetran""" - fivetran_connector_last_sync_process_volume_megabytes: Union[ - float, None, UnsetType - ] = UNSET + fivetran_last_sync_process_volume_megabytes: Union[float, None, UnsetType] = UNSET """Process volume in metabytes in the latest sync on Fivetran""" - fivetran_connector_last_sync_total_time_seconds: Union[float, None, UnsetType] = ( - UNSET - ) + fivetran_last_sync_total_time_seconds: Union[float, None, UnsetType] = UNSET """Total sync time in seconds in the latest sync on Fivetran""" - fivetran_connector_name: Union[str, None, UnsetType] = UNSET + fivetran_name: Union[str, None, UnsetType] = UNSET """Connector name added by the user on Fivetran""" - fivetran_connector_type: Union[str, None, UnsetType] = UNSET + fivetran_type: Union[str, None, UnsetType] = UNSET """Type of connector on Fivetran. Eg: snowflake, google_analytics, notion etc.""" - fivetran_connector_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorURL" + fivetran_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranURL" ) """URL to open the connector details on Fivetran""" - fivetran_connector_destination_name: Union[str, None, UnsetType] = UNSET + fivetran_destination_name: Union[str, None, UnsetType] = UNSET """Destination name added by the user on Fivetran""" - fivetran_connector_destination_type: Union[str, None, UnsetType] = UNSET + fivetran_destination_type: Union[str, None, UnsetType] = UNSET """Type of destination on Fivetran. 
Eg: redshift, bigquery etc.""" - fivetran_connector_destination_url: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="fivetranConnectorDestinationURL" + fivetran_destination_url: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="fivetranDestinationURL" ) """URL to open the destination details on Fivetran""" - fivetran_connector_sync_setup_on: Union[int, None, UnsetType] = UNSET + fivetran_sync_setup_on: Union[int, None, UnsetType] = UNSET """Timestamp (epoch) on which the connector was setup on Fivetran, in milliseconds""" - fivetran_connector_sync_frequency: Union[str, None, UnsetType] = UNSET + fivetran_sync_frequency: Union[str, None, UnsetType] = UNSET """Sync frequency for the connector in number of hours. Eg: Every 6 hours""" - fivetran_connector_sync_paused: Union[bool, None, UnsetType] = UNSET + fivetran_sync_paused: Union[bool, None, UnsetType] = UNSET """Boolean to indicate whether the sync for this connector is paused or not""" - fivetran_connector_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_full_name: Union[str, None, UnsetType] = UNSET """Full name of the user who setup the connector on Fivetran""" - fivetran_connector_sync_setup_user_email: Union[str, None, UnsetType] = UNSET + fivetran_sync_setup_user_email: Union[str, None, UnsetType] = UNSET """Email ID of the user who setpu the connector on Fivetran""" - fivetran_connector_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_free: Union[int, None, UnsetType] = UNSET """Free Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET + fivetran_monthly_active_rows_paid: Union[int, None, UnsetType] = UNSET """Paid Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET + 
fivetran_monthly_active_rows_total: Union[int, None, UnsetType] = UNSET """Total Monthly Active Rows used by the connector in the past month""" - fivetran_connector_monthly_active_rows_change_percentage_free: Union[ + fivetran_monthly_active_rows_change_percentage_free: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of free MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_paid: Union[ + fivetran_monthly_active_rows_change_percentage_paid: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of paid MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_change_percentage_total: Union[ + fivetran_monthly_active_rows_change_percentage_total: Union[ float, None, UnsetType ] = UNSET """Increase in the percentage of total MAR compared to the previous month""" - fivetran_connector_monthly_active_rows_free_percentage_of_account: Union[ + fivetran_monthly_active_rows_free_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total free MAR used by this connector""" - fivetran_connector_monthly_active_rows_paid_percentage_of_account: Union[ + fivetran_monthly_active_rows_paid_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total paid MAR used by this connector""" - fivetran_connector_monthly_active_rows_total_percentage_of_account: Union[ + fivetran_monthly_active_rows_total_percentage_of_account: Union[ float, None, UnsetType ] = UNSET """Percentage of the account's total MAR used by this connector""" - fivetran_connector_total_tables_synced: Union[int, None, UnsetType] = UNSET + fivetran_total_tables_synced: Union[int, None, UnsetType] = UNSET """Total number of tables synced by this connector""" fivetran_connector_top_tables_by_mar: Union[str, None, UnsetType] = msgspec.field( @@ -199,10 +185,10 @@ class RelatedFivetranConnector(RelatedFivetran): ) """Total five tables sorted 
by MAR synced by this connector""" - fivetran_connector_usage_cost: Union[float, None, UnsetType] = UNSET + fivetran_usage_cost: Union[float, None, UnsetType] = UNSET """Total usage cost by this destination""" - fivetran_connector_credits_used: Union[float, None, UnsetType] = UNSET + fivetran_credits_used: Union[float, None, UnsetType] = UNSET """Total credits used by this destination""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/flow.py b/pyatlan_v9/model/assets/flow.py index d3d3cc9ce..63e0be7bc 100644 --- a/pyatlan_v9/model/assets/flow.py +++ b/pyatlan_v9/model/assets/flow.py @@ -39,7 +39,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import RelatedFlow +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -82,6 +82,7 @@ class Flow(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -93,6 +94,8 @@ class Flow(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Flow" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -167,6 +170,11 @@ class Flow(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type 
attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -204,66 +212,6 @@ class Flow(Asset): def __post_init__(self) -> None: self.type_name = "Flow" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Flow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Flow validation failed: {errors}") - - def minimize(self) -> "Flow": - """ - Return a minimal copy of this Flow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Flow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Flow instance with only the minimum required fields. - """ - self.validate() - return Flow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlow": - """ - Create a :class:`RelatedFlow` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlow reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFlow(guid=self.guid) - return RelatedFlow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -397,6 +345,11 @@ class FlowRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -457,6 +410,7 @@ class FlowNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -544,9 +498,6 @@ def _flow_to_nested(flow: Flow) -> FlowNested: is_incomplete=flow.is_incomplete, provenance_type=flow.provenance_type, home_id=flow.home_id, - depth=flow.depth, - immediate_upstream=flow.immediate_upstream, - immediate_downstream=flow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -576,6 +527,7 @@ def _flow_from_nested(nested: FlowNested) -> Flow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -584,9 +536,6 @@ def _flow_from_nested(nested: FlowNested) -> 
Flow: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -650,6 +599,9 @@ def _flow_from_nested_bytes(data: bytes, serde: Serde) -> Flow: Flow.METRICS = RelationField("metrics") Flow.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Flow.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Flow.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Flow.MEANINGS = RelationField("meanings") Flow.MC_MONITORS = RelationField("mcMonitors") Flow.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/flow_control_operation.py b/pyatlan_v9/model/assets/flow_control_operation.py index 46e96492f..1860d7747 100644 --- a/pyatlan_v9/model/assets/flow_control_operation.py +++ b/pyatlan_v9/model/assets/flow_control_operation.py @@ -41,6 +41,7 @@ from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .flow_related import RelatedFlowControlOperation +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .process_related import RelatedProcess @@ -89,6 +90,7 @@ class FlowControlOperation(Asset): FLOW_SUCCESSORS: ClassVar[Any] = None FLOW_CONTROLLED_OPERATIONS: ClassVar[Any] = None FLOW_CONTROLLED_BY: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +102,8 @@ class FlowControlOperation(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: 
Union[str, UnsetType] = "FlowControlOperation" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -191,6 +195,11 @@ class FlowControlOperation(Asset): flow_controlled_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Control operation that controls the execution of this control operation.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -234,70 +243,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowControlOperation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"FlowControlOperation validation failed: {errors}") - - def minimize(self) -> "FlowControlOperation": - """ - Return a minimal copy of this FlowControlOperation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowControlOperation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowControlOperation instance with only the minimum required fields. - """ - self.validate() - return FlowControlOperation(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowControlOperation": - """ - Create a :class:`RelatedFlowControlOperation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowControlOperation reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowControlOperation(guid=self.guid) - return RelatedFlowControlOperation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -450,6 +395,11 @@ class FlowControlOperationRelationshipAttributes(AssetRelationshipAttributes): flow_controlled_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Control operation that controls the execution of this control operation.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -521,6 +471,7 @@ class FlowControlOperationNested(AssetNested): "flow_successors", "flow_controlled_operations", "flow_controlled_by", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -616,9 +567,6 @@ def _flow_control_operation_to_nested( is_incomplete=flow_control_operation.is_incomplete, provenance_type=flow_control_operation.provenance_type, home_id=flow_control_operation.home_id, - depth=flow_control_operation.depth, - immediate_upstream=flow_control_operation.immediate_upstream, - immediate_downstream=flow_control_operation.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -654,6 +602,7 @@ def _flow_control_operation_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ 
-662,9 +611,6 @@ def _flow_control_operation_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_control_operation_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -747,6 +693,9 @@ def _flow_control_operation_from_nested_bytes( "flowControlledOperations" ) FlowControlOperation.FLOW_CONTROLLED_BY = RelationField("flowControlledBy") +FlowControlOperation.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FlowControlOperation.MEANINGS = RelationField("meanings") FlowControlOperation.MC_MONITORS = RelationField("mcMonitors") FlowControlOperation.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/flow_dataset.py b/pyatlan_v9/model/assets/flow_dataset.py index 03da324e7..469877bd4 100644 --- a/pyatlan_v9/model/assets/flow_dataset.py +++ b/pyatlan_v9/model/assets/flow_dataset.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import RelatedFlowDataset, RelatedFlowField, RelatedFlowReusableUnit +from .flow_related import RelatedFlowField, RelatedFlowReusableUnit +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -100,6 +101,7 @@ class FlowDataset(Asset): FLOW_DETAILED_BY: ClassVar[Any] = None FLOW_PARENT_UNIT: ClassVar[Any] = None FLOW_FIELDS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: 
ClassVar[Any] = None @@ -117,6 +119,8 @@ class FlowDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowDataset" + flow_field_count: Union[int, None, UnsetType] = UNSET """Count of the number of individual fields that make up this ephemeral dataset.""" @@ -229,6 +233,11 @@ class FlowDataset(Asset): flow_fields: Union[List[RelatedFlowField], None, UnsetType] = UNSET """Fields contained in the ephemeral dataset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -290,78 +299,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.flow_detailed_by is UNSET: - errors.append("flow_detailed_by is required for creation") - if self.flow_reusable_unit_name is UNSET: - errors.append("flow_reusable_unit_name is required for creation") - if self.flow_reusable_unit_qualified_name is UNSET: - errors.append( - "flow_reusable_unit_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FlowDataset validation failed: {errors}") - - def minimize(self) -> "FlowDataset": - """ - Return a minimal copy of this FlowDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowDataset instance with only the minimum required fields. - """ - self.validate() - return FlowDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowDataset": - """ - Create a :class:`RelatedFlowDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowDataset(guid=self.guid) - return RelatedFlowDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -533,6 +470,11 @@ class FlowDatasetRelationshipAttributes(AssetRelationshipAttributes): flow_fields: Union[List[RelatedFlowField], None, UnsetType] = UNSET """Fields contained in the ephemeral dataset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -622,6 +564,7 @@ class FlowDatasetNested(AssetNested): "flow_detailed_by", "flow_parent_unit", "flow_fields", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -727,9 +670,6 @@ def _flow_dataset_to_nested(flow_dataset: FlowDataset) -> FlowDatasetNested: is_incomplete=flow_dataset.is_incomplete, provenance_type=flow_dataset.provenance_type, home_id=flow_dataset.home_id, - depth=flow_dataset.depth, - immediate_upstream=flow_dataset.immediate_upstream, - immediate_downstream=flow_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -761,6 +701,7 @@ def _flow_dataset_from_nested(nested: FlowDatasetNested) -> FlowDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -769,9 +710,6 @@ def _flow_dataset_from_nested(nested: FlowDatasetNested) -> 
FlowDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -853,6 +791,9 @@ def _flow_dataset_from_nested_bytes(data: bytes, serde: Serde) -> FlowDataset: FlowDataset.FLOW_DETAILED_BY = RelationField("flowDetailedBy") FlowDataset.FLOW_PARENT_UNIT = RelationField("flowParentUnit") FlowDataset.FLOW_FIELDS = RelationField("flowFields") +FlowDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FlowDataset.MEANINGS = RelationField("meanings") FlowDataset.MC_MONITORS = RelationField("mcMonitors") FlowDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/flow_dataset_operation.py b/pyatlan_v9/model/assets/flow_dataset_operation.py index 00c39c0d4..865b11d33 100644 --- a/pyatlan_v9/model/assets/flow_dataset_operation.py +++ b/pyatlan_v9/model/assets/flow_dataset_operation.py @@ -46,11 +46,8 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .fabric_related import RelatedFabricActivity from .fivetran_related import RelatedFivetranConnector -from .flow_related import ( - RelatedFlowControlOperation, - RelatedFlowDatasetOperation, - RelatedFlowReusableUnit, -) +from .flow_related import RelatedFlowControlOperation, RelatedFlowReusableUnit +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .matillion_related import RelatedMatillionComponent from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 +91,7 @@ class FlowDatasetOperation(Asset): AST: ClassVar[Any] = None ADDITIONAL_ETL_CONTEXT: ClassVar[Any] = None AI_DATASET_TYPE: ClassVar[Any] = None + IS_PASS_THROUGH: ClassVar[Any] = None ADF_ACTIVITY: 
ClassVar[Any] = None AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None @@ -110,6 +108,7 @@ class FlowDatasetOperation(Asset): FIVETRAN_CONNECTOR: ClassVar[Any] = None FLOW_ORCHESTRATED_BY: ClassVar[Any] = None FLOW_REUSABLE_UNIT: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_COMPONENT: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -129,6 +128,8 @@ class FlowDatasetOperation(Asset): SODA_CHECKS: ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowDatasetOperation" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -189,6 +190,9 @@ class FlowDatasetOperation(Asset): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + adf_activity: Union[RelatedAdfActivity, None, UnsetType] = UNSET """ADF Activity that is associated with this lineage process.""" @@ -239,6 +243,11 @@ class FlowDatasetOperation(Asset): flow_reusable_unit: Union[RelatedFlowReusableUnit, None, UnsetType] = UNSET """Reusable unit of dataset operations that are all executed together.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -308,78 +317,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = 
False) -> None: - """ - Dry-run validation of this FlowDatasetOperation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.flow_reusable_unit is UNSET: - errors.append("flow_reusable_unit is required for creation") - if self.flow_reusable_unit_name is UNSET: - errors.append("flow_reusable_unit_name is required for creation") - if self.flow_reusable_unit_qualified_name is UNSET: - errors.append( - "flow_reusable_unit_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FlowDatasetOperation validation failed: {errors}") - - def minimize(self) -> "FlowDatasetOperation": - """ - Return a minimal copy of this FlowDatasetOperation with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowDatasetOperation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowDatasetOperation instance with only the minimum required fields. - """ - self.validate() - return FlowDatasetOperation(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowDatasetOperation": - """ - Create a :class:`RelatedFlowDatasetOperation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowDatasetOperation reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFlowDatasetOperation(guid=self.guid) - return RelatedFlowDatasetOperation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -497,6 +434,9 @@ class FlowDatasetOperationAttributes(AssetAttributes): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + class FlowDatasetOperationRelationshipAttributes(AssetRelationshipAttributes): """FlowDatasetOperation-specific relationship attributes for nested API format.""" @@ -551,6 +491,11 @@ class FlowDatasetOperationRelationshipAttributes(AssetRelationshipAttributes): flow_reusable_unit: Union[RelatedFlowReusableUnit, None, UnsetType] = UNSET """Reusable unit of dataset operations that are all executed together.""" + 
gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -649,6 +594,7 @@ class FlowDatasetOperationNested(AssetNested): "fivetran_connector", "flow_orchestrated_by", "flow_reusable_unit", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_component", "mc_monitors", @@ -697,6 +643,7 @@ def _populate_flow_dataset_operation_attrs( attrs.ast = obj.ast attrs.additional_etl_context = obj.additional_etl_context attrs.ai_dataset_type = obj.ai_dataset_type + attrs.is_pass_through = obj.is_pass_through def _extract_flow_dataset_operation_attrs( @@ -728,6 +675,7 @@ def _extract_flow_dataset_operation_attrs( result["ast"] = attrs.ast result["additional_etl_context"] = attrs.additional_etl_context result["ai_dataset_type"] = attrs.ai_dataset_type + result["is_pass_through"] = attrs.is_pass_through return result @@ -768,9 +716,6 @@ def _flow_dataset_operation_to_nested( is_incomplete=flow_dataset_operation.is_incomplete, provenance_type=flow_dataset_operation.provenance_type, home_id=flow_dataset_operation.home_id, - depth=flow_dataset_operation.depth, - immediate_upstream=flow_dataset_operation.immediate_upstream, - immediate_downstream=flow_dataset_operation.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -806,6 +751,7 @@ def _flow_dataset_operation_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -814,9 +760,6 @@ def _flow_dataset_operation_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_dataset_operation_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -842,6 +785,7 @@ def _flow_dataset_operation_from_nested_bytes( # Deferred field descriptor initialization # --------------------------------------------------------------------------- from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, KeywordField, KeywordTextField, NumericField, @@ -888,6 +832,7 @@ def _flow_dataset_operation_from_nested_bytes( "additionalEtlContext", "additionalEtlContext" ) FlowDatasetOperation.AI_DATASET_TYPE = KeywordField("aiDatasetType", "aiDatasetType") +FlowDatasetOperation.IS_PASS_THROUGH = BooleanField("isPassThrough", "isPassThrough") FlowDatasetOperation.ADF_ACTIVITY = RelationField("adfActivity") FlowDatasetOperation.AIRFLOW_TASKS = RelationField("airflowTasks") FlowDatasetOperation.ANOMALO_CHECKS = RelationField("anomaloChecks") @@ -908,6 +853,9 @@ def _flow_dataset_operation_from_nested_bytes( FlowDatasetOperation.FIVETRAN_CONNECTOR = RelationField("fivetranConnector") FlowDatasetOperation.FLOW_ORCHESTRATED_BY = RelationField("flowOrchestratedBy") FlowDatasetOperation.FLOW_REUSABLE_UNIT = RelationField("flowReusableUnit") +FlowDatasetOperation.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FlowDatasetOperation.MEANINGS = RelationField("meanings") FlowDatasetOperation.MATILLION_COMPONENT = RelationField("matillionComponent") FlowDatasetOperation.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/flow_field.py b/pyatlan_v9/model/assets/flow_field.py index 1b8ff6a96..1d60ea619 100644 --- a/pyatlan_v9/model/assets/flow_field.py +++ b/pyatlan_v9/model/assets/flow_field.py @@ -41,7 +41,8 @@ from .data_contract_related import RelatedDataContract 
from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import RelatedFlowDataset, RelatedFlowField +from .flow_related import RelatedFlowDataset +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -98,6 +99,7 @@ class FlowField(Asset): DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FLOW_DATASET: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -115,6 +117,8 @@ class FlowField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowField" + flow_dataset_name: Union[str, None, UnsetType] = UNSET """Simple name of the ephemeral dataset in which this field is contained.""" @@ -221,6 +225,11 @@ class FlowField(Asset): flow_dataset: Union[RelatedFlowDataset, None, UnsetType] = UNSET """Ephemeral dataset that contains these fields.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -284,82 +293,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowField instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.flow_dataset is UNSET: - errors.append("flow_dataset is required for creation") - if self.flow_dataset_name is UNSET: - errors.append("flow_dataset_name is required for creation") - if self.flow_dataset_qualified_name is UNSET: - errors.append("flow_dataset_qualified_name is required for creation") - if self.flow_reusable_unit_name is UNSET: - errors.append("flow_reusable_unit_name is required for creation") - if self.flow_reusable_unit_qualified_name is UNSET: - errors.append( - "flow_reusable_unit_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"FlowField validation failed: {errors}") - - def minimize(self) -> "FlowField": - """ - Return a minimal copy of this FlowField with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowField instance with only the minimum required fields. - """ - self.validate() - return FlowField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowField": - """ - Create a :class:`RelatedFlowField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFlowField(guid=self.guid) - return RelatedFlowField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -525,6 +458,11 @@ class FlowFieldRelationshipAttributes(AssetRelationshipAttributes): flow_dataset: Union[RelatedFlowDataset, None, UnsetType] = UNSET """Ephemeral dataset that contains these fields.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -612,6 +550,7 @@ class FlowFieldNested(AssetNested): "dq_base_dataset_rules", "dq_reference_dataset_rules", "flow_dataset", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -715,9 +654,6 @@ def _flow_field_to_nested(flow_field: FlowField) -> FlowFieldNested: is_incomplete=flow_field.is_incomplete, provenance_type=flow_field.provenance_type, 
home_id=flow_field.home_id, - depth=flow_field.depth, - immediate_upstream=flow_field.immediate_upstream, - immediate_downstream=flow_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -749,6 +685,7 @@ def _flow_field_from_nested(nested: FlowFieldNested) -> FlowField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -757,9 +694,6 @@ def _flow_field_from_nested(nested: FlowFieldNested) -> FlowField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -841,6 +775,9 @@ def _flow_field_from_nested_bytes(data: bytes, serde: Serde) -> FlowField: FlowField.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") FlowField.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") FlowField.FLOW_DATASET = RelationField("flowDataset") +FlowField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FlowField.MEANINGS = RelationField("meanings") FlowField.MC_MONITORS = RelationField("mcMonitors") FlowField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/flow_field_operation.py b/pyatlan_v9/model/assets/flow_field_operation.py index e37000db9..b5258aede 100644 --- a/pyatlan_v9/model/assets/flow_field_operation.py +++ b/pyatlan_v9/model/assets/flow_field_operation.py @@ -45,7 +45,8 @@ from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .fabric_related import RelatedFabricActivity from .fivetran_related import 
RelatedFivetranConnector -from .flow_related import RelatedFlowControlOperation, RelatedFlowFieldOperation +from .flow_related import RelatedFlowControlOperation +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .matillion_related import RelatedMatillionComponent from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -89,6 +90,7 @@ class FlowFieldOperation(Asset): AST: ClassVar[Any] = None ADDITIONAL_ETL_CONTEXT: ClassVar[Any] = None AI_DATASET_TYPE: ClassVar[Any] = None + IS_PASS_THROUGH: ClassVar[Any] = None ADF_ACTIVITY: ClassVar[Any] = None AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None @@ -104,6 +106,7 @@ class FlowFieldOperation(Asset): FABRIC_ACTIVITIES: ClassVar[Any] = None FIVETRAN_CONNECTOR: ClassVar[Any] = None FLOW_ORCHESTRATED_BY: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_COMPONENT: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -124,6 +127,8 @@ class FlowFieldOperation(Asset): SODA_CHECKS: ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowFieldOperation" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -184,6 +189,9 @@ class FlowFieldOperation(Asset): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + adf_activity: Union[RelatedAdfActivity, None, UnsetType] = UNSET """ADF Activity that is associated with this lineage process.""" @@ -231,6 +239,11 @@ class FlowFieldOperation(Asset): flow_orchestrated_by: 
Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -297,66 +310,6 @@ class FlowFieldOperation(Asset): def __post_init__(self) -> None: self.type_name = "FlowFieldOperation" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowFieldOperation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FlowFieldOperation validation failed: {errors}") - - def minimize(self) -> "FlowFieldOperation": - """ - Return a minimal copy of this FlowFieldOperation with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowFieldOperation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowFieldOperation instance with only the minimum required fields. - """ - self.validate() - return FlowFieldOperation(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowFieldOperation": - """ - Create a :class:`RelatedFlowFieldOperation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowFieldOperation reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFlowFieldOperation(guid=self.guid) - return RelatedFlowFieldOperation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -474,6 +427,9 @@ class FlowFieldOperationAttributes(AssetAttributes): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + class FlowFieldOperationRelationshipAttributes(AssetRelationshipAttributes): """FlowFieldOperation-specific relationship attributes for nested API format.""" @@ -525,6 +481,11 @@ class FlowFieldOperationRelationshipAttributes(AssetRelationshipAttributes): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ 
+ List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -625,6 +586,7 @@ class FlowFieldOperationNested(AssetNested): "fabric_activities", "fivetran_connector", "flow_orchestrated_by", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_component", "mc_monitors", @@ -674,6 +636,7 @@ def _populate_flow_field_operation_attrs( attrs.ast = obj.ast attrs.additional_etl_context = obj.additional_etl_context attrs.ai_dataset_type = obj.ai_dataset_type + attrs.is_pass_through = obj.is_pass_through def _extract_flow_field_operation_attrs(attrs: FlowFieldOperationAttributes) -> dict: @@ -703,6 +666,7 @@ def _extract_flow_field_operation_attrs(attrs: FlowFieldOperationAttributes) -> result["ast"] = attrs.ast result["additional_etl_context"] = attrs.additional_etl_context result["ai_dataset_type"] = attrs.ai_dataset_type + result["is_pass_through"] = attrs.is_pass_through return result @@ -743,9 +707,6 @@ def _flow_field_operation_to_nested( is_incomplete=flow_field_operation.is_incomplete, provenance_type=flow_field_operation.provenance_type, home_id=flow_field_operation.home_id, - depth=flow_field_operation.depth, - immediate_upstream=flow_field_operation.immediate_upstream, - immediate_downstream=flow_field_operation.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -781,6 +742,7 @@ def _flow_field_operation_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -789,9 +751,6 @@ def _flow_field_operation_from_nested( 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_field_operation_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -817,6 +776,7 @@ def _flow_field_operation_from_nested_bytes( # Deferred field descriptor initialization # --------------------------------------------------------------------------- from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, KeywordField, KeywordTextField, NumericField, @@ -863,6 +823,7 @@ def _flow_field_operation_from_nested_bytes( "additionalEtlContext", "additionalEtlContext" ) FlowFieldOperation.AI_DATASET_TYPE = KeywordField("aiDatasetType", "aiDatasetType") +FlowFieldOperation.IS_PASS_THROUGH = BooleanField("isPassThrough", "isPassThrough") FlowFieldOperation.ADF_ACTIVITY = RelationField("adfActivity") FlowFieldOperation.AIRFLOW_TASKS = RelationField("airflowTasks") FlowFieldOperation.ANOMALO_CHECKS = RelationField("anomaloChecks") @@ -880,6 +841,9 @@ def _flow_field_operation_from_nested_bytes( FlowFieldOperation.FABRIC_ACTIVITIES = RelationField("fabricActivities") FlowFieldOperation.FIVETRAN_CONNECTOR = RelationField("fivetranConnector") FlowFieldOperation.FLOW_ORCHESTRATED_BY = RelationField("flowOrchestratedBy") +FlowFieldOperation.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FlowFieldOperation.MEANINGS = RelationField("meanings") FlowFieldOperation.MATILLION_COMPONENT = RelationField("matillionComponent") FlowFieldOperation.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/flow_folder.py b/pyatlan_v9/model/assets/flow_folder.py index ff6a34723..cf6a4a5be 100644 --- a/pyatlan_v9/model/assets/flow_folder.py +++ b/pyatlan_v9/model/assets/flow_folder.py @@ -41,6 +41,7 @@ from .data_mesh_related import RelatedDataProduct 
from .data_quality_related import RelatedDataQualityRule, RelatedMetric from .flow_related import RelatedFlowFolder +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -85,6 +86,7 @@ class FlowFolder(Asset): DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None FLOW_SUB_FOLDERS: ClassVar[Any] = None FLOW_PARENT_FOLDER: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -96,6 +98,8 @@ class FlowFolder(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowFolder" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -176,6 +180,11 @@ class FlowFolder(Asset): flow_parent_folder: Union[RelatedFlowFolder, None, UnsetType] = UNSET """Parent folder containing the sub-folders.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -219,70 +228,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"FlowFolder validation failed: {errors}") - - def minimize(self) -> "FlowFolder": - """ - Return a minimal copy of this FlowFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowFolder instance with only the minimum required fields. - """ - self.validate() - return FlowFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowFolder": - """ - Create a :class:`RelatedFlowFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowFolder reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowFolder(guid=self.guid) - return RelatedFlowFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -422,6 +367,11 @@ class FlowFolderRelationshipAttributes(AssetRelationshipAttributes): flow_parent_folder: Union[RelatedFlowFolder, None, UnsetType] = UNSET """Parent folder containing the sub-folders.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -488,6 +438,7 @@ class FlowFolderNested(AssetNested): "dq_reference_dataset_rules", "flow_sub_folders", "flow_parent_folder", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -575,9 +526,6 @@ def _flow_folder_to_nested(flow_folder: FlowFolder) -> FlowFolderNested: is_incomplete=flow_folder.is_incomplete, provenance_type=flow_folder.provenance_type, home_id=flow_folder.home_id, - depth=flow_folder.depth, - immediate_upstream=flow_folder.immediate_upstream, - immediate_downstream=flow_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -609,6 +557,7 @@ def _flow_folder_from_nested(nested: FlowFolderNested) -> FlowFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -617,9 +566,6 @@ def _flow_folder_from_nested(nested: FlowFolderNested) -> FlowFolder: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_folder_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -687,6 +633,9 @@ def _flow_folder_from_nested_bytes(data: bytes, serde: Serde) -> FlowFolder: FlowFolder.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") FlowFolder.FLOW_SUB_FOLDERS = RelationField("flowSubFolders") FlowFolder.FLOW_PARENT_FOLDER = RelationField("flowParentFolder") +FlowFolder.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FlowFolder.MEANINGS = RelationField("meanings") FlowFolder.MC_MONITORS = RelationField("mcMonitors") FlowFolder.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/flow_project.py b/pyatlan_v9/model/assets/flow_project.py index 4476c266b..4449762ad 100644 --- a/pyatlan_v9/model/assets/flow_project.py +++ b/pyatlan_v9/model/assets/flow_project.py @@ -39,7 +39,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import RelatedFlowProject +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -82,6 +82,7 @@ class FlowProject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -93,6 +94,8 @@ class FlowProject(Asset): SCHEMA_REGISTRY_SUBJECTS: 
ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowProject" + flow_started_at: Union[int, None, UnsetType] = UNSET """Date and time at which this point in the data processing or orchestration started.""" @@ -167,6 +170,11 @@ class FlowProject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -204,66 +212,6 @@ class FlowProject(Asset): def __post_init__(self) -> None: self.type_name = "FlowProject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FlowProject validation failed: {errors}") - - def minimize(self) -> "FlowProject": - """ - Return a minimal copy of this FlowProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowProject instance with only the minimum required fields. - """ - self.validate() - return FlowProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowProject": - """ - Create a :class:`RelatedFlowProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowProject(guid=self.guid) - return RelatedFlowProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -397,6 +345,11 @@ class FlowProjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -461,6 +414,7 @@ class FlowProjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -550,9 +504,6 @@ def _flow_project_to_nested(flow_project: FlowProject) -> FlowProjectNested: is_incomplete=flow_project.is_incomplete, provenance_type=flow_project.provenance_type, home_id=flow_project.home_id, - depth=flow_project.depth, - immediate_upstream=flow_project.immediate_upstream, - immediate_downstream=flow_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -584,6 +535,7 @@ def _flow_project_from_nested(nested: FlowProjectNested) -> FlowProject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -592,9 +544,6 @@ def _flow_project_from_nested(nested: FlowProjectNested) -> FlowProject: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_project_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -662,6 +611,9 @@ def _flow_project_from_nested_bytes(data: bytes, serde: Serde) -> FlowProject: FlowProject.METRICS = RelationField("metrics") FlowProject.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") FlowProject.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +FlowProject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FlowProject.MEANINGS = RelationField("meanings") FlowProject.MC_MONITORS = RelationField("mcMonitors") FlowProject.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/flow_reusable_unit.py b/pyatlan_v9/model/assets/flow_reusable_unit.py index a8d98999b..fcdb92a9f 100644 --- a/pyatlan_v9/model/assets/flow_reusable_unit.py +++ b/pyatlan_v9/model/assets/flow_reusable_unit.py @@ -39,11 +39,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .flow_related import ( - RelatedFlowDataset, - RelatedFlowDatasetOperation, - RelatedFlowReusableUnit, -) +from .flow_related import RelatedFlowDataset, RelatedFlowDatasetOperation +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -91,6 +88,7 @@ class FlowReusableUnit(Asset): FLOW_DATA_FLOWS: ClassVar[Any] = None FLOW_ABSTRACTS: ClassVar[Any] = None FLOW_DATASETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: 
ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +100,8 @@ class FlowReusableUnit(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "FlowReusableUnit" + flow_dataset_count: Union[int, None, UnsetType] = UNSET """Count of the number of ephemeral datasets contained within this reusable unit.""" @@ -191,6 +191,11 @@ class FlowReusableUnit(Asset): flow_datasets: Union[List[RelatedFlowDataset], None, UnsetType] = UNSET """Ephemeral datasets that are contained within the reusable unit.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -228,66 +233,6 @@ class FlowReusableUnit(Asset): def __post_init__(self) -> None: self.type_name = "FlowReusableUnit" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this FlowReusableUnit instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"FlowReusableUnit validation failed: {errors}") - - def minimize(self) -> "FlowReusableUnit": - """ - Return a minimal copy of this FlowReusableUnit with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new FlowReusableUnit with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new FlowReusableUnit instance with only the minimum required fields. - """ - self.validate() - return FlowReusableUnit(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFlowReusableUnit": - """ - Create a :class:`RelatedFlowReusableUnit` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFlowReusableUnit reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFlowReusableUnit(guid=self.guid) - return RelatedFlowReusableUnit(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -438,6 +383,11 @@ class FlowReusableUnitRelationshipAttributes(AssetRelationshipAttributes): flow_datasets: Union[List[RelatedFlowDataset], None, UnsetType] = UNSET """Ephemeral datasets that are contained within the reusable unit.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -507,6 +457,7 @@ class FlowReusableUnitNested(AssetNested): "flow_data_flows", "flow_abstracts", "flow_datasets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -604,9 +555,6 @@ def _flow_reusable_unit_to_nested( is_incomplete=flow_reusable_unit.is_incomplete, provenance_type=flow_reusable_unit.provenance_type, home_id=flow_reusable_unit.home_id, - depth=flow_reusable_unit.depth, - immediate_upstream=flow_reusable_unit.immediate_upstream, - immediate_downstream=flow_reusable_unit.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -640,6 +588,7 @@ def _flow_reusable_unit_from_nested(nested: FlowReusableUnitNested) -> FlowReusa updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -648,9 +597,6 @@ def 
_flow_reusable_unit_from_nested(nested: FlowReusableUnitNested) -> FlowReusa is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_flow_reusable_unit_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -733,6 +679,9 @@ def _flow_reusable_unit_from_nested_bytes( FlowReusableUnit.FLOW_DATA_FLOWS = RelationField("flowDataFlows") FlowReusableUnit.FLOW_ABSTRACTS = RelationField("flowAbstracts") FlowReusableUnit.FLOW_DATASETS = RelationField("flowDatasets") +FlowReusableUnit.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) FlowReusableUnit.MEANINGS = RelationField("meanings") FlowReusableUnit.MC_MONITORS = RelationField("mcMonitors") FlowReusableUnit.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/folder.py b/pyatlan_v9/model/assets/folder.py index beecd38e7..577d8382d 100644 --- a/pyatlan_v9/model/assets/folder.py +++ b/pyatlan_v9/model/assets/folder.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .namespace_related import RelatedFolder, RelatedNamespace @@ -73,6 +74,7 @@ class Folder(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -87,6 +89,8 @@ class Folder(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None 
SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Folder" + parent_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the parent folder or collection in which this folder exists.""" @@ -125,6 +129,11 @@ class Folder(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -177,72 +186,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Folder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.parent is UNSET: - errors.append("parent is required for creation") - if errors: - raise ValueError(f"Folder validation failed: {errors}") - - def minimize(self) -> "Folder": - """ - Return a minimal copy of this Folder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Folder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Folder instance with only the minimum required fields. - """ - self.validate() - return Folder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFolder": - """ - Create a :class:`RelatedFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFolder reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedFolder(guid=self.guid) - return RelatedFolder(qualified_name=self.qualified_name) - @classmethod def creator( cls, @@ -382,6 +325,11 @@ class FolderRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -455,6 +403,7 @@ class FolderNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -519,9 +468,6 @@ def _folder_to_nested(folder: Folder) -> FolderNested: is_incomplete=folder.is_incomplete, provenance_type=folder.provenance_type, home_id=folder.home_id, - depth=folder.depth, - immediate_upstream=folder.immediate_upstream, - immediate_downstream=folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -551,6 +497,7 @@ def _folder_from_nested(nested: FolderNested) -> Folder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -559,9 +506,6 @@ def _folder_from_nested(nested: FolderNested) -> Folder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_folder_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -603,6 +547,9 @@ def 
_folder_from_nested_bytes(data: bytes, serde: Serde) -> Folder: Folder.METRICS = RelationField("metrics") Folder.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Folder.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Folder.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Folder.MEANINGS = RelationField("meanings") Folder.MC_MONITORS = RelationField("mcMonitors") Folder.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/form.py b/pyatlan_v9/model/assets/form.py index e1e086110..1111a3b3c 100644 --- a/pyatlan_v9/model/assets/form.py +++ b/pyatlan_v9/model/assets/form.py @@ -39,7 +39,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .form_related import RelatedForm +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -70,6 +70,7 @@ class Form(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -81,6 +82,8 @@ class Form(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Form" + form_fields: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Fields in a form.""" @@ -119,6 +122,11 @@ class Form(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have 
aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -156,66 +164,6 @@ class Form(Asset): def __post_init__(self) -> None: self.type_name = "Form" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Form instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Form validation failed: {errors}") - - def minimize(self) -> "Form": - """ - Return a minimal copy of this Form with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Form with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Form instance with only the minimum required fields. 
- """ - self.validate() - return Form(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedForm": - """ - Create a :class:`RelatedForm` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedForm reference to this asset. - """ - if self.guid is not UNSET: - return RelatedForm(guid=self.guid) - return RelatedForm(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -313,6 +261,11 @@ class FormRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -373,6 +326,7 @@ class FormNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -434,9 +388,6 @@ def _form_to_nested(form: Form) -> FormNested: is_incomplete=form.is_incomplete, provenance_type=form.provenance_type, home_id=form.home_id, - depth=form.depth, - immediate_upstream=form.immediate_upstream, - immediate_downstream=form.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -466,6 +417,7 @@ def _form_from_nested(nested: FormNested) -> Form: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + 
meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -474,9 +426,6 @@ def _form_from_nested(nested: FormNested) -> Form: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_form_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -511,6 +460,9 @@ def _form_from_nested_bytes(data: bytes, serde: Serde) -> Form: Form.METRICS = RelationField("metrics") Form.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Form.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Form.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Form.MEANINGS = RelationField("meanings") Form.MC_MONITORS = RelationField("mcMonitors") Form.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/function.py b/pyatlan_v9/model/assets/function.py index 52969c5e8..6a8ab7583 100644 --- a/pyatlan_v9/model/assets/function.py +++ b/pyatlan_v9/model/assets/function.py @@ -48,6 +48,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -63,7 +64,7 @@ RelatedSqlInsightBusinessQuestion, RelatedSqlInsightJoin, ) -from .sql_related import RelatedFunction, RelatedSchema +from .sql_related import RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -77,19 +78,19 @@ class Function(Asset): """ FUNCTION_DEFINITION: ClassVar[Any] = None - FUNCTION_RETURN_TYPE: ClassVar[Any] = None - FUNCTION_ARGUMENTS: ClassVar[Any] = None - 
FUNCTION_LANGUAGE: ClassVar[Any] = None - FUNCTION_TYPE: ClassVar[Any] = None - FUNCTION_IS_EXTERNAL: ClassVar[Any] = None - FUNCTION_IS_DMF: ClassVar[Any] = None - FUNCTION_IS_SECURE: ClassVar[Any] = None - FUNCTION_IS_MEMOIZABLE: ClassVar[Any] = None - FUNCTION_RUNTIME_VERSION: ClassVar[Any] = None - FUNCTION_EXTERNAL_ACCESS_INTEGRATIONS: ClassVar[Any] = None - FUNCTION_SECRETS: ClassVar[Any] = None - FUNCTION_PACKAGES: ClassVar[Any] = None - FUNCTION_INSTALLED_PACKAGES: ClassVar[Any] = None + SQL_RETURN_TYPE: ClassVar[Any] = None + SQL_ARGUMENTS: ClassVar[Any] = None + SQL_LANGUAGE: ClassVar[Any] = None + SQL_TYPE: ClassVar[Any] = None + SQL_IS_EXTERNAL: ClassVar[Any] = None + SQL_IS_DMF: ClassVar[Any] = None + SQL_IS_SECURE: ClassVar[Any] = None + SQL_IS_MEMOIZABLE: ClassVar[Any] = None + SQL_RUNTIME_VERSION: ClassVar[Any] = None + SQL_EXTERNAL_ACCESS_INTEGRATIONS: ClassVar[Any] = None + SQL_SECRETS: ClassVar[Any] = None + SQL_PACKAGES: ClassVar[Any] = None + SQL_INSTALLED_PACKAGES: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -107,7 +108,6 @@ class Function(Asset): IS_PROFILED: ClassVar[Any] = None LAST_PROFILED_AT: ClassVar[Any] = None SQL_AI_MODEL_CONTEXT_QUALIFIED_NAME: ClassVar[Any] = None - SQL_IS_SECURE: ClassVar[Any] = None SQL_HAS_AI_INSIGHTS: ClassVar[Any] = None SQL_AI_INSIGHTS_LAST_ANALYZED_AT: ClassVar[Any] = None SQL_AI_INSIGHTS_POPULAR_BUSINESS_QUESTION_COUNT: ClassVar[Any] = None @@ -135,6 +135,7 @@ class Function(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -158,48 +159,50 @@ class Function(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: 
Union[str, UnsetType] = "Function" + function_definition: Union[str, None, UnsetType] = UNSET """Code or set of statements that determine the output of the function.""" - function_return_type: Union[str, None, UnsetType] = UNSET + sql_return_type: Union[str, None, UnsetType] = UNSET """Data type of the value returned by the function.""" - function_arguments: Union[List[str], None, UnsetType] = UNSET + sql_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the function.""" - function_language: Union[str, None, UnsetType] = UNSET + sql_language: Union[str, None, UnsetType] = UNSET """Programming language in which the function is written.""" - function_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of function.""" - function_is_external: Union[bool, None, UnsetType] = UNSET + sql_is_external: Union[bool, None, UnsetType] = UNSET """Whether the function is stored or executed externally (true) or internally (false).""" - function_is_dmf: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="functionIsDMF" + sql_is_dmf: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlIsDMF" ) """Whether the function is a data metric function.""" - function_is_secure: Union[bool, None, UnsetType] = UNSET - """Whether sensitive information of the function is omitted for unauthorized users (true) or not (false).""" + sql_is_secure: Union[bool, None, UnsetType] = UNSET + """Whether this asset is secure (true) or not (false).""" - function_is_memoizable: Union[bool, None, UnsetType] = UNSET + sql_is_memoizable: Union[bool, None, UnsetType] = UNSET """Whether the function must re-compute if there are no underlying changes in the values (false) or not (true).""" - function_runtime_version: Union[str, None, UnsetType] = UNSET + sql_runtime_version: Union[str, None, UnsetType] = UNSET """Version of the language runtime used by the function.""" - 
function_external_access_integrations: Union[str, None, UnsetType] = UNSET + sql_external_access_integrations: Union[str, None, UnsetType] = UNSET """Names of external access integrations used by the function.""" - function_secrets: Union[str, None, UnsetType] = UNSET + sql_secrets: Union[str, None, UnsetType] = UNSET """Secret variables used by the function.""" - function_packages: Union[str, None, UnsetType] = UNSET + sql_packages: Union[str, None, UnsetType] = UNSET """Packages requested by the function.""" - function_installed_packages: Union[str, None, UnsetType] = UNSET + sql_installed_packages: Union[str, None, UnsetType] = UNSET """Packages actually installed for the function.""" query_count: Union[int, None, UnsetType] = UNSET @@ -255,9 +258,6 @@ class Function(Asset): ) """Unique name of the context in which the model versions exist, or empty if it does not exist within an AI model context.""" - sql_is_secure: Union[bool, None, UnsetType] = UNSET - """Whether this asset is secure (true) or not (false).""" - sql_has_ai_insights: Union[bool, None, UnsetType] = UNSET """Whether this asset has any AI insights data available.""" @@ -345,6 +345,11 @@ class Function(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -434,80 +439,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Function instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.function_schema is UNSET: - errors.append("function_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"Function validation failed: {errors}") - - def minimize(self) -> "Function": - """ - Return a minimal copy of this Function with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Function with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new Function instance with only the minimum required fields. - """ - self.validate() - return Function(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedFunction": - """ - Create a :class:`RelatedFunction` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedFunction reference to this asset. - """ - if self.guid is not UNSET: - return RelatedFunction(guid=self.guid) - return RelatedFunction(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -566,45 +497,45 @@ class FunctionAttributes(AssetAttributes): function_definition: Union[str, None, UnsetType] = UNSET """Code or set of statements that determine the output of the function.""" - function_return_type: Union[str, None, UnsetType] = UNSET + sql_return_type: Union[str, None, UnsetType] = UNSET """Data type of the value returned by the function.""" - function_arguments: Union[List[str], None, UnsetType] = UNSET + sql_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the function.""" - function_language: Union[str, None, UnsetType] = UNSET + sql_language: Union[str, None, UnsetType] = UNSET """Programming language in which the function is written.""" - function_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of function.""" - function_is_external: Union[bool, None, UnsetType] = UNSET + sql_is_external: Union[bool, None, UnsetType] = UNSET """Whether the function is stored or executed externally (true) or internally (false).""" - function_is_dmf: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="functionIsDMF" 
+ sql_is_dmf: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlIsDMF" ) """Whether the function is a data metric function.""" - function_is_secure: Union[bool, None, UnsetType] = UNSET - """Whether sensitive information of the function is omitted for unauthorized users (true) or not (false).""" + sql_is_secure: Union[bool, None, UnsetType] = UNSET + """Whether this asset is secure (true) or not (false).""" - function_is_memoizable: Union[bool, None, UnsetType] = UNSET + sql_is_memoizable: Union[bool, None, UnsetType] = UNSET """Whether the function must re-compute if there are no underlying changes in the values (false) or not (true).""" - function_runtime_version: Union[str, None, UnsetType] = UNSET + sql_runtime_version: Union[str, None, UnsetType] = UNSET """Version of the language runtime used by the function.""" - function_external_access_integrations: Union[str, None, UnsetType] = UNSET + sql_external_access_integrations: Union[str, None, UnsetType] = UNSET """Names of external access integrations used by the function.""" - function_secrets: Union[str, None, UnsetType] = UNSET + sql_secrets: Union[str, None, UnsetType] = UNSET """Secret variables used by the function.""" - function_packages: Union[str, None, UnsetType] = UNSET + sql_packages: Union[str, None, UnsetType] = UNSET """Packages requested by the function.""" - function_installed_packages: Union[str, None, UnsetType] = UNSET + sql_installed_packages: Union[str, None, UnsetType] = UNSET """Packages actually installed for the function.""" query_count: Union[int, None, UnsetType] = UNSET @@ -660,9 +591,6 @@ class FunctionAttributes(AssetAttributes): ) """Unique name of the context in which the model versions exist, or empty if it does not exist within an AI model context.""" - sql_is_secure: Union[bool, None, UnsetType] = UNSET - """Whether this asset is secure (true) or not (false).""" - sql_has_ai_insights: Union[bool, None, UnsetType] = UNSET """Whether this asset has any AI 
insights data available.""" @@ -754,6 +682,11 @@ class FunctionRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -872,6 +805,7 @@ class FunctionNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -901,21 +835,19 @@ def _populate_function_attrs(attrs: FunctionAttributes, obj: Function) -> None: """Populate Function-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.function_definition = obj.function_definition - attrs.function_return_type = obj.function_return_type - attrs.function_arguments = obj.function_arguments - attrs.function_language = obj.function_language - attrs.function_type = obj.function_type - attrs.function_is_external = obj.function_is_external - attrs.function_is_dmf = obj.function_is_dmf - attrs.function_is_secure = obj.function_is_secure - attrs.function_is_memoizable = obj.function_is_memoizable - attrs.function_runtime_version = obj.function_runtime_version - attrs.function_external_access_integrations = ( - obj.function_external_access_integrations - ) - attrs.function_secrets = obj.function_secrets - attrs.function_packages = obj.function_packages - attrs.function_installed_packages = obj.function_installed_packages + attrs.sql_return_type = obj.sql_return_type + attrs.sql_arguments = obj.sql_arguments + attrs.sql_language = obj.sql_language + attrs.sql_type = obj.sql_type + attrs.sql_is_external = obj.sql_is_external + attrs.sql_is_dmf = obj.sql_is_dmf 
+ attrs.sql_is_secure = obj.sql_is_secure + attrs.sql_is_memoizable = obj.sql_is_memoizable + attrs.sql_runtime_version = obj.sql_runtime_version + attrs.sql_external_access_integrations = obj.sql_external_access_integrations + attrs.sql_secrets = obj.sql_secrets + attrs.sql_packages = obj.sql_packages + attrs.sql_installed_packages = obj.sql_installed_packages attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -933,7 +865,6 @@ def _populate_function_attrs(attrs: FunctionAttributes, obj: Function) -> None: attrs.is_profiled = obj.is_profiled attrs.last_profiled_at = obj.last_profiled_at attrs.sql_ai_model_context_qualified_name = obj.sql_ai_model_context_qualified_name - attrs.sql_is_secure = obj.sql_is_secure attrs.sql_has_ai_insights = obj.sql_has_ai_insights attrs.sql_ai_insights_last_analyzed_at = obj.sql_ai_insights_last_analyzed_at attrs.sql_ai_insights_popular_business_question_count = ( @@ -951,21 +882,19 @@ def _extract_function_attrs(attrs: FunctionAttributes) -> dict: """Extract all Function attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) result["function_definition"] = attrs.function_definition - result["function_return_type"] = attrs.function_return_type - result["function_arguments"] = attrs.function_arguments - result["function_language"] = attrs.function_language - result["function_type"] = attrs.function_type - result["function_is_external"] = attrs.function_is_external - result["function_is_dmf"] = attrs.function_is_dmf - result["function_is_secure"] = attrs.function_is_secure - result["function_is_memoizable"] = attrs.function_is_memoizable - result["function_runtime_version"] = attrs.function_runtime_version - result["function_external_access_integrations"] = ( - attrs.function_external_access_integrations - ) - result["function_secrets"] = attrs.function_secrets - result["function_packages"] = attrs.function_packages - 
result["function_installed_packages"] = attrs.function_installed_packages + result["sql_return_type"] = attrs.sql_return_type + result["sql_arguments"] = attrs.sql_arguments + result["sql_language"] = attrs.sql_language + result["sql_type"] = attrs.sql_type + result["sql_is_external"] = attrs.sql_is_external + result["sql_is_dmf"] = attrs.sql_is_dmf + result["sql_is_secure"] = attrs.sql_is_secure + result["sql_is_memoizable"] = attrs.sql_is_memoizable + result["sql_runtime_version"] = attrs.sql_runtime_version + result["sql_external_access_integrations"] = attrs.sql_external_access_integrations + result["sql_secrets"] = attrs.sql_secrets + result["sql_packages"] = attrs.sql_packages + result["sql_installed_packages"] = attrs.sql_installed_packages result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -985,7 +914,6 @@ def _extract_function_attrs(attrs: FunctionAttributes) -> dict: result["sql_ai_model_context_qualified_name"] = ( attrs.sql_ai_model_context_qualified_name ) - result["sql_is_secure"] = attrs.sql_is_secure result["sql_has_ai_insights"] = attrs.sql_has_ai_insights result["sql_ai_insights_last_analyzed_at"] = attrs.sql_ai_insights_last_analyzed_at result["sql_ai_insights_popular_business_question_count"] = ( @@ -1037,9 +965,6 @@ def _function_to_nested(function: Function) -> FunctionNested: is_incomplete=function.is_incomplete, provenance_type=function.provenance_type, home_id=function.home_id, - depth=function.depth, - immediate_upstream=function.immediate_upstream, - immediate_downstream=function.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1071,6 +996,7 @@ def _function_from_nested(nested: FunctionNested) -> Function: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, 
labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1079,9 +1005,6 @@ def _function_from_nested(nested: FunctionNested) -> Function: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_function_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1111,26 +1034,24 @@ def _function_from_nested_bytes(data: bytes, serde: Serde) -> Function: ) Function.FUNCTION_DEFINITION = KeywordField("functionDefinition", "functionDefinition") -Function.FUNCTION_RETURN_TYPE = KeywordField("functionReturnType", "functionReturnType") -Function.FUNCTION_ARGUMENTS = KeywordField("functionArguments", "functionArguments") -Function.FUNCTION_LANGUAGE = KeywordField("functionLanguage", "functionLanguage") -Function.FUNCTION_TYPE = KeywordField("functionType", "functionType") -Function.FUNCTION_IS_EXTERNAL = BooleanField("functionIsExternal", "functionIsExternal") -Function.FUNCTION_IS_DMF = BooleanField("functionIsDMF", "functionIsDMF") -Function.FUNCTION_IS_SECURE = BooleanField("functionIsSecure", "functionIsSecure") -Function.FUNCTION_IS_MEMOIZABLE = BooleanField( - "functionIsMemoizable", "functionIsMemoizable" -) -Function.FUNCTION_RUNTIME_VERSION = KeywordTextField( - "functionRuntimeVersion", "functionRuntimeVersion", "functionRuntimeVersion.text" +Function.SQL_RETURN_TYPE = KeywordField("sqlReturnType", "sqlReturnType") +Function.SQL_ARGUMENTS = KeywordField("sqlArguments", "sqlArguments") +Function.SQL_LANGUAGE = KeywordField("sqlLanguage", "sqlLanguage") +Function.SQL_TYPE = KeywordField("sqlType", "sqlType") +Function.SQL_IS_EXTERNAL = BooleanField("sqlIsExternal", "sqlIsExternal") +Function.SQL_IS_DMF = BooleanField("sqlIsDMF", "sqlIsDMF") +Function.SQL_IS_SECURE = BooleanField("sqlIsSecure", "sqlIsSecure") 
+Function.SQL_IS_MEMOIZABLE = BooleanField("sqlIsMemoizable", "sqlIsMemoizable") +Function.SQL_RUNTIME_VERSION = KeywordTextField( + "sqlRuntimeVersion", "sqlRuntimeVersion", "sqlRuntimeVersion.text" ) -Function.FUNCTION_EXTERNAL_ACCESS_INTEGRATIONS = KeywordField( - "functionExternalAccessIntegrations", "functionExternalAccessIntegrations" +Function.SQL_EXTERNAL_ACCESS_INTEGRATIONS = KeywordField( + "sqlExternalAccessIntegrations", "sqlExternalAccessIntegrations" ) -Function.FUNCTION_SECRETS = KeywordField("functionSecrets", "functionSecrets") -Function.FUNCTION_PACKAGES = KeywordField("functionPackages", "functionPackages") -Function.FUNCTION_INSTALLED_PACKAGES = KeywordField( - "functionInstalledPackages", "functionInstalledPackages" +Function.SQL_SECRETS = KeywordField("sqlSecrets", "sqlSecrets") +Function.SQL_PACKAGES = KeywordField("sqlPackages", "sqlPackages") +Function.SQL_INSTALLED_PACKAGES = KeywordField( + "sqlInstalledPackages", "sqlInstalledPackages" ) Function.QUERY_COUNT = NumericField("queryCount", "queryCount") Function.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") @@ -1161,7 +1082,6 @@ def _function_from_nested_bytes(data: bytes, serde: Serde) -> Function: Function.SQL_AI_MODEL_CONTEXT_QUALIFIED_NAME = KeywordField( "sqlAIModelContextQualifiedName", "sqlAIModelContextQualifiedName" ) -Function.SQL_IS_SECURE = BooleanField("sqlIsSecure", "sqlIsSecure") Function.SQL_HAS_AI_INSIGHTS = BooleanField("sqlHasAiInsights", "sqlHasAiInsights") Function.SQL_AI_INSIGHTS_LAST_ANALYZED_AT = NumericField( "sqlAiInsightsLastAnalyzedAt", "sqlAiInsightsLastAnalyzedAt" @@ -1200,6 +1120,9 @@ def _function_from_nested_bytes(data: bytes, serde: Serde) -> Function: Function.DBT_SOURCES = RelationField("dbtSources") Function.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Function.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Function.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) 
Function.MEANINGS = RelationField("meanings") Function.MC_MONITORS = RelationField("mcMonitors") Function.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/gcp_dataplex.py b/pyatlan_v9/model/assets/gcp_dataplex.py new file mode 100644 index 000000000..24be2d33c --- /dev/null +++ b/pyatlan_v9/model/assets/gcp_dataplex.py @@ -0,0 +1,626 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +GCPDataplex asset model with flattened inheritance. + +This module provides: +- GCPDataplex: Flat asset class (easy to use) +- GCPDataplexAttributes: Nested attributes struct (extends AssetAttributes) +- GCPDataplexNested: Nested API format struct +""" + +from __future__ import annotations + +from typing import Any, ClassVar, Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .airflow_related import RelatedAirflowTask +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gtc_related import RelatedAtlasGlossaryTerm +from .model_related import RelatedModelAttribute, RelatedModelEntity +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .partial_related import RelatedPartialField, RelatedPartialObject +from .process_related import RelatedProcess +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import 
RelatedSodaCheck +from .spark_related import RelatedSparkJob +from pyatlan_v9.model.conversion_utils import categorize_relationships, merge_relationships +from pyatlan_v9.model.serde import Serde, get_serde +from pyatlan_v9.model.transform import register_asset + +from .gcp_dataplex_related import RelatedGCPDataplexAspectType + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= + +@register_asset +class GCPDataplex(Asset): + """ + Base class for GCP Dataplex Aspect Type assets. + """ + + CATALOG_DATASET_GUID: ClassVar[Any] = None + GOOGLE_SERVICE: ClassVar[Any] = None + GOOGLE_PROJECT_NAME: ClassVar[Any] = None + GOOGLE_PROJECT_ID: ClassVar[Any] = None + GOOGLE_PROJECT_NUMBER: ClassVar[Any] = None + GOOGLE_LOCATION: ClassVar[Any] = None + GOOGLE_LOCATION_TYPE: ClassVar[Any] = None + GOOGLE_LABELS: ClassVar[Any] = None + GOOGLE_TAGS: ClassVar[Any] = None + CLOUD_UNIFORM_RESOURCE_NAME: ClassVar[Any] = None + INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + PARTIAL_CHILD_FIELDS: ClassVar[Any] = None + PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None + INPUT_TO_PROCESSES: ClassVar[Any] = 
None + OUTPUT_FROM_PROCESSES: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None + INPUT_TO_SPARK_JOBS: ClassVar[Any] = None + OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + + type_name: Union[str, UnsetType] = "GCPDataplex" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + google_service: Union[str, None, UnsetType] = UNSET + """Service in Google in which the asset exists.""" + + google_project_name: Union[str, None, UnsetType] = UNSET + """Name of the project in which the asset exists.""" + + google_project_id: Union[str, None, UnsetType] = UNSET + """ID of the project in which the asset exists.""" + + google_project_number: Union[int, None, UnsetType] = UNSET + """Number of the project in which the asset exists.""" + + google_location: Union[str, None, UnsetType] = UNSET + """Location of this asset in Google.""" + + google_location_type: Union[str, None, UnsetType] = UNSET + """Type of location of this asset in Google.""" + + google_labels: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """List of labels that have been applied to the asset in Google.""" + + google_tags: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """List of tags that have been applied to the asset in Google.""" + + cloud_uniform_resource_name: Union[str, None, UnsetType] = UNSET + """Uniform resource name (URN) for the asset: AWS ARN, Google Cloud URI, Azure resource ID, Oracle OCID, and so on.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is 
output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + 
"""Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "GCPDataplex" + + + + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= + + def to_json(self, nested: bool = True, serde: Serde | None = None) -> str: + """ + Convert to JSON string using optimized nested struct 
serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. + + Returns: + JSON string representation + """ + if serde is None: + serde = get_serde() + if nested: + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _gcp_dataplex_to_nested_bytes(self, serde) + + @staticmethod + def from_json(json_data: str | bytes, serde: Serde | None = None) -> GCPDataplex: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. + + Returns: + GCPDataplex instance + """ + if isinstance(json_data, str): + json_data = json_data.encode("utf-8") + if serde is None: + serde = get_serde() + return _gcp_dataplex_from_nested_bytes(json_data, serde) + + +# ============================================================================= +# NESTED FORMAT CLASSES +# ============================================================================= + +class GCPDataplexAttributes(AssetAttributes): + """GCPDataplex-specific attributes for nested API format.""" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + google_service: Union[str, None, UnsetType] = UNSET + """Service in Google in which the asset exists.""" + + google_project_name: Union[str, None, UnsetType] = UNSET + """Name of the project in which the asset exists.""" + + google_project_id: Union[str, None, UnsetType] = UNSET + """ID of the project in which the asset exists.""" + + google_project_number: Union[int, 
None, UnsetType] = UNSET + """Number of the project in which the asset exists.""" + + google_location: Union[str, None, UnsetType] = UNSET + """Location of this asset in Google.""" + + google_location_type: Union[str, None, UnsetType] = UNSET + """Type of location of this asset in Google.""" + + google_labels: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """List of labels that have been applied to the asset in Google.""" + + google_tags: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """List of tags that have been applied to the asset in Google.""" + + cloud_uniform_resource_name: Union[str, None, UnsetType] = UNSET + """Uniform resource name (URN) for the asset: AWS ARN, Google Cloud URI, Azure resource ID, Oracle OCID, and so on.""" + +class GCPDataplexRelationshipAttributes(AssetRelationshipAttributes): + """GCPDataplex-specific relationship attributes for nested API format.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + 
input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], 
None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + +class GCPDataplexNested(AssetNested): + """GCPDataplex in nested API format for high-performance serialization.""" + + attributes: Union[GCPDataplexAttributes, UnsetType] = UNSET + relationship_attributes: Union[GCPDataplexRelationshipAttributes, UnsetType] = UNSET + append_relationship_attributes: Union[GCPDataplexRelationshipAttributes, UnsetType] = UNSET + remove_relationship_attributes: Union[GCPDataplexRelationshipAttributes, UnsetType] = UNSET + +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_GCP_DATAPLEX_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, + "input_to_airflow_tasks", + "output_from_airflow_tasks", + "anomalo_checks", + "application", + "application_field", + "data_contract_latest", + "data_contract_latest_certified", + "output_port_data_products", + "input_port_data_products", + "model_implemented_entities", + "model_implemented_attributes", + "metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + "mc_monitors", + "mc_incidents", + "partial_child_fields", + "partial_child_objects", + 
"input_to_processes", + "output_from_processes", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + "soda_checks", + "input_to_spark_jobs", + "output_from_spark_jobs", +] + +def _populate_gcp_dataplex_attrs(attrs: GCPDataplexAttributes, obj: GCPDataplex) -> None: + """Populate GCPDataplex-specific attributes on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.catalog_dataset_guid = obj.catalog_dataset_guid + attrs.google_service = obj.google_service + attrs.google_project_name = obj.google_project_name + attrs.google_project_id = obj.google_project_id + attrs.google_project_number = obj.google_project_number + attrs.google_location = obj.google_location + attrs.google_location_type = obj.google_location_type + attrs.google_labels = obj.google_labels + attrs.google_tags = obj.google_tags + attrs.cloud_uniform_resource_name = obj.cloud_uniform_resource_name + +def _extract_gcp_dataplex_attrs(attrs: GCPDataplexAttributes) -> dict: + """Extract all GCPDataplex attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid + result["google_service"] = attrs.google_service + result["google_project_name"] = attrs.google_project_name + result["google_project_id"] = attrs.google_project_id + result["google_project_number"] = attrs.google_project_number + result["google_location"] = attrs.google_location + result["google_location_type"] = attrs.google_location_type + result["google_labels"] = attrs.google_labels + result["google_tags"] = attrs.google_tags + result["cloud_uniform_resource_name"] = attrs.cloud_uniform_resource_name + return result + +# ============================================================================= +# CONVERSION FUNCTIONS +# ============================================================================= + + +def _gcp_dataplex_to_nested(gcp_dataplex: GCPDataplex) -> 
GCPDataplexNested: + """Convert flat GCPDataplex to nested format.""" + attrs = GCPDataplexAttributes() + _populate_gcp_dataplex_attrs(attrs, gcp_dataplex) + # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE) + replace_rels, append_rels, remove_rels = categorize_relationships( + gcp_dataplex, _GCP_DATAPLEX_REL_FIELDS, GCPDataplexRelationshipAttributes + ) + return GCPDataplexNested( + guid=gcp_dataplex.guid, + type_name=gcp_dataplex.type_name, + status=gcp_dataplex.status, + version=gcp_dataplex.version, + create_time=gcp_dataplex.create_time, + update_time=gcp_dataplex.update_time, + created_by=gcp_dataplex.created_by, + updated_by=gcp_dataplex.updated_by, + classifications=gcp_dataplex.classifications, + classification_names=gcp_dataplex.classification_names, + meanings=gcp_dataplex.meanings, + labels=gcp_dataplex.labels, + business_attributes=gcp_dataplex.business_attributes, + custom_attributes=gcp_dataplex.custom_attributes, + pending_tasks=gcp_dataplex.pending_tasks, + proxy=gcp_dataplex.proxy, + is_incomplete=gcp_dataplex.is_incomplete, + provenance_type=gcp_dataplex.provenance_type, + home_id=gcp_dataplex.home_id, + attributes=attrs, + relationship_attributes=replace_rels, + append_relationship_attributes=append_rels, + remove_relationship_attributes=remove_rels, + ) + +def _gcp_dataplex_from_nested(nested: GCPDataplexNested) -> GCPDataplex: + """Convert nested format to flat GCPDataplex.""" + attrs = nested.attributes if nested.attributes is not UNSET else GCPDataplexAttributes() + # Merge relationships from all three buckets + merged_rels = merge_relationships( + nested.relationship_attributes, + nested.append_relationship_attributes, + nested.remove_relationship_attributes, + _GCP_DATAPLEX_REL_FIELDS, + GCPDataplexRelationshipAttributes + ) + return GCPDataplex( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + 
created_by=nested.created_by, + updated_by=nested.updated_by, + classifications=nested.classifications, + classification_names=nested.classification_names, + meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + custom_attributes=nested.custom_attributes, + pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_gcp_dataplex_attrs(attrs), + # Merged relationship attributes + **merged_rels, + ) + +def _gcp_dataplex_to_nested_bytes(gcp_dataplex: GCPDataplex, serde: Serde) -> bytes: + """Convert flat GCPDataplex to nested JSON bytes.""" + return serde.encode(_gcp_dataplex_to_nested(gcp_dataplex)) + + +def _gcp_dataplex_from_nested_bytes(data: bytes, serde: Serde) -> GCPDataplex: + """Convert nested JSON bytes to flat GCPDataplex.""" + nested = serde.decode(data, GCPDataplexNested) + return _gcp_dataplex_from_nested(nested) + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# --------------------------------------------------------------------------- +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +GCPDataplex.CATALOG_DATASET_GUID = KeywordField("catalogDatasetGuid", "catalogDatasetGuid") +GCPDataplex.GOOGLE_SERVICE = KeywordField("googleService", "googleService") +GCPDataplex.GOOGLE_PROJECT_NAME = KeywordTextField("googleProjectName", "googleProjectName", "googleProjectName.text") +GCPDataplex.GOOGLE_PROJECT_ID = KeywordTextField("googleProjectId", "googleProjectId", "googleProjectId.text") +GCPDataplex.GOOGLE_PROJECT_NUMBER = NumericField("googleProjectNumber", "googleProjectNumber") +GCPDataplex.GOOGLE_LOCATION = KeywordField("googleLocation", "googleLocation") +GCPDataplex.GOOGLE_LOCATION_TYPE = KeywordField("googleLocationType", 
"googleLocationType") +GCPDataplex.GOOGLE_LABELS = KeywordField("googleLabels", "googleLabels") +GCPDataplex.GOOGLE_TAGS = KeywordField("googleTags", "googleTags") +GCPDataplex.CLOUD_UNIFORM_RESOURCE_NAME = KeywordField("cloudUniformResourceName", "cloudUniformResourceName") +GCPDataplex.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") +GCPDataplex.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") +GCPDataplex.ANOMALO_CHECKS = RelationField("anomaloChecks") +GCPDataplex.APPLICATION = RelationField("application") +GCPDataplex.APPLICATION_FIELD = RelationField("applicationField") +GCPDataplex.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +GCPDataplex.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +GCPDataplex.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +GCPDataplex.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +GCPDataplex.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") +GCPDataplex.MODEL_IMPLEMENTED_ATTRIBUTES = RelationField("modelImplementedAttributes") +GCPDataplex.METRICS = RelationField("metrics") +GCPDataplex.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +GCPDataplex.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +GCPDataplex.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField("gcpDataplexAspectTypeMetadataEntities") +GCPDataplex.MEANINGS = RelationField("meanings") +GCPDataplex.MC_MONITORS = RelationField("mcMonitors") +GCPDataplex.MC_INCIDENTS = RelationField("mcIncidents") +GCPDataplex.PARTIAL_CHILD_FIELDS = RelationField("partialChildFields") +GCPDataplex.PARTIAL_CHILD_OBJECTS = RelationField("partialChildObjects") +GCPDataplex.INPUT_TO_PROCESSES = RelationField("inputToProcesses") +GCPDataplex.OUTPUT_FROM_PROCESSES = RelationField("outputFromProcesses") +GCPDataplex.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +GCPDataplex.USER_DEF_RELATIONSHIP_FROM = 
RelationField("userDefRelationshipFrom") +GCPDataplex.FILES = RelationField("files") +GCPDataplex.LINKS = RelationField("links") +GCPDataplex.README = RelationField("readme") +GCPDataplex.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +GCPDataplex.SODA_CHECKS = RelationField("sodaChecks") +GCPDataplex.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs") +GCPDataplex.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs") \ No newline at end of file diff --git a/pyatlan_v9/model/assets/gcp_dataplex_aspect_type.py b/pyatlan_v9/model/assets/gcp_dataplex_aspect_type.py new file mode 100644 index 000000000..0d1156fb7 --- /dev/null +++ b/pyatlan_v9/model/assets/gcp_dataplex_aspect_type.py @@ -0,0 +1,686 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +GCPDataplexAspectType asset model with flattened inheritance. + +This module provides: +- GCPDataplexAspectType: Flat asset class (easy to use) +- GCPDataplexAspectTypeAttributes: Nested attributes struct (extends AssetAttributes) +- GCPDataplexAspectTypeNested: Nested API format struct +""" + +from __future__ import annotations + +from typing import Any, ClassVar, Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .airflow_related import RelatedAirflowTask +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .asset_related import RelatedAsset +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gtc_related import RelatedAtlasGlossaryTerm +from .model_related import 
RelatedModelAttribute, RelatedModelEntity +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .partial_related import RelatedPartialField, RelatedPartialObject +from .process_related import RelatedProcess +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck +from .spark_related import RelatedSparkJob +from pyatlan_v9.model.conversion_utils import categorize_relationships, merge_relationships +from pyatlan_v9.model.serde import Serde, get_serde +from pyatlan_v9.model.transform import register_asset + +from .gcp_dataplex_related import RelatedGCPDataplexAspectType + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= + +@register_asset +class GCPDataplexAspectType(Asset): + """ + Represents a Google Dataplex Aspect Type definition. 
+ """ + + GCP_DATAPLEX_ASPECT_TYPE_RESOURCE_NAME: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_PROJECT: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_LOCATION: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_TEMPLATE: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_LABELS: ClassVar[Any] = None + CATALOG_DATASET_GUID: ClassVar[Any] = None + GOOGLE_SERVICE: ClassVar[Any] = None + GOOGLE_PROJECT_NAME: ClassVar[Any] = None + GOOGLE_PROJECT_ID: ClassVar[Any] = None + GOOGLE_PROJECT_NUMBER: ClassVar[Any] = None + GOOGLE_LOCATION: ClassVar[Any] = None + GOOGLE_LOCATION_TYPE: ClassVar[Any] = None + GOOGLE_LABELS: ClassVar[Any] = None + GOOGLE_TAGS: ClassVar[Any] = None + CLOUD_UNIFORM_RESOURCE_NAME: ClassVar[Any] = None + INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_ENTRIES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + PARTIAL_CHILD_FIELDS: ClassVar[Any] = None + PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None + INPUT_TO_PROCESSES: ClassVar[Any] = None + OUTPUT_FROM_PROCESSES: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + 
SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None + INPUT_TO_SPARK_JOBS: ClassVar[Any] = None + OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + + type_name: Union[str, UnsetType] = "GCPDataplexAspectType" + + gcp_dataplex_aspect_type_resource_name: Union[str, None, UnsetType] = UNSET + """Full GCP resource name of this Aspect Type (e.g. projects/{project}/locations/{location}/aspectTypes/{id}). Used to match against assetGCPDataplexAspectType on BigQuery entry assets.""" + + gcp_dataplex_aspect_type_project: Union[str, None, UnsetType] = UNSET + """GCP project in which this Aspect Type is defined.""" + + gcp_dataplex_aspect_type_location: Union[str, None, UnsetType] = UNSET + """GCP location (region or global) in which this Aspect Type is defined.""" + + gcp_dataplex_aspect_type_metadata_template: Union[str, None, UnsetType] = UNSET + """Full Dataplex metadataTemplate JSON schema, stored as a stringified JSON blob.""" + + gcp_dataplex_aspect_type_labels: Union[Dict[str, str], None, UnsetType] = UNSET + """GCP labels attached to this Aspect Type resource.""" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + google_service: Union[str, None, UnsetType] = UNSET + """Service in Google in which the asset exists.""" + + google_project_name: Union[str, None, UnsetType] = UNSET + """Name of the project in which the asset exists.""" + + google_project_id: Union[str, None, UnsetType] = UNSET + """ID of the project in which the asset exists.""" + + google_project_number: Union[int, None, UnsetType] = UNSET + """Number of the project in which the asset exists.""" + + google_location: Union[str, None, UnsetType] = UNSET + """Location of this asset in Google.""" + + google_location_type: Union[str, None, UnsetType] = UNSET + """Type of location of this asset in Google.""" + + google_labels: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """List of labels that 
have been applied to the asset in Google.""" + + google_tags: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """List of tags that have been applied to the asset in Google.""" + + cloud_uniform_resource_name: Union[str, None, UnsetType] = UNSET + """Uniform resource name (URN) for the asset: AWS ARN, Google Cloud URI, Azure resource ID, Oracle OCID, and so on.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: 
Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_entries: Union[List[RelatedAsset], None, UnsetType] = UNSET + """Dataplex Aspect Types whose aspects are attached to this entry (asset).""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: 
Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "GCPDataplexAspectType" + + + + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= + + def to_json(self, nested: bool = True, serde: Serde | None = None) -> str: + """ + Convert to JSON string using optimized nested struct serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. + + Returns: + JSON string representation + """ + if serde is None: + serde = get_serde() + if nested: + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _gcp_dataplex_aspect_type_to_nested_bytes(self, serde) + + @staticmethod + def from_json(json_data: str | bytes, serde: Serde | None = None) -> GCPDataplexAspectType: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. 
+ + Returns: + GCPDataplexAspectType instance + """ + if isinstance(json_data, str): + json_data = json_data.encode("utf-8") + if serde is None: + serde = get_serde() + return _gcp_dataplex_aspect_type_from_nested_bytes(json_data, serde) + + +# ============================================================================= +# NESTED FORMAT CLASSES +# ============================================================================= + +class GCPDataplexAspectTypeAttributes(AssetAttributes): + """GCPDataplexAspectType-specific attributes for nested API format.""" + + gcp_dataplex_aspect_type_resource_name: Union[str, None, UnsetType] = UNSET + """Full GCP resource name of this Aspect Type (e.g. projects/{project}/locations/{location}/aspectTypes/{id}). Used to match against assetGCPDataplexAspectType on BigQuery entry assets.""" + + gcp_dataplex_aspect_type_project: Union[str, None, UnsetType] = UNSET + """GCP project in which this Aspect Type is defined.""" + + gcp_dataplex_aspect_type_location: Union[str, None, UnsetType] = UNSET + """GCP location (region or global) in which this Aspect Type is defined.""" + + gcp_dataplex_aspect_type_metadata_template: Union[str, None, UnsetType] = UNSET + """Full Dataplex metadataTemplate JSON schema, stored as a stringified JSON blob.""" + + gcp_dataplex_aspect_type_labels: Union[Dict[str, str], None, UnsetType] = UNSET + """GCP labels attached to this Aspect Type resource.""" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + google_service: Union[str, None, UnsetType] = UNSET + """Service in Google in which the asset exists.""" + + google_project_name: Union[str, None, UnsetType] = UNSET + """Name of the project in which the asset exists.""" + + google_project_id: Union[str, None, UnsetType] = UNSET + """ID of the project in which the asset exists.""" + + google_project_number: Union[int, None, UnsetType] = UNSET + """Number of the project in which the 
asset exists.""" + + google_location: Union[str, None, UnsetType] = UNSET + """Location of this asset in Google.""" + + google_location_type: Union[str, None, UnsetType] = UNSET + """Type of location of this asset in Google.""" + + google_labels: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """List of labels that have been applied to the asset in Google.""" + + google_tags: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + """List of tags that have been applied to the asset in Google.""" + + cloud_uniform_resource_name: Union[str, None, UnsetType] = UNSET + """Uniform resource name (URN) for the asset: AWS ARN, Google Cloud URI, Azure resource ID, Oracle OCID, and so on.""" + +class GCPDataplexAspectTypeRelationshipAttributes(AssetRelationshipAttributes): + """GCPDataplexAspectType-specific relationship attributes for nested API format.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: 
Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_entries: Union[List[RelatedAsset], None, UnsetType] = UNSET + """Dataplex Aspect Types whose aspects are attached to this entry (asset).""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + 
user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + +class GCPDataplexAspectTypeNested(AssetNested): + """GCPDataplexAspectType in nested API format for high-performance serialization.""" + + attributes: Union[GCPDataplexAspectTypeAttributes, UnsetType] = UNSET + relationship_attributes: Union[GCPDataplexAspectTypeRelationshipAttributes, UnsetType] = UNSET + append_relationship_attributes: Union[GCPDataplexAspectTypeRelationshipAttributes, UnsetType] = UNSET + remove_relationship_attributes: Union[GCPDataplexAspectTypeRelationshipAttributes, UnsetType] = UNSET + +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_GCP_DATAPLEX_ASPECT_TYPE_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, + "input_to_airflow_tasks", + "output_from_airflow_tasks", + "anomalo_checks", + "application", + "application_field", + "data_contract_latest", + "data_contract_latest_certified", + "output_port_data_products", + "input_port_data_products", + "model_implemented_entities", + "model_implemented_attributes", + 
"metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_entries", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + "mc_monitors", + "mc_incidents", + "partial_child_fields", + "partial_child_objects", + "input_to_processes", + "output_from_processes", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + "soda_checks", + "input_to_spark_jobs", + "output_from_spark_jobs", +] + +def _populate_gcp_dataplex_aspect_type_attrs(attrs: GCPDataplexAspectTypeAttributes, obj: GCPDataplexAspectType) -> None: + """Populate GCPDataplexAspectType-specific attributes on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.gcp_dataplex_aspect_type_resource_name = obj.gcp_dataplex_aspect_type_resource_name + attrs.gcp_dataplex_aspect_type_project = obj.gcp_dataplex_aspect_type_project + attrs.gcp_dataplex_aspect_type_location = obj.gcp_dataplex_aspect_type_location + attrs.gcp_dataplex_aspect_type_metadata_template = obj.gcp_dataplex_aspect_type_metadata_template + attrs.gcp_dataplex_aspect_type_labels = obj.gcp_dataplex_aspect_type_labels + attrs.catalog_dataset_guid = obj.catalog_dataset_guid + attrs.google_service = obj.google_service + attrs.google_project_name = obj.google_project_name + attrs.google_project_id = obj.google_project_id + attrs.google_project_number = obj.google_project_number + attrs.google_location = obj.google_location + attrs.google_location_type = obj.google_location_type + attrs.google_labels = obj.google_labels + attrs.google_tags = obj.google_tags + attrs.cloud_uniform_resource_name = obj.cloud_uniform_resource_name + +def _extract_gcp_dataplex_aspect_type_attrs(attrs: GCPDataplexAspectTypeAttributes) -> dict: + """Extract all GCPDataplexAspectType attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["gcp_dataplex_aspect_type_resource_name"] = 
attrs.gcp_dataplex_aspect_type_resource_name + result["gcp_dataplex_aspect_type_project"] = attrs.gcp_dataplex_aspect_type_project + result["gcp_dataplex_aspect_type_location"] = attrs.gcp_dataplex_aspect_type_location + result["gcp_dataplex_aspect_type_metadata_template"] = attrs.gcp_dataplex_aspect_type_metadata_template + result["gcp_dataplex_aspect_type_labels"] = attrs.gcp_dataplex_aspect_type_labels + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid + result["google_service"] = attrs.google_service + result["google_project_name"] = attrs.google_project_name + result["google_project_id"] = attrs.google_project_id + result["google_project_number"] = attrs.google_project_number + result["google_location"] = attrs.google_location + result["google_location_type"] = attrs.google_location_type + result["google_labels"] = attrs.google_labels + result["google_tags"] = attrs.google_tags + result["cloud_uniform_resource_name"] = attrs.cloud_uniform_resource_name + return result + +# ============================================================================= +# CONVERSION FUNCTIONS +# ============================================================================= + + +def _gcp_dataplex_aspect_type_to_nested(gcp_dataplex_aspect_type: GCPDataplexAspectType) -> GCPDataplexAspectTypeNested: + """Convert flat GCPDataplexAspectType to nested format.""" + attrs = GCPDataplexAspectTypeAttributes() + _populate_gcp_dataplex_aspect_type_attrs(attrs, gcp_dataplex_aspect_type) + # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE) + replace_rels, append_rels, remove_rels = categorize_relationships( + gcp_dataplex_aspect_type, _GCP_DATAPLEX_ASPECT_TYPE_REL_FIELDS, GCPDataplexAspectTypeRelationshipAttributes + ) + return GCPDataplexAspectTypeNested( + guid=gcp_dataplex_aspect_type.guid, + type_name=gcp_dataplex_aspect_type.type_name, + status=gcp_dataplex_aspect_type.status, + version=gcp_dataplex_aspect_type.version, + 
create_time=gcp_dataplex_aspect_type.create_time, + update_time=gcp_dataplex_aspect_type.update_time, + created_by=gcp_dataplex_aspect_type.created_by, + updated_by=gcp_dataplex_aspect_type.updated_by, + classifications=gcp_dataplex_aspect_type.classifications, + classification_names=gcp_dataplex_aspect_type.classification_names, + meanings=gcp_dataplex_aspect_type.meanings, + labels=gcp_dataplex_aspect_type.labels, + business_attributes=gcp_dataplex_aspect_type.business_attributes, + custom_attributes=gcp_dataplex_aspect_type.custom_attributes, + pending_tasks=gcp_dataplex_aspect_type.pending_tasks, + proxy=gcp_dataplex_aspect_type.proxy, + is_incomplete=gcp_dataplex_aspect_type.is_incomplete, + provenance_type=gcp_dataplex_aspect_type.provenance_type, + home_id=gcp_dataplex_aspect_type.home_id, + attributes=attrs, + relationship_attributes=replace_rels, + append_relationship_attributes=append_rels, + remove_relationship_attributes=remove_rels, + ) + +def _gcp_dataplex_aspect_type_from_nested(nested: GCPDataplexAspectTypeNested) -> GCPDataplexAspectType: + """Convert nested format to flat GCPDataplexAspectType.""" + attrs = nested.attributes if nested.attributes is not UNSET else GCPDataplexAspectTypeAttributes() + # Merge relationships from all three buckets + merged_rels = merge_relationships( + nested.relationship_attributes, + nested.append_relationship_attributes, + nested.remove_relationship_attributes, + _GCP_DATAPLEX_ASPECT_TYPE_REL_FIELDS, + GCPDataplexAspectTypeRelationshipAttributes + ) + return GCPDataplexAspectType( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + created_by=nested.created_by, + updated_by=nested.updated_by, + classifications=nested.classifications, + classification_names=nested.classification_names, + meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + 
custom_attributes=nested.custom_attributes, + pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_gcp_dataplex_aspect_type_attrs(attrs), + # Merged relationship attributes + **merged_rels, + ) + +def _gcp_dataplex_aspect_type_to_nested_bytes(gcp_dataplex_aspect_type: GCPDataplexAspectType, serde: Serde) -> bytes: + """Convert flat GCPDataplexAspectType to nested JSON bytes.""" + return serde.encode(_gcp_dataplex_aspect_type_to_nested(gcp_dataplex_aspect_type)) + + +def _gcp_dataplex_aspect_type_from_nested_bytes(data: bytes, serde: Serde) -> GCPDataplexAspectType: + """Convert nested JSON bytes to flat GCPDataplexAspectType.""" + nested = serde.decode(data, GCPDataplexAspectTypeNested) + return _gcp_dataplex_aspect_type_from_nested(nested) + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# --------------------------------------------------------------------------- +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + KeywordField, + KeywordTextField, + NumericField, + RelationField, +) + +GCPDataplexAspectType.GCP_DATAPLEX_ASPECT_TYPE_RESOURCE_NAME = KeywordField("gcpDataplexAspectTypeResourceName", "gcpDataplexAspectTypeResourceName") +GCPDataplexAspectType.GCP_DATAPLEX_ASPECT_TYPE_PROJECT = KeywordField("gcpDataplexAspectTypeProject", "gcpDataplexAspectTypeProject") +GCPDataplexAspectType.GCP_DATAPLEX_ASPECT_TYPE_LOCATION = KeywordField("gcpDataplexAspectTypeLocation", "gcpDataplexAspectTypeLocation") +GCPDataplexAspectType.GCP_DATAPLEX_ASPECT_TYPE_METADATA_TEMPLATE = KeywordField("gcpDataplexAspectTypeMetadataTemplate", "gcpDataplexAspectTypeMetadataTemplate") +GCPDataplexAspectType.GCP_DATAPLEX_ASPECT_TYPE_LABELS = KeywordField("gcpDataplexAspectTypeLabels", "gcpDataplexAspectTypeLabels") +GCPDataplexAspectType.CATALOG_DATASET_GUID = 
KeywordField("catalogDatasetGuid", "catalogDatasetGuid") +GCPDataplexAspectType.GOOGLE_SERVICE = KeywordField("googleService", "googleService") +GCPDataplexAspectType.GOOGLE_PROJECT_NAME = KeywordTextField("googleProjectName", "googleProjectName", "googleProjectName.text") +GCPDataplexAspectType.GOOGLE_PROJECT_ID = KeywordTextField("googleProjectId", "googleProjectId", "googleProjectId.text") +GCPDataplexAspectType.GOOGLE_PROJECT_NUMBER = NumericField("googleProjectNumber", "googleProjectNumber") +GCPDataplexAspectType.GOOGLE_LOCATION = KeywordField("googleLocation", "googleLocation") +GCPDataplexAspectType.GOOGLE_LOCATION_TYPE = KeywordField("googleLocationType", "googleLocationType") +GCPDataplexAspectType.GOOGLE_LABELS = KeywordField("googleLabels", "googleLabels") +GCPDataplexAspectType.GOOGLE_TAGS = KeywordField("googleTags", "googleTags") +GCPDataplexAspectType.CLOUD_UNIFORM_RESOURCE_NAME = KeywordField("cloudUniformResourceName", "cloudUniformResourceName") +GCPDataplexAspectType.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") +GCPDataplexAspectType.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") +GCPDataplexAspectType.ANOMALO_CHECKS = RelationField("anomaloChecks") +GCPDataplexAspectType.APPLICATION = RelationField("application") +GCPDataplexAspectType.APPLICATION_FIELD = RelationField("applicationField") +GCPDataplexAspectType.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +GCPDataplexAspectType.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +GCPDataplexAspectType.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +GCPDataplexAspectType.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +GCPDataplexAspectType.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") +GCPDataplexAspectType.MODEL_IMPLEMENTED_ATTRIBUTES = RelationField("modelImplementedAttributes") +GCPDataplexAspectType.METRICS = RelationField("metrics") 
+GCPDataplexAspectType.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +GCPDataplexAspectType.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +GCPDataplexAspectType.GCP_DATAPLEX_ASPECT_TYPE_ENTRIES = RelationField("gcpDataplexAspectTypeEntries") +GCPDataplexAspectType.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField("gcpDataplexAspectTypeMetadataEntities") +GCPDataplexAspectType.MEANINGS = RelationField("meanings") +GCPDataplexAspectType.MC_MONITORS = RelationField("mcMonitors") +GCPDataplexAspectType.MC_INCIDENTS = RelationField("mcIncidents") +GCPDataplexAspectType.PARTIAL_CHILD_FIELDS = RelationField("partialChildFields") +GCPDataplexAspectType.PARTIAL_CHILD_OBJECTS = RelationField("partialChildObjects") +GCPDataplexAspectType.INPUT_TO_PROCESSES = RelationField("inputToProcesses") +GCPDataplexAspectType.OUTPUT_FROM_PROCESSES = RelationField("outputFromProcesses") +GCPDataplexAspectType.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +GCPDataplexAspectType.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +GCPDataplexAspectType.FILES = RelationField("files") +GCPDataplexAspectType.LINKS = RelationField("links") +GCPDataplexAspectType.README = RelationField("readme") +GCPDataplexAspectType.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +GCPDataplexAspectType.SODA_CHECKS = RelationField("sodaChecks") +GCPDataplexAspectType.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs") +GCPDataplexAspectType.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs") \ No newline at end of file diff --git a/pyatlan_v9/model/assets/gcp_dataplex_related.py b/pyatlan_v9/model/assets/gcp_dataplex_related.py new file mode 100644 index 000000000..d980fd8de --- /dev/null +++ b/pyatlan_v9/model/assets/gcp_dataplex_related.py @@ -0,0 +1,69 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. 
+ +""" +Related type classes for GCPDataplex module. + +This module contains all Related{Type} classes for the GCPDataplex type hierarchy. +These classes are used for relationship attributes to reference related entities. +""" + +from __future__ import annotations + +from typing import Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .catalog_related import RelatedCatalog +from .referenceable_related import RelatedReferenceable + +__all__ = [ + "RelatedGCPDataplex", + "RelatedGCPDataplexAspectType", +] + + +class RelatedGCPDataplex(RelatedCatalog): + """ + Related entity reference for GCPDataplex assets. + + Extends RelatedCatalog with GCPDataplex-specific attributes. + """ + + # type_name inherited from parent with default=UNSET + # __post_init__ sets it to "GCPDataplex" so it serializes correctly + + def __post_init__(self) -> None: + RelatedReferenceable.__post_init__(self) + self.type_name = "GCPDataplex" + +class RelatedGCPDataplexAspectType(RelatedGCPDataplex): + """ + Related entity reference for GCPDataplexAspectType assets. + + Extends RelatedGCPDataplex with GCPDataplexAspectType-specific attributes. + """ + + # type_name inherited from parent with default=UNSET + # __post_init__ sets it to "GCPDataplexAspectType" so it serializes correctly + + gcp_dataplex_aspect_type_resource_name: Union[str, None, UnsetType] = UNSET + """Full GCP resource name of this Aspect Type (e.g. projects/{project}/locations/{location}/aspectTypes/{id}). 
Used to match against assetGCPDataplexAspectType on BigQuery entry assets.""" + + gcp_dataplex_aspect_type_project: Union[str, None, UnsetType] = UNSET + """GCP project in which this Aspect Type is defined.""" + + gcp_dataplex_aspect_type_location: Union[str, None, UnsetType] = UNSET + """GCP location (region or global) in which this Aspect Type is defined.""" + + gcp_dataplex_aspect_type_metadata_template: Union[str, None, UnsetType] = UNSET + """Full Dataplex metadataTemplate JSON schema, stored as a stringified JSON blob.""" + + gcp_dataplex_aspect_type_labels: Union[Dict[str, str], None, UnsetType] = UNSET + """GCP labels attached to this Aspect Type resource.""" + + def __post_init__(self) -> None: + RelatedReferenceable.__post_init__(self) + self.type_name = "GCPDataplexAspectType" diff --git a/pyatlan_v9/model/assets/gcs.py b/pyatlan_v9/model/assets/gcs.py index 03debc4cb..8b3ba16eb 100644 --- a/pyatlan_v9/model/assets/gcs.py +++ b/pyatlan_v9/model/assets/gcs.py @@ -41,7 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .gcs_related import RelatedGCS +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -94,6 +94,7 @@ class GCS(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +112,8 @@ class GCS(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "GCS" + gcs_storage_class: Union[str, None, 
UnsetType] = UNSET """Storage class of this asset.""" @@ -205,6 +208,11 @@ class GCS(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -260,66 +268,6 @@ class GCS(Asset): def __post_init__(self) -> None: self.type_name = "GCS" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this GCS instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"GCS validation failed: {errors}") - - def minimize(self) -> "GCS": - """ - Return a minimal copy of this GCS with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new GCS with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new GCS instance with only the minimum required fields. - """ - self.validate() - return GCS(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedGCS": - """ - Create a :class:`RelatedGCS` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedGCS reference to this asset. - """ - if self.guid is not UNSET: - return RelatedGCS(guid=self.guid) - return RelatedGCS(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -473,6 +421,11 @@ class GCSRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -555,6 +508,7 @@ class GCSNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -650,9 +604,6 @@ def _gcs_to_nested(gcs: GCS) -> GCSNested: is_incomplete=gcs.is_incomplete, provenance_type=gcs.provenance_type, home_id=gcs.home_id, - depth=gcs.depth, - immediate_upstream=gcs.immediate_upstream, - immediate_downstream=gcs.immediate_downstream, attributes=attrs, 
relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -682,6 +633,7 @@ def _gcs_from_nested(nested: GCSNested) -> GCS: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -690,9 +642,6 @@ def _gcs_from_nested(nested: GCSNested) -> GCS: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_gcs_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -757,6 +706,9 @@ def _gcs_from_nested_bytes(data: bytes, serde: Serde) -> GCS: GCS.METRICS = RelationField("metrics") GCS.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") GCS.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +GCS.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) GCS.MEANINGS = RelationField("meanings") GCS.MC_MONITORS = RelationField("mcMonitors") GCS.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/gcs_bucket.py b/pyatlan_v9/model/assets/gcs_bucket.py index 3fefd08db..9f76ed1b4 100644 --- a/pyatlan_v9/model/assets/gcs_bucket.py +++ b/pyatlan_v9/model/assets/gcs_bucket.py @@ -42,7 +42,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .gcs_related import RelatedGCSBucket, RelatedGCSObject +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gcs_related import RelatedGCSObject from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from 
.monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -102,6 +103,7 @@ class GCSBucket(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None GCS_OBJECTS: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -120,6 +122,8 @@ class GCSBucket(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "GCSBucket" + gcs_object_count: Union[int, None, UnsetType] = UNSET """Number of objects within the bucket.""" @@ -235,6 +239,11 @@ class GCSBucket(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + gcs_objects: Union[List[RelatedGCSObject], None, UnsetType] = UNSET """GCS objects within this bucket.""" @@ -293,66 +302,6 @@ class GCSBucket(Asset): def __post_init__(self) -> None: self.type_name = "GCSBucket" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this GCSBucket instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"GCSBucket validation failed: {errors}") - - def minimize(self) -> "GCSBucket": - """ - Return a minimal copy of this GCSBucket with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new GCSBucket with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new GCSBucket instance with only the minimum required fields. - """ - self.validate() - return GCSBucket(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedGCSBucket": - """ - Create a :class:`RelatedGCSBucket` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedGCSBucket reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedGCSBucket(guid=self.guid) - return RelatedGCSBucket(qualified_name=self.qualified_name) - @classmethod @init_guid def creator(cls, *, name: str, connection_qualified_name: str) -> "GCSBucket": @@ -582,6 +531,11 @@ class GCSBucketRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + gcs_objects: Union[List[RelatedGCSObject], None, UnsetType] = UNSET """GCS objects within this bucket.""" @@ -671,6 +625,7 @@ class GCSBucketNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "gcs_objects", "meanings", "mc_monitors", @@ -783,9 +738,6 @@ def _gcs_bucket_to_nested(gcs_bucket: GCSBucket) -> GCSBucketNested: is_incomplete=gcs_bucket.is_incomplete, provenance_type=gcs_bucket.provenance_type, home_id=gcs_bucket.home_id, - depth=gcs_bucket.depth, - immediate_upstream=gcs_bucket.immediate_upstream, - immediate_downstream=gcs_bucket.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -817,6 +769,7 @@ def _gcs_bucket_from_nested(nested: GCSBucketNested) -> GCSBucket: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -825,9 +778,6 @@ def _gcs_bucket_from_nested(nested: GCSBucketNested) -> GCSBucket: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_gcs_bucket_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -919,6 +869,9 @@ def _gcs_bucket_from_nested_bytes(data: bytes, serde: Serde) -> GCSBucket: GCSBucket.METRICS = RelationField("metrics") GCSBucket.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") GCSBucket.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +GCSBucket.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) GCSBucket.GCS_OBJECTS = RelationField("gcsObjects") GCSBucket.MEANINGS = RelationField("meanings") GCSBucket.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/gcs_object.py b/pyatlan_v9/model/assets/gcs_object.py index 368c599be..b2d39fecb 100644 --- a/pyatlan_v9/model/assets/gcs_object.py +++ b/pyatlan_v9/model/assets/gcs_object.py @@ -44,7 +44,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric -from .gcs_related import RelatedGCSBucket, RelatedGCSObject +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gcs_related import RelatedGCSBucket from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -112,6 +113,7 @@ class GCSObject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None GCS_BUCKET: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -130,6 +132,8 @@ class GCSObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "GCSObject" + gcs_bucket_name: Union[str, None, UnsetType] = UNSET """Simple name of the 
bucket in which this object exists.""" @@ -273,6 +277,11 @@ class GCSObject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + gcs_bucket: Union[RelatedGCSBucket, None, UnsetType] = UNSET """GCS bucket in which the object exists.""" @@ -337,76 +346,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this GCSObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.gcs_bucket is UNSET: - errors.append("gcs_bucket is required for creation") - if self.gcs_bucket_name is UNSET: - errors.append("gcs_bucket_name is required for creation") - if self.gcs_bucket_qualified_name is UNSET: - errors.append("gcs_bucket_qualified_name is required for creation") - if errors: - raise ValueError(f"GCSObject validation failed: {errors}") - - def minimize(self) -> "GCSObject": - """ - Return a minimal copy of this GCSObject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new GCSObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new GCSObject instance with only the minimum required fields. - """ - self.validate() - return GCSObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedGCSObject": - """ - Create a :class:`RelatedGCSObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedGCSObject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedGCSObject(guid=self.guid) - return RelatedGCSObject(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -745,6 +684,11 @@ class GCSObjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + gcs_bucket: Union[RelatedGCSBucket, None, UnsetType] = UNSET """GCS bucket in which the object exists.""" @@ -834,6 +778,7 @@ class GCSObjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "gcs_bucket", "meanings", "mc_monitors", @@ -966,9 +911,6 @@ def _gcs_object_to_nested(gcs_object: GCSObject) -> GCSObjectNested: is_incomplete=gcs_object.is_incomplete, provenance_type=gcs_object.provenance_type, home_id=gcs_object.home_id, - depth=gcs_object.depth, - immediate_upstream=gcs_object.immediate_upstream, - immediate_downstream=gcs_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1000,6 +942,7 @@ def _gcs_object_from_nested(nested: GCSObjectNested) -> GCSObject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1008,9 +951,6 @@ def _gcs_object_from_nested(nested: GCSObjectNested) -> GCSObject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_gcs_object_attrs(attrs), # Merged relationship attributes 
**merged_rels, @@ -1120,6 +1060,9 @@ def _gcs_object_from_nested_bytes(data: bytes, serde: Serde) -> GCSObject: GCSObject.METRICS = RelationField("metrics") GCSObject.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") GCSObject.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +GCSObject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) GCSObject.GCS_BUCKET = RelationField("gcsBucket") GCSObject.MEANINGS = RelationField("meanings") GCSObject.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/google.py b/pyatlan_v9/model/assets/google.py index f9518bb64..2bd8e7a1c 100644 --- a/pyatlan_v9/model/assets/google.py +++ b/pyatlan_v9/model/assets/google.py @@ -36,10 +36,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cloud_related import RelatedGoogle from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable @@ -61,7 +61,7 @@ class Google(Asset): GOOGLE_SERVICE: ClassVar[Any] = None GOOGLE_PROJECT_NAME: ClassVar[Any] = None GOOGLE_PROJECT_ID: ClassVar[Any] = None - GOOGLE_PROJECT_NUMBER: ClassVar[Any] = None + CLOUD_PROJECT_NUMBER: ClassVar[Any] = None GOOGLE_LOCATION: ClassVar[Any] = None GOOGLE_LOCATION_TYPE: ClassVar[Any] = None GOOGLE_LABELS: ClassVar[Any] = None @@ -77,6 +77,7 @@ class Google(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -88,6 
+89,8 @@ class Google(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Google" + google_service: Union[str, None, UnsetType] = UNSET """Service in Google in which the asset exists.""" @@ -97,7 +100,7 @@ class Google(Asset): google_project_id: Union[str, None, UnsetType] = UNSET """ID of the project in which the asset exists.""" - google_project_number: Union[int, None, UnsetType] = UNSET + cloud_project_number: Union[int, None, UnsetType] = UNSET """Number of the project in which the asset exists.""" google_location: Union[str, None, UnsetType] = UNSET @@ -147,6 +150,11 @@ class Google(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -184,66 +192,6 @@ class Google(Asset): def __post_init__(self) -> None: self.type_name = "Google" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Google instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Google validation failed: {errors}") - - def minimize(self) -> "Google": - """ - Return a minimal copy of this Google with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Google with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Google instance with only the minimum required fields. - """ - self.validate() - return Google(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedGoogle": - """ - Create a :class:`RelatedGoogle` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedGoogle reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedGoogle(guid=self.guid) - return RelatedGoogle(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -308,7 +256,7 @@ class GoogleAttributes(AssetAttributes): google_project_id: Union[str, None, UnsetType] = UNSET """ID of the project in which the asset exists.""" - google_project_number: Union[int, None, UnsetType] = UNSET + cloud_project_number: Union[int, None, UnsetType] = UNSET """Number of the project in which the asset exists.""" google_location: Union[str, None, UnsetType] = UNSET @@ -362,6 +310,11 @@ class GoogleRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -426,6 +379,7 @@ class GoogleNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -445,7 +399,7 @@ def _populate_google_attrs(attrs: GoogleAttributes, obj: Google) -> None: attrs.google_service = obj.google_service attrs.google_project_name = obj.google_project_name attrs.google_project_id = obj.google_project_id - attrs.google_project_number = obj.google_project_number + attrs.cloud_project_number = obj.cloud_project_number attrs.google_location = obj.google_location attrs.google_location_type = obj.google_location_type attrs.google_labels = obj.google_labels @@ -459,7 +413,7 @@ def _extract_google_attrs(attrs: GoogleAttributes) -> dict: 
result["google_service"] = attrs.google_service result["google_project_name"] = attrs.google_project_name result["google_project_id"] = attrs.google_project_id - result["google_project_number"] = attrs.google_project_number + result["cloud_project_number"] = attrs.cloud_project_number result["google_location"] = attrs.google_location result["google_location_type"] = attrs.google_location_type result["google_labels"] = attrs.google_labels @@ -501,9 +455,6 @@ def _google_to_nested(google: Google) -> GoogleNested: is_incomplete=google.is_incomplete, provenance_type=google.provenance_type, home_id=google.home_id, - depth=google.depth, - immediate_upstream=google.immediate_upstream, - immediate_downstream=google.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -533,6 +484,7 @@ def _google_from_nested(nested: GoogleNested) -> Google: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -541,9 +493,6 @@ def _google_from_nested(nested: GoogleNested) -> Google: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_google_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -578,9 +527,7 @@ def _google_from_nested_bytes(data: bytes, serde: Serde) -> Google: Google.GOOGLE_PROJECT_ID = KeywordTextField( "googleProjectId", "googleProjectId", "googleProjectId.text" ) -Google.GOOGLE_PROJECT_NUMBER = NumericField( - "googleProjectNumber", "googleProjectNumber" -) +Google.CLOUD_PROJECT_NUMBER = NumericField("cloudProjectNumber", "cloudProjectNumber") Google.GOOGLE_LOCATION = KeywordField("googleLocation", 
"googleLocation") Google.GOOGLE_LOCATION_TYPE = KeywordField("googleLocationType", "googleLocationType") Google.GOOGLE_LABELS = KeywordField("googleLabels", "googleLabels") @@ -598,6 +545,9 @@ def _google_from_nested_bytes(data: bytes, serde: Serde) -> Google: Google.METRICS = RelationField("metrics") Google.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Google.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Google.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Google.MEANINGS = RelationField("meanings") Google.MC_MONITORS = RelationField("mcMonitors") Google.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/iceberg.py b/pyatlan_v9/model/assets/iceberg.py index 4f269d6a8..4f2080141 100644 --- a/pyatlan_v9/model/assets/iceberg.py +++ b/pyatlan_v9/model/assets/iceberg.py @@ -47,8 +47,8 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .iceberg_related import RelatedIceberg from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -122,6 +122,7 @@ class Iceberg(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -143,6 +144,8 @@ class Iceberg(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Iceberg" + iceberg_parent_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the immediate parent namespace in which this asset exists.""" @@ 
-292,6 +295,11 @@ class Iceberg(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -367,66 +375,6 @@ class Iceberg(Asset): def __post_init__(self) -> None: self.type_name = "Iceberg" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Iceberg instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Iceberg validation failed: {errors}") - - def minimize(self) -> "Iceberg": - """ - Return a minimal copy of this Iceberg with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Iceberg with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Iceberg instance with only the minimum required fields. - """ - self.validate() - return Iceberg(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIceberg": - """ - Create a :class:`RelatedIceberg` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIceberg reference to this asset. - """ - if self.guid is not UNSET: - return RelatedIceberg(guid=self.guid) - return RelatedIceberg(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -635,6 +583,11 @@ class IcebergRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -747,6 +700,7 @@ class IcebergNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -886,9 +840,6 @@ def _iceberg_to_nested(iceberg: Iceberg) -> IcebergNested: is_incomplete=iceberg.is_incomplete, provenance_type=iceberg.provenance_type, home_id=iceberg.home_id, - depth=iceberg.depth, - 
immediate_upstream=iceberg.immediate_upstream, - immediate_downstream=iceberg.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -918,6 +869,7 @@ def _iceberg_from_nested(nested: IcebergNested) -> Iceberg: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -926,9 +878,6 @@ def _iceberg_from_nested(nested: IcebergNested) -> Iceberg: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1030,6 +979,9 @@ def _iceberg_from_nested_bytes(data: bytes, serde: Serde) -> Iceberg: Iceberg.DBT_SOURCES = RelationField("dbtSources") Iceberg.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Iceberg.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Iceberg.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Iceberg.MEANINGS = RelationField("meanings") Iceberg.MC_MONITORS = RelationField("mcMonitors") Iceberg.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/iceberg_catalog.py b/pyatlan_v9/model/assets/iceberg_catalog.py index 1b1e5f120..d12958459 100644 --- a/pyatlan_v9/model/assets/iceberg_catalog.py +++ b/pyatlan_v9/model/assets/iceberg_catalog.py @@ -48,8 +48,8 @@ RelatedDbtTest, ) from .fabric_related import RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .iceberg_related import RelatedIcebergCatalog from .model_related import RelatedModelAttribute, RelatedModelEntity 
from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -131,6 +131,7 @@ class IcebergCatalog(Asset): SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -153,6 +154,8 @@ class IcebergCatalog(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "IcebergCatalog" + iceberg_catalog_type: Union[str, None, UnsetType] = UNSET """Type of the Iceberg catalog (e.g., 'hadoop', 'hive', 'nessie', 'rest').""" @@ -323,6 +326,11 @@ class IcebergCatalog(Asset): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -401,66 +409,6 @@ class IcebergCatalog(Asset): def __post_init__(self) -> None: self.type_name = "IcebergCatalog" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this IcebergCatalog instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"IcebergCatalog validation failed: {errors}") - - def minimize(self) -> "IcebergCatalog": - """ - Return a minimal copy of this IcebergCatalog with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new IcebergCatalog with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new IcebergCatalog instance with only the minimum required fields. - """ - self.validate() - return IcebergCatalog(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIcebergCatalog": - """ - Create a :class:`RelatedIcebergCatalog` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIcebergCatalog reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedIcebergCatalog(guid=self.guid) - return RelatedIcebergCatalog(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -690,6 +638,11 @@ class IcebergCatalogRelationshipAttributes(AssetRelationshipAttributes): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -808,6 +761,7 @@ class IcebergCatalogNested(AssetNested): "sql_dbt_sources", "dbt_seed_assets", "fabric_workspace", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -964,9 +918,6 @@ def _iceberg_catalog_to_nested(iceberg_catalog: IcebergCatalog) -> IcebergCatalo is_incomplete=iceberg_catalog.is_incomplete, provenance_type=iceberg_catalog.provenance_type, home_id=iceberg_catalog.home_id, - depth=iceberg_catalog.depth, - immediate_upstream=iceberg_catalog.immediate_upstream, - immediate_downstream=iceberg_catalog.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1000,6 +951,7 @@ def _iceberg_catalog_from_nested(nested: IcebergCatalogNested) -> IcebergCatalog updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1008,9 +960,6 @@ def 
_iceberg_catalog_from_nested(nested: IcebergCatalogNested) -> IcebergCatalog is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_catalog_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1137,6 +1086,9 @@ def _iceberg_catalog_from_nested_bytes(data: bytes, serde: Serde) -> IcebergCata IcebergCatalog.SQL_DBT_SOURCES = RelationField("sqlDBTSources") IcebergCatalog.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") IcebergCatalog.FABRIC_WORKSPACE = RelationField("fabricWorkspace") +IcebergCatalog.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) IcebergCatalog.MEANINGS = RelationField("meanings") IcebergCatalog.MC_MONITORS = RelationField("mcMonitors") IcebergCatalog.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/iceberg_column.py b/pyatlan_v9/model/assets/iceberg_column.py index d1dadc09f..d83d6e901 100644 --- a/pyatlan_v9/model/assets/iceberg_column.py +++ b/pyatlan_v9/model/assets/iceberg_column.py @@ -50,8 +50,8 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .iceberg_related import RelatedIcebergColumn from .model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import RelatedMongoDBCollection from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -149,6 +149,7 @@ class IcebergColumn(Asset): PARENT_COLUMN_NAME: ClassVar[Any] = None COLUMN_DISTINCT_VALUES_COUNT: ClassVar[Any] = None COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[Any] = None + COLUMN_DISTINCT_VALUES_PERCENTAGE: ClassVar[Any] = None COLUMN_HISTOGRAM: ClassVar[Any] = None COLUMN_MAX: ClassVar[Any] = None COLUMN_MIN: ClassVar[Any] = None @@ -220,6 +221,7 @@ 
class IcebergColumn(Asset): DBT_MODEL_COLUMNS: ClassVar[Any] = None COLUMN_DBT_MODEL_COLUMNS: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MONGO_DB_COLLECTION: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -254,6 +256,8 @@ class IcebergColumn(Asset): SQL_INSIGHT_FILTERS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "IcebergColumn" + iceberg_parent_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the immediate parent namespace in which this asset exists.""" @@ -427,6 +431,9 @@ class IcebergColumn(Asset): column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" + column_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" @@ -654,6 +661,11 @@ class IcebergColumn(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -772,69 +784,6 @@ class IcebergColumn(Asset): def __post_init__(self) -> None: self.type_name = "IcebergColumn" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - 
Dry-run validation of this IcebergColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.order is UNSET: - errors.append("order is required for creation") - if errors: - raise ValueError(f"IcebergColumn validation failed: {errors}") - - def minimize(self) -> "IcebergColumn": - """ - Return a minimal copy of this IcebergColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new IcebergColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new IcebergColumn instance with only the minimum required fields. - """ - self.validate() - return IcebergColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIcebergColumn": - """ - Create a :class:`RelatedIcebergColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIcebergColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedIcebergColumn(guid=self.guid) - return RelatedIcebergColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1063,6 +1012,9 @@ class IcebergColumnAttributes(AssetAttributes): column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" + column_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" @@ -1294,6 +1246,11 @@ class IcebergColumnRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1459,6 +1416,7 @@ class IcebergColumnNested(AssetNested): "dbt_model_columns", "column_dbt_model_columns", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mongo_db_collection", "mc_monitors", @@ -1563,6 +1521,7 @@ def _populate_iceberg_column_attrs( attrs.parent_column_name = obj.parent_column_name attrs.column_distinct_values_count = obj.column_distinct_values_count attrs.column_distinct_values_count_long = obj.column_distinct_values_count_long + attrs.column_distinct_values_percentage = obj.column_distinct_values_percentage attrs.column_histogram = 
obj.column_histogram attrs.column_max = obj.column_max attrs.column_min = obj.column_min @@ -1684,6 +1643,9 @@ def _extract_iceberg_column_attrs(attrs: IcebergColumnAttributes) -> dict: result["column_distinct_values_count_long"] = ( attrs.column_distinct_values_count_long ) + result["column_distinct_values_percentage"] = ( + attrs.column_distinct_values_percentage + ) result["column_histogram"] = attrs.column_histogram result["column_max"] = attrs.column_max result["column_min"] = attrs.column_min @@ -1769,9 +1731,6 @@ def _iceberg_column_to_nested(iceberg_column: IcebergColumn) -> IcebergColumnNes is_incomplete=iceberg_column.is_incomplete, provenance_type=iceberg_column.provenance_type, home_id=iceberg_column.home_id, - depth=iceberg_column.depth, - immediate_upstream=iceberg_column.immediate_upstream, - immediate_downstream=iceberg_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1805,6 +1764,7 @@ def _iceberg_column_from_nested(nested: IcebergColumnNested) -> IcebergColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1813,9 +1773,6 @@ def _iceberg_column_from_nested(nested: IcebergColumnNested) -> IcebergColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1952,6 +1909,9 @@ def _iceberg_column_from_nested_bytes(data: bytes, serde: Serde) -> IcebergColum IcebergColumn.COLUMN_DISTINCT_VALUES_COUNT_LONG = NumericField( "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" ) 
+IcebergColumn.COLUMN_DISTINCT_VALUES_PERCENTAGE = NumericField( + "columnDistinctValuesPercentage", "columnDistinctValuesPercentage" +) IcebergColumn.COLUMN_HISTOGRAM = KeywordField("columnHistogram", "columnHistogram") IcebergColumn.COLUMN_MAX = NumericField("columnMax", "columnMax") IcebergColumn.COLUMN_MIN = NumericField("columnMin", "columnMin") @@ -2080,6 +2040,9 @@ def _iceberg_column_from_nested_bytes(data: bytes, serde: Serde) -> IcebergColum IcebergColumn.DBT_MODEL_COLUMNS = RelationField("dbtModelColumns") IcebergColumn.COLUMN_DBT_MODEL_COLUMNS = RelationField("columnDbtModelColumns") IcebergColumn.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +IcebergColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) IcebergColumn.MEANINGS = RelationField("meanings") IcebergColumn.MONGO_DB_COLLECTION = RelationField("mongoDBCollection") IcebergColumn.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/iceberg_namespace.py b/pyatlan_v9/model/assets/iceberg_namespace.py index 3588e49a3..9ff257822 100644 --- a/pyatlan_v9/model/assets/iceberg_namespace.py +++ b/pyatlan_v9/model/assets/iceberg_namespace.py @@ -49,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .iceberg_related import RelatedIcebergNamespace from .model_related import RelatedModelAttribute, RelatedModelEntity @@ -148,6 +149,7 @@ class IcebergNamespace(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None ICEBERG_SUB_NAMESPACES: ClassVar[Any] = None ICEBERG_PARENT_NAMESPACE: ClassVar[Any] = None @@ -185,6 +187,8 @@ class IcebergNamespace(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None 
+ type_name: Union[str, UnsetType] = "IcebergNamespace" + iceberg_parent_namespace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the immediate parent namespace in which this asset exists.""" @@ -354,6 +358,11 @@ class IcebergNamespace(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -491,70 +500,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this IcebergNamespace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"IcebergNamespace validation failed: {errors}") - - def minimize(self) -> "IcebergNamespace": - """ - Return a minimal copy of this IcebergNamespace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new IcebergNamespace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new IcebergNamespace instance with only the minimum required fields. - """ - self.validate() - return IcebergNamespace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIcebergNamespace": - """ - Create a :class:`RelatedIcebergNamespace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIcebergNamespace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedIcebergNamespace(guid=self.guid) - return RelatedIcebergNamespace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -785,6 +730,11 @@ class IcebergNamespaceRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -957,6 +907,7 @@ class IcebergNamespaceNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "iceberg_sub_namespaces", "iceberg_parent_namespace", @@ -1126,9 +1077,6 @@ def _iceberg_namespace_to_nested( is_incomplete=iceberg_namespace.is_incomplete, provenance_type=iceberg_namespace.provenance_type, home_id=iceberg_namespace.home_id, - depth=iceberg_namespace.depth, - immediate_upstream=iceberg_namespace.immediate_upstream, - immediate_downstream=iceberg_namespace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1162,6 +1110,7 @@ def _iceberg_namespace_from_nested(nested: IcebergNamespaceNested) -> IcebergNam updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1170,9 +1119,6 @@ def 
_iceberg_namespace_from_nested(nested: IcebergNamespaceNested) -> IcebergNam is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_namespace_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1300,6 +1246,9 @@ def _iceberg_namespace_from_nested_bytes(data: bytes, serde: Serde) -> IcebergNa IcebergNamespace.DBT_SOURCES = RelationField("dbtSources") IcebergNamespace.SQL_DBT_SOURCES = RelationField("sqlDBTSources") IcebergNamespace.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +IcebergNamespace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) IcebergNamespace.MEANINGS = RelationField("meanings") IcebergNamespace.ICEBERG_SUB_NAMESPACES = RelationField("icebergSubNamespaces") IcebergNamespace.ICEBERG_PARENT_NAMESPACE = RelationField("icebergParentNamespace") diff --git a/pyatlan_v9/model/assets/iceberg_table.py b/pyatlan_v9/model/assets/iceberg_table.py index 2c9711685..49ae2d007 100644 --- a/pyatlan_v9/model/assets/iceberg_table.py +++ b/pyatlan_v9/model/assets/iceberg_table.py @@ -47,8 +47,8 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .iceberg_related import RelatedIcebergTable from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -161,6 +161,7 @@ class IcebergTable(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = 
None @@ -188,6 +189,8 @@ class IcebergTable(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "IcebergTable" + iceberg_current_snapshot_id: Union[int, None, UnsetType] = UNSET """Current snapshot identifier for this Iceberg table.""" @@ -433,6 +436,11 @@ class IcebergTable(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -526,66 +534,6 @@ class IcebergTable(Asset): def __post_init__(self) -> None: self.type_name = "IcebergTable" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this IcebergTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"IcebergTable validation failed: {errors}") - - def minimize(self) -> "IcebergTable": - """ - Return a minimal copy of this IcebergTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new IcebergTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new IcebergTable instance with only the minimum required fields. - """ - self.validate() - return IcebergTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIcebergTable": - """ - Create a :class:`RelatedIcebergTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIcebergTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedIcebergTable(guid=self.guid) - return RelatedIcebergTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -890,6 +838,11 @@ class IcebergTableRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1022,6 +975,7 @@ class IcebergTableNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1233,9 +1187,6 @@ def _iceberg_table_to_nested(iceberg_table: IcebergTable) -> IcebergTableNested: is_incomplete=iceberg_table.is_incomplete, provenance_type=iceberg_table.provenance_type, home_id=iceberg_table.home_id, - depth=iceberg_table.depth, - immediate_upstream=iceberg_table.immediate_upstream, - immediate_downstream=iceberg_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1269,6 +1220,7 @@ def _iceberg_table_from_nested(nested: IcebergTableNested) -> IcebergTable: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1277,9 +1229,6 @@ def _iceberg_table_from_nested(nested: 
IcebergTableNested) -> IcebergTable: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_iceberg_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1451,6 +1400,9 @@ def _iceberg_table_from_nested_bytes(data: bytes, serde: Serde) -> IcebergTable: IcebergTable.DBT_SOURCES = RelationField("dbtSources") IcebergTable.SQL_DBT_SOURCES = RelationField("sqlDBTSources") IcebergTable.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +IcebergTable.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) IcebergTable.MEANINGS = RelationField("meanings") IcebergTable.MC_MONITORS = RelationField("mcMonitors") IcebergTable.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/incident.py b/pyatlan_v9/model/assets/incident.py index 1197231c3..4f8238168 100644 --- a/pyatlan_v9/model/assets/incident.py +++ b/pyatlan_v9/model/assets/incident.py @@ -27,10 +27,10 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedIncident from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable import ( @@ -57,7 +57,7 @@ class Incident(Referenceable): Base class for Incident assets. 
""" - INCIDENT_SEVERITY: ClassVar[Any] = None + ASSET_SEVERITY: ClassVar[Any] = None NAME: ClassVar[Any] = None DISPLAY_NAME: ClassVar[Any] = None DESCRIPTION: ClassVar[Any] = None @@ -197,6 +197,11 @@ class Incident(Referenceable): ASSET_SODA_CHECK_STATUSES: ClassVar[Any] = None ASSET_SODA_SOURCE_URL: ClassVar[Any] = None ASSET_ICON: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_SCORE_VALUE: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_ENTITIES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_AVG_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_MIN_SCORES: ClassVar[Any] = None ASSET_EXTERNAL_DQ_METADATA_DETAILS: ClassVar[Any] = None IS_PARTIAL: ClassVar[Any] = None IS_AI_GENERATED: ClassVar[Any] = None @@ -261,6 +266,7 @@ class Incident(Referenceable): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -272,7 +278,9 @@ class Incident(Referenceable): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None - incident_severity: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "Incident" + + asset_severity: Union[str, None, UnsetType] = UNSET """Status of this asset's severity.""" name: Union[str, None, UnsetType] = UNSET @@ -718,6 +726,31 @@ class Incident(Referenceable): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. (Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). 
Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -986,6 +1019,11 @@ class Incident(Referenceable): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1023,66 +1061,6 @@ class Incident(Referenceable): def __post_init__(self) -> None: self.type_name = "Incident" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Incident instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Incident validation failed: {errors}") - - def minimize(self) -> "Incident": - """ - Return a minimal copy of this Incident with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Incident with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Incident instance with only the minimum required fields. - """ - self.validate() - return Incident(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedIncident": - """ - Create a :class:`RelatedIncident` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedIncident reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedIncident(guid=self.guid) - return RelatedIncident(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1138,7 +1116,7 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> Incident: class IncidentAttributes(ReferenceableAttributes): """Incident-specific attributes for nested API format.""" - incident_severity: Union[str, None, UnsetType] = UNSET + asset_severity: Union[str, None, UnsetType] = UNSET """Status of this asset's severity.""" name: Union[str, None, UnsetType] = UNSET @@ -1584,6 +1562,31 @@ class IncidentAttributes(ReferenceableAttributes): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. (Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. 
Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -1856,6 +1859,11 @@ class IncidentRelationshipAttributes(ReferenceableRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1920,6 +1928,7 @@ class IncidentNested(ReferenceableNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1936,7 +1945,7 @@ class IncidentNested(ReferenceableNested): def _populate_incident_attrs(attrs: IncidentAttributes, obj: Incident) -> None: """Populate Incident-specific attributes on the attrs struct.""" _populate_referenceable_attrs(attrs, obj) - attrs.incident_severity = obj.incident_severity + attrs.asset_severity = obj.asset_severity attrs.name = obj.name attrs.display_name = obj.display_name attrs.description = 
obj.description @@ -2114,6 +2123,13 @@ def _populate_incident_attrs(attrs: IncidentAttributes, obj: Incident) -> None: attrs.asset_soda_check_statuses = obj.asset_soda_check_statuses attrs.asset_soda_source_url = obj.asset_soda_source_url attrs.asset_icon = obj.asset_icon + attrs.asset_external_dq_score_value = obj.asset_external_dq_score_value + attrs.asset_external_dq_test_entities = obj.asset_external_dq_test_entities + attrs.asset_external_dq_test_latest_scores = ( + obj.asset_external_dq_test_latest_scores + ) + attrs.asset_external_dq_test_avg_scores = obj.asset_external_dq_test_avg_scores + attrs.asset_external_dq_test_min_scores = obj.asset_external_dq_test_min_scores attrs.asset_external_dq_metadata_details = obj.asset_external_dq_metadata_details attrs.is_partial = obj.is_partial attrs.is_ai_generated = obj.is_ai_generated @@ -2187,7 +2203,7 @@ def _populate_incident_attrs(attrs: IncidentAttributes, obj: Incident) -> None: def _extract_incident_attrs(attrs: IncidentAttributes) -> dict: """Extract all Incident attributes from the attrs struct into a flat dict.""" result = _extract_referenceable_attrs(attrs) - result["incident_severity"] = attrs.incident_severity + result["asset_severity"] = attrs.asset_severity result["name"] = attrs.name result["display_name"] = attrs.display_name result["description"] = attrs.description @@ -2389,6 +2405,17 @@ def _extract_incident_attrs(attrs: IncidentAttributes) -> dict: result["asset_soda_check_statuses"] = attrs.asset_soda_check_statuses result["asset_soda_source_url"] = attrs.asset_soda_source_url result["asset_icon"] = attrs.asset_icon + result["asset_external_dq_score_value"] = attrs.asset_external_dq_score_value + result["asset_external_dq_test_entities"] = attrs.asset_external_dq_test_entities + result["asset_external_dq_test_latest_scores"] = ( + attrs.asset_external_dq_test_latest_scores + ) + result["asset_external_dq_test_avg_scores"] = ( + attrs.asset_external_dq_test_avg_scores + ) + 
result["asset_external_dq_test_min_scores"] = ( + attrs.asset_external_dq_test_min_scores + ) result["asset_external_dq_metadata_details"] = ( attrs.asset_external_dq_metadata_details ) @@ -2503,9 +2530,6 @@ def _incident_to_nested(incident: Incident) -> IncidentNested: is_incomplete=incident.is_incomplete, provenance_type=incident.provenance_type, home_id=incident.home_id, - depth=incident.depth, - immediate_upstream=incident.immediate_upstream, - immediate_downstream=incident.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2537,6 +2561,7 @@ def _incident_from_nested(nested: IncidentNested) -> Incident: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -2545,9 +2570,6 @@ def _incident_from_nested(nested: IncidentNested) -> Incident: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_incident_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2578,7 +2600,7 @@ def _incident_from_nested_bytes(data: bytes, serde: Serde) -> Incident: TextField, ) -Incident.INCIDENT_SEVERITY = KeywordField("incidentSeverity", "incidentSeverity") +Incident.ASSET_SEVERITY = KeywordField("assetSeverity", "assetSeverity") Incident.NAME = KeywordField("name", "name") Incident.DISPLAY_NAME = KeywordField("displayName", "displayName") Incident.DESCRIPTION = KeywordField("description", "description") @@ -2912,6 +2934,21 @@ def _incident_from_nested_bytes(data: bytes, serde: Serde) -> Incident: "assetSodaSourceURL", "assetSodaSourceURL" ) Incident.ASSET_ICON = KeywordField("assetIcon", "assetIcon") 
+Incident.ASSET_EXTERNAL_DQ_SCORE_VALUE = NumericField( + "assetExternalDQScoreValue", "assetExternalDQScoreValue" +) +Incident.ASSET_EXTERNAL_DQ_TEST_ENTITIES = KeywordField( + "assetExternalDQTestEntities", "assetExternalDQTestEntities" +) +Incident.ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES = NumericField( + "assetExternalDQTestLatestScores", "assetExternalDQTestLatestScores" +) +Incident.ASSET_EXTERNAL_DQ_TEST_AVG_SCORES = NumericField( + "assetExternalDQTestAvgScores", "assetExternalDQTestAvgScores" +) +Incident.ASSET_EXTERNAL_DQ_TEST_MIN_SCORES = NumericField( + "assetExternalDQTestMinScores", "assetExternalDQTestMinScores" +) Incident.ASSET_EXTERNAL_DQ_METADATA_DETAILS = KeywordField( "assetExternalDQMetadataDetails", "assetExternalDQMetadataDetails" ) @@ -3061,6 +3098,9 @@ def _incident_from_nested_bytes(data: bytes, serde: Serde) -> Incident: Incident.METRICS = RelationField("metrics") Incident.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Incident.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Incident.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Incident.MEANINGS = RelationField("meanings") Incident.MC_MONITORS = RelationField("mcMonitors") Incident.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/infrastructure.py b/pyatlan_v9/model/assets/infrastructure.py index 208ac1fd8..1940c061c 100644 --- a/pyatlan_v9/model/assets/infrastructure.py +++ b/pyatlan_v9/model/assets/infrastructure.py @@ -27,10 +27,10 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedInfrastructure from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import 
RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable import ( @@ -196,6 +196,11 @@ class Infrastructure(Referenceable): ASSET_SODA_CHECK_STATUSES: ClassVar[Any] = None ASSET_SODA_SOURCE_URL: ClassVar[Any] = None ASSET_ICON: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_SCORE_VALUE: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_ENTITIES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_AVG_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_MIN_SCORES: ClassVar[Any] = None ASSET_EXTERNAL_DQ_METADATA_DETAILS: ClassVar[Any] = None IS_PARTIAL: ClassVar[Any] = None IS_AI_GENERATED: ClassVar[Any] = None @@ -260,6 +265,7 @@ class Infrastructure(Referenceable): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -271,6 +277,8 @@ class Infrastructure(Referenceable): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Infrastructure" + name: Union[str, None, UnsetType] = UNSET """Name of this asset. Fallback for display purposes, if displayName is empty.""" @@ -714,6 +722,31 @@ class Infrastructure(Referenceable): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. (Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). 
Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -982,6 +1015,11 @@ class Infrastructure(Referenceable): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1019,66 +1057,6 @@ class Infrastructure(Referenceable): def __post_init__(self) -> None: self.type_name = "Infrastructure" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Infrastructure instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Infrastructure validation failed: {errors}") - - def minimize(self) -> "Infrastructure": - """ - Return a minimal copy of this Infrastructure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Infrastructure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Infrastructure instance with only the minimum required fields. - """ - self.validate() - return Infrastructure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedInfrastructure": - """ - Create a :class:`RelatedInfrastructure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedInfrastructure reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedInfrastructure(guid=self.guid) - return RelatedInfrastructure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1577,6 +1555,31 @@ class InfrastructureAttributes(ReferenceableAttributes): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. (Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. 
Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -1849,6 +1852,11 @@ class InfrastructureRelationshipAttributes(ReferenceableRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1915,6 +1923,7 @@ class InfrastructureNested(ReferenceableNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -2110,6 +2119,13 @@ def _populate_infrastructure_attrs( attrs.asset_soda_check_statuses = obj.asset_soda_check_statuses attrs.asset_soda_source_url = obj.asset_soda_source_url attrs.asset_icon = obj.asset_icon + attrs.asset_external_dq_score_value = obj.asset_external_dq_score_value + attrs.asset_external_dq_test_entities = obj.asset_external_dq_test_entities + attrs.asset_external_dq_test_latest_scores = ( + 
obj.asset_external_dq_test_latest_scores + ) + attrs.asset_external_dq_test_avg_scores = obj.asset_external_dq_test_avg_scores + attrs.asset_external_dq_test_min_scores = obj.asset_external_dq_test_min_scores attrs.asset_external_dq_metadata_details = obj.asset_external_dq_metadata_details attrs.is_partial = obj.is_partial attrs.is_ai_generated = obj.is_ai_generated @@ -2384,6 +2400,17 @@ def _extract_infrastructure_attrs(attrs: InfrastructureAttributes) -> dict: result["asset_soda_check_statuses"] = attrs.asset_soda_check_statuses result["asset_soda_source_url"] = attrs.asset_soda_source_url result["asset_icon"] = attrs.asset_icon + result["asset_external_dq_score_value"] = attrs.asset_external_dq_score_value + result["asset_external_dq_test_entities"] = attrs.asset_external_dq_test_entities + result["asset_external_dq_test_latest_scores"] = ( + attrs.asset_external_dq_test_latest_scores + ) + result["asset_external_dq_test_avg_scores"] = ( + attrs.asset_external_dq_test_avg_scores + ) + result["asset_external_dq_test_min_scores"] = ( + attrs.asset_external_dq_test_min_scores + ) result["asset_external_dq_metadata_details"] = ( attrs.asset_external_dq_metadata_details ) @@ -2498,9 +2525,6 @@ def _infrastructure_to_nested(infrastructure: Infrastructure) -> InfrastructureN is_incomplete=infrastructure.is_incomplete, provenance_type=infrastructure.provenance_type, home_id=infrastructure.home_id, - depth=infrastructure.depth, - immediate_upstream=infrastructure.immediate_upstream, - immediate_downstream=infrastructure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2534,6 +2558,7 @@ def _infrastructure_from_nested(nested: InfrastructureNested) -> Infrastructure: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -2542,9 +2567,6 @@ def _infrastructure_from_nested(nested: InfrastructureNested) -> Infrastructure: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_infrastructure_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2924,6 +2946,21 @@ def _infrastructure_from_nested_bytes(data: bytes, serde: Serde) -> Infrastructu "assetSodaSourceURL", "assetSodaSourceURL" ) Infrastructure.ASSET_ICON = KeywordField("assetIcon", "assetIcon") +Infrastructure.ASSET_EXTERNAL_DQ_SCORE_VALUE = NumericField( + "assetExternalDQScoreValue", "assetExternalDQScoreValue" +) +Infrastructure.ASSET_EXTERNAL_DQ_TEST_ENTITIES = KeywordField( + "assetExternalDQTestEntities", "assetExternalDQTestEntities" +) +Infrastructure.ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES = NumericField( + "assetExternalDQTestLatestScores", "assetExternalDQTestLatestScores" +) +Infrastructure.ASSET_EXTERNAL_DQ_TEST_AVG_SCORES = NumericField( + "assetExternalDQTestAvgScores", "assetExternalDQTestAvgScores" +) +Infrastructure.ASSET_EXTERNAL_DQ_TEST_MIN_SCORES = NumericField( + "assetExternalDQTestMinScores", "assetExternalDQTestMinScores" +) Infrastructure.ASSET_EXTERNAL_DQ_METADATA_DETAILS = KeywordField( "assetExternalDQMetadataDetails", "assetExternalDQMetadataDetails" ) @@ -3083,6 +3120,9 @@ def _infrastructure_from_nested_bytes(data: bytes, serde: Serde) -> Infrastructu Infrastructure.METRICS = RelationField("metrics") Infrastructure.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Infrastructure.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Infrastructure.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Infrastructure.MEANINGS = RelationField("meanings") Infrastructure.MC_MONITORS = 
RelationField("mcMonitors") Infrastructure.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/insight.py b/pyatlan_v9/model/assets/insight.py index 518f6fc6a..c02002415 100644 --- a/pyatlan_v9/model/assets/insight.py +++ b/pyatlan_v9/model/assets/insight.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedInsight from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +78,7 @@ class Insight(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class Insight(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Insight" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class Insight(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class Insight(Asset): def __post_init__(self) -> None: self.type_name = "Insight" - # 
========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Insight instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Insight validation failed: {errors}") - - def minimize(self) -> "Insight": - """ - Return a minimal copy of this Insight with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Insight with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Insight instance with only the minimum required fields. - """ - self.validate() - return Insight(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedInsight": - """ - Create a :class:`RelatedInsight` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedInsight reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedInsight(guid=self.guid) - return RelatedInsight(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class InsightRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class InsightNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _insight_to_nested(insight: Insight) -> InsightNested: is_incomplete=insight.is_incomplete, provenance_type=insight.provenance_type, home_id=insight.home_id, - depth=insight.depth, - immediate_upstream=insight.immediate_upstream, - immediate_downstream=insight.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -550,6 +501,7 @@ def _insight_from_nested(nested: InsightNested) -> Insight: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -558,9 +510,6 @@ def _insight_from_nested(nested: InsightNested) -> Insight: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_insight_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -598,6 +547,9 @@ def _insight_from_nested_bytes(data: bytes, serde: Serde) -> Insight: Insight.METRICS = RelationField("metrics") Insight.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Insight.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Insight.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Insight.MEANINGS = RelationField("meanings") Insight.MC_MONITORS = RelationField("mcMonitors") Insight.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/kafka.py b/pyatlan_v9/model/assets/kafka.py index 264bb599d..5e85b63c5 100644 --- a/pyatlan_v9/model/assets/kafka.py +++ b/pyatlan_v9/model/assets/kafka.py @@ -40,8 +40,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .kafka_related import RelatedKafka from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -78,6 +78,7 @@ class Kafka(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class Kafka(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Kafka" + catalog_dataset_guid: 
Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class Kafka(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class Kafka(Asset): def __post_init__(self) -> None: self.type_name = "Kafka" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Kafka instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Kafka validation failed: {errors}") - - def minimize(self) -> "Kafka": - """ - Return a minimal copy of this Kafka with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Kafka with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Kafka instance with only the minimum required fields. - """ - self.validate() - return Kafka(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedKafka": - """ - Create a :class:`RelatedKafka` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedKafka reference to this asset. - """ - if self.guid is not UNSET: - return RelatedKafka(guid=self.guid) - return RelatedKafka(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class KafkaRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class KafkaNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _kafka_to_nested(kafka: Kafka) -> KafkaNested: is_incomplete=kafka.is_incomplete, provenance_type=kafka.provenance_type, home_id=kafka.home_id, - depth=kafka.depth, - immediate_upstream=kafka.immediate_upstream, - immediate_downstream=kafka.immediate_downstream, 
attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -550,6 +501,7 @@ def _kafka_from_nested(nested: KafkaNested) -> Kafka: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -558,9 +510,6 @@ def _kafka_from_nested(nested: KafkaNested) -> Kafka: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_kafka_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -598,6 +547,9 @@ def _kafka_from_nested_bytes(data: bytes, serde: Serde) -> Kafka: Kafka.METRICS = RelationField("metrics") Kafka.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Kafka.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Kafka.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Kafka.MEANINGS = RelationField("meanings") Kafka.MC_MONITORS = RelationField("mcMonitors") Kafka.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/kafka_cluster.py b/pyatlan_v9/model/assets/kafka_cluster.py index 4fce09569..d0f1a97d7 100644 --- a/pyatlan_v9/model/assets/kafka_cluster.py +++ b/pyatlan_v9/model/assets/kafka_cluster.py @@ -41,8 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .kafka_related import RelatedKafkaCluster, RelatedKafkaTopic +from .kafka_related import RelatedKafkaTopic from .model_related 
import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -87,6 +88,7 @@ class KafkaCluster(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None KAFKA_TOPICS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -105,6 +107,8 @@ class KafkaCluster(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "KafkaCluster" + kafka_topic_count: Union[int, None, UnsetType] = UNSET """Number of topics in this cluster.""" @@ -178,6 +182,11 @@ class KafkaCluster(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -242,67 +251,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/cluster/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this KafkaCluster instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if errors: - raise ValueError(f"KafkaCluster validation failed: {errors}") - - def minimize(self) -> "KafkaCluster": - """ - Return a minimal copy of this KafkaCluster with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new KafkaCluster with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new KafkaCluster instance with only the minimum required fields. - """ - self.validate() - return KafkaCluster(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedKafkaCluster": - """ - Create a :class:`RelatedKafkaCluster` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedKafkaCluster reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedKafkaCluster(guid=self.guid) - return RelatedKafkaCluster(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -435,6 +383,11 @@ class KafkaClusterRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -526,6 +479,7 @@ class KafkaClusterNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "kafka_topics", "mc_monitors", @@ -610,9 +564,6 @@ def _kafka_cluster_to_nested(kafka_cluster: KafkaCluster) -> KafkaClusterNested: is_incomplete=kafka_cluster.is_incomplete, provenance_type=kafka_cluster.provenance_type, home_id=kafka_cluster.home_id, - depth=kafka_cluster.depth, - immediate_upstream=kafka_cluster.immediate_upstream, - immediate_downstream=kafka_cluster.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -646,6 +597,7 @@ def _kafka_cluster_from_nested(nested: KafkaClusterNested) -> KafkaCluster: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -654,9 +606,6 @@ def _kafka_cluster_from_nested(nested: KafkaClusterNested) -> KafkaCluster: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_kafka_cluster_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -723,6 +672,9 @@ def _kafka_cluster_from_nested_bytes(data: bytes, serde: Serde) -> KafkaCluster: KafkaCluster.METRICS = RelationField("metrics") KafkaCluster.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") KafkaCluster.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +KafkaCluster.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) KafkaCluster.MEANINGS = RelationField("meanings") KafkaCluster.KAFKA_TOPICS = RelationField("kafkaTopics") KafkaCluster.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/kafka_consumer_group.py b/pyatlan_v9/model/assets/kafka_consumer_group.py index fea8793c4..145d64087 100644 --- a/pyatlan_v9/model/assets/kafka_consumer_group.py +++ b/pyatlan_v9/model/assets/kafka_consumer_group.py @@ -42,8 +42,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .kafka_related import RelatedKafkaConsumerGroup, RelatedKafkaTopic +from .kafka_related import RelatedKafkaTopic from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -86,6 +87,7 @@ class KafkaConsumerGroup(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None KAFKA_TOPICS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class KafkaConsumerGroup(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "KafkaConsumerGroup" + kafka_consumer_group_topic_consumption_properties: Union[ List[Dict[str, Any]], None, UnsetType ] = UNSET @@ -173,6 +177,11 @@ class KafkaConsumerGroup(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -239,76 +248,6 @@ def __post_init__(self) -> None: r"^.+/consumer-group/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this KafkaConsumerGroup instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.kafka_topics is UNSET: - errors.append("kafka_topics is required for creation") - if self.kafka_topic_names is UNSET: - errors.append("kafka_topic_names is required for creation") - if self.kafka_topic_qualified_names is UNSET: - errors.append("kafka_topic_qualified_names is required for creation") - if errors: - raise ValueError(f"KafkaConsumerGroup validation failed: {errors}") - - def minimize(self) -> "KafkaConsumerGroup": - """ - Return a minimal copy of this KafkaConsumerGroup with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new KafkaConsumerGroup with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new KafkaConsumerGroup instance with only the minimum required fields. - """ - self.validate() - return KafkaConsumerGroup(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedKafkaConsumerGroup": - """ - Create a :class:`RelatedKafkaConsumerGroup` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedKafkaConsumerGroup reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedKafkaConsumerGroup(guid=self.guid) - return RelatedKafkaConsumerGroup(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -475,6 +414,11 @@ class KafkaConsumerGroupRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -566,6 +510,7 @@ class KafkaConsumerGroupNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "kafka_topics", "mc_monitors", @@ -660,9 +605,6 @@ def _kafka_consumer_group_to_nested( is_incomplete=kafka_consumer_group.is_incomplete, provenance_type=kafka_consumer_group.provenance_type, home_id=kafka_consumer_group.home_id, - depth=kafka_consumer_group.depth, - immediate_upstream=kafka_consumer_group.immediate_upstream, - immediate_downstream=kafka_consumer_group.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -698,6 +640,7 @@ def _kafka_consumer_group_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -706,9 +649,6 @@ def _kafka_consumer_group_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_kafka_consumer_group_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -781,6 +721,9 @@ def _kafka_consumer_group_from_nested_bytes( KafkaConsumerGroup.METRICS = RelationField("metrics") KafkaConsumerGroup.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") KafkaConsumerGroup.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +KafkaConsumerGroup.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) KafkaConsumerGroup.MEANINGS = RelationField("meanings") KafkaConsumerGroup.KAFKA_TOPICS = RelationField("kafkaTopics") KafkaConsumerGroup.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/kafka_field.py b/pyatlan_v9/model/assets/kafka_field.py index 64d8ced74..f3f951339 100644 --- a/pyatlan_v9/model/assets/kafka_field.py +++ b/pyatlan_v9/model/assets/kafka_field.py @@ -41,8 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .kafka_related import RelatedKafkaField, RelatedKafkaTopic +from .kafka_related import RelatedKafkaTopic from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -86,6 +87,7 @@ class KafkaField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None KAFKA_TOPIC: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class KafkaField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + 
type_name: Union[str, UnsetType] = "KafkaField" + kafka_field_data_type: Union[str, None, UnsetType] = UNSET """Data type of this field as defined in the schema, for example: string, int, record.""" @@ -174,6 +178,11 @@ class KafkaField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -240,74 +249,6 @@ def __post_init__(self) -> None: r"^.+/topic/[^/]+/field/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this KafkaField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.kafka_topic is UNSET: - errors.append("kafka_topic is required for creation") - if self.kafka_topic_qualified_name is UNSET: - errors.append("kafka_topic_qualified_name is required for creation") - if errors: - raise ValueError(f"KafkaField validation failed: {errors}") - - def minimize(self) -> "KafkaField": - """ - Return a minimal copy of this KafkaField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new KafkaField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new KafkaField instance with only the minimum required fields. - """ - self.validate() - return KafkaField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedKafkaField": - """ - Create a :class:`RelatedKafkaField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedKafkaField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedKafkaField(guid=self.guid) - return RelatedKafkaField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -437,6 +378,11 @@ class KafkaFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -526,6 +472,7 @@ class KafkaFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "kafka_topic", "mc_monitors", @@ -606,9 +553,6 @@ def _kafka_field_to_nested(kafka_field: KafkaField) -> KafkaFieldNested: is_incomplete=kafka_field.is_incomplete, provenance_type=kafka_field.provenance_type, home_id=kafka_field.home_id, - depth=kafka_field.depth, - immediate_upstream=kafka_field.immediate_upstream, - immediate_downstream=kafka_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -640,6 +584,7 @@ def _kafka_field_from_nested(nested: KafkaFieldNested) -> KafkaField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -648,9 +593,6 @@ def _kafka_field_from_nested(nested: KafkaFieldNested) -> KafkaField: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_kafka_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -715,6 +657,9 @@ def _kafka_field_from_nested_bytes(data: bytes, serde: Serde) -> KafkaField: KafkaField.METRICS = RelationField("metrics") KafkaField.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") KafkaField.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +KafkaField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) KafkaField.MEANINGS = RelationField("meanings") KafkaField.KAFKA_TOPIC = RelationField("kafkaTopic") KafkaField.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/kafka_related.py b/pyatlan_v9/model/assets/kafka_related.py index 52f4c8a32..56ac5fa85 100644 --- a/pyatlan_v9/model/assets/kafka_related.py +++ b/pyatlan_v9/model/assets/kafka_related.py @@ -224,7 +224,7 @@ class RelatedAzureEventHub(RelatedKafka): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "AzureEventHub" so it serializes correctly - azure_event_hub_status: Union[str, None, UnsetType] = UNSET + kafka_status: Union[str, None, UnsetType] = UNSET """Operational status of the Azure Event Hub at the source.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/kafka_topic.py b/pyatlan_v9/model/assets/kafka_topic.py index 8307da7f2..71d21f2b9 100644 --- a/pyatlan_v9/model/assets/kafka_topic.py +++ b/pyatlan_v9/model/assets/kafka_topic.py @@ -42,12 +42,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .kafka_related import ( RelatedKafkaCluster, 
RelatedKafkaConsumerGroup, RelatedKafkaField, - RelatedKafkaTopic, ) from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -100,6 +100,7 @@ class KafkaTopic(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None KAFKA_CONSUMER_GROUPS: ClassVar[Any] = None KAFKA_CLUSTER: ClassVar[Any] = None @@ -120,6 +121,8 @@ class KafkaTopic(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "KafkaTopic" + kafka_topic_is_internal: Union[bool, None, UnsetType] = UNSET """Whether this topic is an internal topic (true) or not (false).""" @@ -214,6 +217,11 @@ class KafkaTopic(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -286,72 +294,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/topic/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this KafkaTopic instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.kafka_cluster is UNSET: - errors.append("kafka_cluster is required for creation") - if errors: - raise ValueError(f"KafkaTopic validation failed: {errors}") - - def minimize(self) -> "KafkaTopic": - """ - Return a minimal copy of this KafkaTopic with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new KafkaTopic with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new KafkaTopic instance with only the minimum required fields. - """ - self.validate() - return KafkaTopic(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedKafkaTopic": - """ - Create a :class:`RelatedKafkaTopic` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedKafkaTopic reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedKafkaTopic(guid=self.guid) - return RelatedKafkaTopic(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -535,6 +477,11 @@ class KafkaTopicRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -632,6 +579,7 @@ class KafkaTopicNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "kafka_consumer_groups", "kafka_cluster", @@ -736,9 +684,6 @@ def _kafka_topic_to_nested(kafka_topic: KafkaTopic) -> KafkaTopicNested: is_incomplete=kafka_topic.is_incomplete, provenance_type=kafka_topic.provenance_type, home_id=kafka_topic.home_id, - depth=kafka_topic.depth, - immediate_upstream=kafka_topic.immediate_upstream, - immediate_downstream=kafka_topic.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -770,6 +715,7 @@ def _kafka_topic_from_nested(nested: KafkaTopicNested) -> KafkaTopic: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -778,9 +724,6 @@ def _kafka_topic_from_nested(nested: KafkaTopicNested) -> KafkaTopic: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_kafka_topic_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -870,6 +813,9 @@ def _kafka_topic_from_nested_bytes(data: bytes, serde: Serde) -> KafkaTopic: KafkaTopic.METRICS = RelationField("metrics") KafkaTopic.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") KafkaTopic.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +KafkaTopic.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) KafkaTopic.MEANINGS = RelationField("meanings") KafkaTopic.KAFKA_CONSUMER_GROUPS = RelationField("kafkaConsumerGroups") KafkaTopic.KAFKA_CLUSTER = RelationField("kafkaCluster") diff --git a/pyatlan_v9/model/assets/link.py b/pyatlan_v9/model/assets/link.py index 64e2ae972..12aecff6e 100644 --- a/pyatlan_v9/model/assets/link.py +++ b/pyatlan_v9/model/assets/link.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -84,6 +85,7 @@ class Link(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class Link(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Link" + icon: Union[str, None, UnsetType] = UNSET """Icon for the link.""" @@ -169,6 +173,11 @@ class Link(Asset): ) """Rules where this dataset is referenced.""" + 
gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -227,66 +236,6 @@ class Link(Asset): def __post_init__(self) -> None: self.type_name = "Link" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Link instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Link validation failed: {errors}") - - def minimize(self) -> "Link": - """ - Return a minimal copy of this Link with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Link with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Link instance with only the minimum required fields. 
- """ - self.validate() - return Link(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLink": - """ - Create a :class:`RelatedLink` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLink reference to this asset. - """ - if self.guid is not UNSET: - return RelatedLink(guid=self.guid) - return RelatedLink(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -413,6 +362,11 @@ class LinkRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -498,6 +452,7 @@ class LinkNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -576,9 +531,6 @@ def _link_to_nested(link: Link) -> LinkNested: is_incomplete=link.is_incomplete, provenance_type=link.provenance_type, home_id=link.home_id, - depth=link.depth, - immediate_upstream=link.immediate_upstream, - immediate_downstream=link.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -608,6 +560,7 @@ def _link_from_nested(nested: LinkNested) -> Link: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + 
meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -616,9 +569,6 @@ def _link_from_nested(nested: LinkNested) -> Link: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_link_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -666,6 +616,9 @@ def _link_from_nested_bytes(data: bytes, serde: Serde) -> Link: Link.METRICS = RelationField("metrics") Link.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Link.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Link.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Link.MEANINGS = RelationField("meanings") Link.MC_MONITORS = RelationField("mcMonitors") Link.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/looker.py b/pyatlan_v9/model/assets/looker.py index 705425ee4..58f72113e 100644 --- a/pyatlan_v9/model/assets/looker.py +++ b/pyatlan_v9/model/assets/looker.py @@ -40,8 +40,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .looker_related import RelatedLooker from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -79,6 +79,7 @@ class Looker(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: 
ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -96,6 +97,8 @@ class Looker(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Looker" + looker_slug: Union[str, None, UnsetType] = UNSET """An alpha-numeric slug for the underlying Looker asset that can be used to uniquely identify it""" @@ -148,6 +151,11 @@ class Looker(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -203,66 +211,6 @@ class Looker(Asset): def __post_init__(self) -> None: self.type_name = "Looker" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Looker instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Looker validation failed: {errors}") - - def minimize(self) -> "Looker": - """ - Return a minimal copy of this Looker with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Looker with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Looker instance with only the minimum required fields. - """ - self.validate() - return Looker(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLooker": - """ - Create a :class:`RelatedLooker` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLooker reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLooker(guid=self.guid) - return RelatedLooker(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -374,6 +322,11 @@ class LookerRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -460,6 +413,7 @@ class LookerNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -527,9 +481,6 @@ def _looker_to_nested(looker: Looker) -> LookerNested: is_incomplete=looker.is_incomplete, provenance_type=looker.provenance_type, home_id=looker.home_id, - depth=looker.depth, - immediate_upstream=looker.immediate_upstream, - immediate_downstream=looker.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -559,6 +510,7 @@ def _looker_from_nested(nested: LookerNested) -> Looker: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -567,9 +519,6 @@ def _looker_from_nested(nested: LookerNested) -> Looker: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -608,6 +557,9 @@ def _looker_from_nested_bytes(data: bytes, serde: Serde) -> Looker: Looker.METRICS = RelationField("metrics") Looker.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Looker.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Looker.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Looker.MEANINGS = RelationField("meanings") Looker.MC_MONITORS = RelationField("mcMonitors") Looker.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/looker_dashboard.py b/pyatlan_v9/model/assets/looker_dashboard.py index 50281a4fb..794a02e7d 100644 --- a/pyatlan_v9/model/assets/looker_dashboard.py +++ b/pyatlan_v9/model/assets/looker_dashboard.py @@ -41,9 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .looker_related import ( - RelatedLookerDashboard, RelatedLookerField, RelatedLookerFolder, RelatedLookerLook, @@ -93,6 +93,7 @@ class LookerDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None FOLDER: ClassVar[Any] = None LOOKS: ClassVar[Any] = None @@ -114,6 +115,8 @@ class LookerDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerDashboard" + folder_name: Union[str, None, UnsetType] = UNSET """Name of the parent folder in Looker that contains this 
dashboard.""" @@ -187,6 +190,11 @@ class LookerDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -260,74 +268,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.folder is UNSET: - errors.append("folder is required for creation") - if self.folder_name is UNSET: - errors.append("folder_name is required for creation") - if errors: - raise ValueError(f"LookerDashboard validation failed: {errors}") - - def minimize(self) -> "LookerDashboard": - """ - Return a minimal copy of this LookerDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerDashboard instance with only the minimum required fields. - """ - self.validate() - return LookerDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerDashboard": - """ - Create a :class:`RelatedLookerDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerDashboard(guid=self.guid) - return RelatedLookerDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -462,6 +402,11 @@ class LookerDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -562,6 +507,7 @@ class LookerDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "folder", "looks", @@ -653,9 +599,6 @@ def _looker_dashboard_to_nested( is_incomplete=looker_dashboard.is_incomplete, provenance_type=looker_dashboard.provenance_type, home_id=looker_dashboard.home_id, - depth=looker_dashboard.depth, - immediate_upstream=looker_dashboard.immediate_upstream, - immediate_downstream=looker_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -689,6 +632,7 @@ def _looker_dashboard_from_nested(nested: LookerDashboardNested) -> LookerDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -697,9 +641,6 @@ def _looker_dashboard_from_nested(nested: LookerDashboardNested) -> LookerDashbo is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -765,6 +706,9 @@ def _looker_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> LookerDash LookerDashboard.METRICS = RelationField("metrics") LookerDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerDashboard.MEANINGS = RelationField("meanings") LookerDashboard.FOLDER = RelationField("folder") LookerDashboard.LOOKS = RelationField("looks") diff --git a/pyatlan_v9/model/assets/looker_explore.py b/pyatlan_v9/model/assets/looker_explore.py index 8cbb37a40..31e20bf35 100644 --- a/pyatlan_v9/model/assets/looker_explore.py +++ b/pyatlan_v9/model/assets/looker_explore.py @@ -41,13 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .looker_related import ( - RelatedLookerExplore, - RelatedLookerField, - RelatedLookerModel, - RelatedLookerProject, -) +from .looker_related import RelatedLookerField, RelatedLookerModel, RelatedLookerProject from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -90,6 +86,7 @@ class LookerExplore(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MODEL: ClassVar[Any] = None PROJECT: ClassVar[Any] = None @@ -110,6 +107,8 @@ class LookerExplore(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerExplore" + project_name: Union[str, None, UnsetType] = UNSET """Name of the parent project of this Explore.""" @@ -177,6 +176,11 @@ class LookerExplore(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -249,76 +253,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerExplore instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model is UNSET: - errors.append("model is required for creation") - if self.model_name is UNSET: - errors.append("model_name is required for creation") - if self.project_name is UNSET: - errors.append("project_name is required for creation") - if errors: - raise ValueError(f"LookerExplore validation failed: {errors}") - - def minimize(self) -> "LookerExplore": - """ - Return a minimal copy of this LookerExplore with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerExplore with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerExplore instance with only the minimum required fields. - """ - self.validate() - return LookerExplore(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerExplore": - """ - Create a :class:`RelatedLookerExplore` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerExplore reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerExplore(guid=self.guid) - return RelatedLookerExplore(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -445,6 +379,11 @@ class LookerExploreRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -542,6 +481,7 @@ class LookerExploreNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "model", "project", @@ -624,9 +564,6 @@ def _looker_explore_to_nested(looker_explore: LookerExplore) -> LookerExploreNes is_incomplete=looker_explore.is_incomplete, provenance_type=looker_explore.provenance_type, home_id=looker_explore.home_id, - depth=looker_explore.depth, - immediate_upstream=looker_explore.immediate_upstream, - immediate_downstream=looker_explore.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -660,6 +597,7 @@ def _looker_explore_from_nested(nested: LookerExploreNested) -> LookerExplore: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -668,9 +606,6 @@ def _looker_explore_from_nested(nested: LookerExploreNested) -> LookerExplore: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_explore_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -722,6 +657,9 @@ def _looker_explore_from_nested_bytes(data: bytes, serde: Serde) -> LookerExplor LookerExplore.METRICS = RelationField("metrics") LookerExplore.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerExplore.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerExplore.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerExplore.MEANINGS = RelationField("meanings") LookerExplore.MODEL = RelationField("model") LookerExplore.PROJECT = RelationField("project") diff --git a/pyatlan_v9/model/assets/looker_field.py b/pyatlan_v9/model/assets/looker_field.py index 30696d8a6..5d264cb92 100644 --- a/pyatlan_v9/model/assets/looker_field.py +++ b/pyatlan_v9/model/assets/looker_field.py @@ -41,11 +41,11 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .looker_related import ( RelatedLookerDashboard, RelatedLookerExplore, - RelatedLookerField, RelatedLookerLook, RelatedLookerModel, RelatedLookerProject, @@ -102,6 +102,7 @@ class LookerField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MODEL: ClassVar[Any] = None EXPLORE: ClassVar[Any] = None @@ -126,6 +127,8 @@ class LookerField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerField" + project_name: Union[str, None, UnsetType] = UNSET """Name of the project in which this field exists.""" @@ -217,6 +220,11 @@ class LookerField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -301,76 +309,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model is UNSET: - errors.append("model is required for creation") - if self.model_name is UNSET: - errors.append("model_name is required for creation") - if self.project_name is UNSET: - errors.append("project_name is required for creation") - if errors: - raise ValueError(f"LookerField validation failed: {errors}") - - def minimize(self) -> "LookerField": - """ - Return a minimal copy of this LookerField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerField instance with only the minimum required fields. - """ - self.validate() - return LookerField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerField": - """ - Create a :class:`RelatedLookerField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerField(guid=self.guid) - return RelatedLookerField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -521,6 +459,11 @@ class LookerFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -628,6 +571,7 @@ class LookerFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "model", "explore", @@ -734,9 +678,6 @@ def _looker_field_to_nested(looker_field: LookerField) -> LookerFieldNested: is_incomplete=looker_field.is_incomplete, provenance_type=looker_field.provenance_type, home_id=looker_field.home_id, - depth=looker_field.depth, - immediate_upstream=looker_field.immediate_upstream, - immediate_downstream=looker_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -768,6 +709,7 @@ def _looker_field_from_nested(nested: LookerFieldNested) -> LookerField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -776,9 +718,6 @@ def _looker_field_from_nested(nested: LookerFieldNested) -> LookerField: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -862,6 +801,9 @@ def _looker_field_from_nested_bytes(data: bytes, serde: Serde) -> LookerField: LookerField.METRICS = RelationField("metrics") LookerField.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerField.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerField.MEANINGS = RelationField("meanings") LookerField.MODEL = RelationField("model") LookerField.EXPLORE = RelationField("explore") diff --git a/pyatlan_v9/model/assets/looker_folder.py b/pyatlan_v9/model/assets/looker_folder.py index 749b0abb6..b68e466e7 100644 --- a/pyatlan_v9/model/assets/looker_folder.py +++ b/pyatlan_v9/model/assets/looker_folder.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .looker_related import ( RelatedLookerDashboard, @@ -88,6 +89,7 @@ class LookerFolder(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None DASHBOARDS: ClassVar[Any] = None LOOKS: ClassVar[Any] = None @@ -109,6 +111,8 @@ class LookerFolder(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerFolder" + source_content_metadata_id: Union[int, None, UnsetType] = UNSET 
"""Identifier for the folder's content metadata in Looker.""" @@ -175,6 +179,11 @@ class LookerFolder(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -242,66 +251,6 @@ class LookerFolder(Asset): def __post_init__(self) -> None: self.type_name = "LookerFolder" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"LookerFolder validation failed: {errors}") - - def minimize(self) -> "LookerFolder": - """ - Return a minimal copy of this LookerFolder with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerFolder instance with only the minimum required fields. - """ - self.validate() - return LookerFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerFolder": - """ - Create a :class:`RelatedLookerFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerFolder reference to this asset. - """ - if self.guid is not UNSET: - return RelatedLookerFolder(guid=self.guid) - return RelatedLookerFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -427,6 +376,11 @@ class LookerFolderRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -527,6 +481,7 @@ class LookerFolderNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "dashboards", "looks", @@ -608,9 +563,6 @@ def _looker_folder_to_nested(looker_folder: LookerFolder) -> LookerFolderNested: is_incomplete=looker_folder.is_incomplete, provenance_type=looker_folder.provenance_type, home_id=looker_folder.home_id, - 
depth=looker_folder.depth, - immediate_upstream=looker_folder.immediate_upstream, - immediate_downstream=looker_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -644,6 +596,7 @@ def _looker_folder_from_nested(nested: LookerFolderNested) -> LookerFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -652,9 +605,6 @@ def _looker_folder_from_nested(nested: LookerFolderNested) -> LookerFolder: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_folder_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -707,6 +657,9 @@ def _looker_folder_from_nested_bytes(data: bytes, serde: Serde) -> LookerFolder: LookerFolder.METRICS = RelationField("metrics") LookerFolder.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerFolder.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerFolder.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerFolder.MEANINGS = RelationField("meanings") LookerFolder.DASHBOARDS = RelationField("dashboards") LookerFolder.LOOKS = RelationField("looks") diff --git a/pyatlan_v9/model/assets/looker_look.py b/pyatlan_v9/model/assets/looker_look.py index 4f002b172..06e101aec 100644 --- a/pyatlan_v9/model/assets/looker_look.py +++ b/pyatlan_v9/model/assets/looker_look.py @@ -41,12 +41,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from 
.gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .looker_related import ( RelatedLookerDashboard, RelatedLookerField, RelatedLookerFolder, - RelatedLookerLook, RelatedLookerModel, RelatedLookerQuery, RelatedLookerTile, @@ -98,6 +98,7 @@ class LookerLook(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None FOLDER: ClassVar[Any] = None DASHBOARD: ClassVar[Any] = None @@ -121,6 +122,8 @@ class LookerLook(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerLook" + folder_name: Union[str, None, UnsetType] = UNSET """Name of the folder in which the Look is organized.""" @@ -203,6 +206,11 @@ class LookerLook(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -282,74 +290,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerLook instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.folder is UNSET: - errors.append("folder is required for creation") - if self.folder_name is UNSET: - errors.append("folder_name is required for creation") - if errors: - raise ValueError(f"LookerLook validation failed: {errors}") - - def minimize(self) -> "LookerLook": - """ - Return a minimal copy of this LookerLook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerLook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerLook instance with only the minimum required fields. - """ - self.validate() - return LookerLook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerLook": - """ - Create a :class:`RelatedLookerLook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerLook reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerLook(guid=self.guid) - return RelatedLookerLook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -491,6 +431,11 @@ class LookerLookRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -595,6 +540,7 @@ class LookerLookNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "folder", "dashboard", @@ -688,9 +634,6 @@ def _looker_look_to_nested(looker_look: LookerLook) -> LookerLookNested: is_incomplete=looker_look.is_incomplete, provenance_type=looker_look.provenance_type, home_id=looker_look.home_id, - depth=looker_look.depth, - immediate_upstream=looker_look.immediate_upstream, - immediate_downstream=looker_look.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -722,6 +665,7 @@ def _looker_look_from_nested(nested: LookerLookNested) -> LookerLook: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -730,9 +674,6 @@ def _looker_look_from_nested(nested: LookerLookNested) -> LookerLook: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_look_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -797,6 +738,9 @@ def _looker_look_from_nested_bytes(data: bytes, serde: Serde) -> LookerLook: LookerLook.METRICS = RelationField("metrics") LookerLook.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerLook.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerLook.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerLook.MEANINGS = RelationField("meanings") LookerLook.FOLDER = RelationField("folder") LookerLook.DASHBOARD = RelationField("dashboard") diff --git a/pyatlan_v9/model/assets/looker_model.py b/pyatlan_v9/model/assets/looker_model.py index e2c80feb0..3911002f7 100644 --- a/pyatlan_v9/model/assets/looker_model.py +++ b/pyatlan_v9/model/assets/looker_model.py @@ -41,12 +41,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .looker_related import ( RelatedLookerExplore, RelatedLookerField, RelatedLookerLook, - RelatedLookerModel, RelatedLookerProject, RelatedLookerQuery, ) @@ -88,6 +88,7 @@ class LookerModel(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None PROJECT: ClassVar[Any] = None EXPLORES: ClassVar[Any] = None @@ -110,6 +111,8 @@ class LookerModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerModel" + project_name: 
Union[str, None, UnsetType] = UNSET """Name of the project in which the model exists.""" @@ -165,6 +168,11 @@ class LookerModel(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -241,74 +249,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_name is UNSET: - errors.append("project_name is required for creation") - if errors: - raise ValueError(f"LookerModel validation failed: {errors}") - - def minimize(self) -> "LookerModel": - """ - Return a minimal copy of this LookerModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerModel instance with only the minimum required fields. - """ - self.validate() - return LookerModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerModel": - """ - Create a :class:`RelatedLookerModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerModel(guid=self.guid) - return RelatedLookerModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -423,6 +363,11 @@ class LookerModelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -524,6 +469,7 @@ class LookerModelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "project", "explores", @@ -600,9 +546,6 @@ def _looker_model_to_nested(looker_model: LookerModel) -> LookerModelNested: is_incomplete=looker_model.is_incomplete, provenance_type=looker_model.provenance_type, home_id=looker_model.home_id, - depth=looker_model.depth, - immediate_upstream=looker_model.immediate_upstream, - immediate_downstream=looker_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -634,6 +577,7 @@ def _looker_model_from_nested(nested: LookerModelNested) -> LookerModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -642,9 +586,6 @@ def _looker_model_from_nested(nested: LookerModelNested) -> LookerModel: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -688,6 +629,9 @@ def _looker_model_from_nested_bytes(data: bytes, serde: Serde) -> LookerModel: LookerModel.METRICS = RelationField("metrics") LookerModel.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerModel.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerModel.MEANINGS = RelationField("meanings") LookerModel.PROJECT = RelationField("project") LookerModel.EXPLORES = RelationField("explores") diff --git a/pyatlan_v9/model/assets/looker_project.py b/pyatlan_v9/model/assets/looker_project.py index 52573353a..b1384b0ec 100644 --- a/pyatlan_v9/model/assets/looker_project.py +++ b/pyatlan_v9/model/assets/looker_project.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .looker_related import ( RelatedLookerExplore, @@ -85,6 +86,7 @@ class LookerProject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MODELS: ClassVar[Any] = None LOOKER_CHILD_PROJECTS: ClassVar[Any] = None @@ -108,6 +110,8 @@ class LookerProject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerProject" + looker_slug: Union[str, None, UnsetType] = 
UNSET """An alpha-numeric slug for the underlying Looker asset that can be used to uniquely identify it""" @@ -160,6 +164,11 @@ class LookerProject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -233,66 +242,6 @@ class LookerProject(Asset): def __post_init__(self) -> None: self.type_name = "LookerProject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"LookerProject validation failed: {errors}") - - def minimize(self) -> "LookerProject": - """ - Return a minimal copy of this LookerProject with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerProject instance with only the minimum required fields. - """ - self.validate() - return LookerProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerProject": - """ - Create a :class:`RelatedLookerProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerProject reference to this asset. - """ - if self.guid is not UNSET: - return RelatedLookerProject(guid=self.guid) - return RelatedLookerProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -404,6 +353,11 @@ class LookerProjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -510,6 +464,7 @@ class LookerProjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "models", "looker_child_projects", @@ -585,9 +540,6 @@ def _looker_project_to_nested(looker_project: LookerProject) -> LookerProjectNes is_incomplete=looker_project.is_incomplete, provenance_type=looker_project.provenance_type, 
home_id=looker_project.home_id, - depth=looker_project.depth, - immediate_upstream=looker_project.immediate_upstream, - immediate_downstream=looker_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -621,6 +573,7 @@ def _looker_project_from_nested(nested: LookerProjectNested) -> LookerProject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -629,9 +582,6 @@ def _looker_project_from_nested(nested: LookerProjectNested) -> LookerProject: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_project_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -676,6 +626,9 @@ def _looker_project_from_nested_bytes(data: bytes, serde: Serde) -> LookerProjec LookerProject.METRICS = RelationField("metrics") LookerProject.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerProject.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerProject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerProject.MEANINGS = RelationField("meanings") LookerProject.MODELS = RelationField("models") LookerProject.LOOKER_CHILD_PROJECTS = RelationField("lookerChildProjects") diff --git a/pyatlan_v9/model/assets/looker_query.py b/pyatlan_v9/model/assets/looker_query.py index 7f7dc0da9..e5a6df5e8 100644 --- a/pyatlan_v9/model/assets/looker_query.py +++ b/pyatlan_v9/model/assets/looker_query.py @@ -41,13 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from 
.data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .looker_related import ( - RelatedLookerLook, - RelatedLookerModel, - RelatedLookerQuery, - RelatedLookerTile, -) +from .looker_related import RelatedLookerLook, RelatedLookerModel, RelatedLookerTile from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -89,6 +85,7 @@ class LookerQuery(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None TILES: ClassVar[Any] = None LOOKS: ClassVar[Any] = None @@ -109,6 +106,8 @@ class LookerQuery(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerQuery" + source_definition: Union[str, None, UnsetType] = UNSET """Deprecated.""" @@ -173,6 +172,11 @@ class LookerQuery(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -245,72 +249,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerQuery instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model is UNSET: - errors.append("model is required for creation") - if errors: - raise ValueError(f"LookerQuery validation failed: {errors}") - - def minimize(self) -> "LookerQuery": - """ - Return a minimal copy of this LookerQuery with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerQuery with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerQuery instance with only the minimum required fields. - """ - self.validate() - return LookerQuery(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerQuery": - """ - Create a :class:`RelatedLookerQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerQuery reference to this asset. - """ - if self.guid is not UNSET: - return RelatedLookerQuery(guid=self.guid) - return RelatedLookerQuery(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -434,6 +372,11 @@ class LookerQueryRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -529,6 +472,7 @@ class LookerQueryNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "tiles", "looks", @@ -609,9 +553,6 @@ def _looker_query_to_nested(looker_query: LookerQuery) -> LookerQueryNested: is_incomplete=looker_query.is_incomplete, provenance_type=looker_query.provenance_type, home_id=looker_query.home_id, - depth=looker_query.depth, - immediate_upstream=looker_query.immediate_upstream, - immediate_downstream=looker_query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -643,6 +584,7 @@ def _looker_query_from_nested(nested: LookerQueryNested) -> LookerQuery: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -651,9 +593,6 @@ def 
_looker_query_from_nested(nested: LookerQueryNested) -> LookerQuery: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_query_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -704,6 +643,9 @@ def _looker_query_from_nested_bytes(data: bytes, serde: Serde) -> LookerQuery: LookerQuery.METRICS = RelationField("metrics") LookerQuery.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerQuery.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerQuery.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerQuery.MEANINGS = RelationField("meanings") LookerQuery.TILES = RelationField("tiles") LookerQuery.LOOKS = RelationField("looks") diff --git a/pyatlan_v9/model/assets/looker_related.py b/pyatlan_v9/model/assets/looker_related.py index 564e486ac..a0fd46b13 100644 --- a/pyatlan_v9/model/assets/looker_related.py +++ b/pyatlan_v9/model/assets/looker_related.py @@ -134,7 +134,7 @@ class RelatedLookerView(RelatedLooker): looker_view_file_path: Union[str, None, UnsetType] = UNSET """File path of this view within the project.""" - looker_view_file_name: Union[str, None, UnsetType] = UNSET + looker_file_name: Union[str, None, UnsetType] = UNSET """File name of this view.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/looker_tile.py b/pyatlan_v9/model/assets/looker_tile.py index b8d65d559..70b20f46d 100644 --- a/pyatlan_v9/model/assets/looker_tile.py +++ b/pyatlan_v9/model/assets/looker_tile.py @@ -42,13 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from 
.gtc_related import RelatedAtlasGlossaryTerm from .looker_related import ( RelatedLookerDashboard, RelatedLookerField, RelatedLookerLook, RelatedLookerQuery, - RelatedLookerTile, ) from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -95,6 +95,7 @@ class LookerTile(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None DASHBOARD: ClassVar[Any] = None LOOK: ClassVar[Any] = None @@ -116,6 +117,8 @@ class LookerTile(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerTile" + lookml_link_id: Union[str, None, UnsetType] = UNSET """Identifier for the LoomML link.""" @@ -196,6 +199,11 @@ class LookerTile(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -271,72 +279,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerTile instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dashboard is UNSET: - errors.append("dashboard is required for creation") - if errors: - raise ValueError(f"LookerTile validation failed: {errors}") - - def minimize(self) -> "LookerTile": - """ - Return a minimal copy of this LookerTile with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerTile with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerTile instance with only the minimum required fields. - """ - self.validate() - return LookerTile(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerTile": - """ - Create a :class:`RelatedLookerTile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerTile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerTile(guid=self.guid) - return RelatedLookerTile(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -476,6 +418,11 @@ class LookerTileRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -574,6 +521,7 @@ class LookerTileNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "dashboard", "look", @@ -661,9 +609,6 @@ def _looker_tile_to_nested(looker_tile: LookerTile) -> LookerTileNested: is_incomplete=looker_tile.is_incomplete, provenance_type=looker_tile.provenance_type, home_id=looker_tile.home_id, - depth=looker_tile.depth, - immediate_upstream=looker_tile.immediate_upstream, - immediate_downstream=looker_tile.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -695,6 +640,7 @@ def _looker_tile_from_nested(nested: LookerTileNested) -> LookerTile: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -703,9 +649,6 @@ def _looker_tile_from_nested(nested: LookerTileNested) -> LookerTile: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_tile_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -758,6 +701,9 @@ def _looker_tile_from_nested_bytes(data: bytes, serde: Serde) -> LookerTile: LookerTile.METRICS = RelationField("metrics") LookerTile.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerTile.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerTile.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerTile.MEANINGS = RelationField("meanings") LookerTile.DASHBOARD = RelationField("dashboard") LookerTile.LOOK = RelationField("look") diff --git a/pyatlan_v9/model/assets/looker_view.py b/pyatlan_v9/model/assets/looker_view.py index 9a303f5e8..f811b6901 100644 --- a/pyatlan_v9/model/assets/looker_view.py +++ b/pyatlan_v9/model/assets/looker_view.py @@ -41,8 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .looker_related import RelatedLookerField, RelatedLookerProject, RelatedLookerView +from .looker_related import RelatedLookerField, RelatedLookerProject from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -66,7 +67,7 @@ class LookerView(Asset): PROJECT_NAME: ClassVar[Any] = None LOOKER_VIEW_FILE_PATH: ClassVar[Any] = None - LOOKER_VIEW_FILE_NAME: ClassVar[Any] = None + LOOKER_FILE_NAME: ClassVar[Any] = None LOOKER_SLUG: ClassVar[Any] = None CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] 
= None @@ -83,6 +84,7 @@ class LookerView(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None PROJECT: ClassVar[Any] = None FIELDS: ClassVar[Any] = None @@ -102,13 +104,15 @@ class LookerView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "LookerView" + project_name: Union[str, None, UnsetType] = UNSET """Name of the project in which this view exists.""" looker_view_file_path: Union[str, None, UnsetType] = UNSET """File path of this view within the project.""" - looker_view_file_name: Union[str, None, UnsetType] = UNSET + looker_file_name: Union[str, None, UnsetType] = UNSET """File name of this view.""" looker_slug: Union[str, None, UnsetType] = UNSET @@ -163,6 +167,11 @@ class LookerView(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -230,74 +239,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this LookerView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_name is UNSET: - errors.append("project_name is required for creation") - if errors: - raise ValueError(f"LookerView validation failed: {errors}") - - def minimize(self) -> "LookerView": - """ - Return a minimal copy of this LookerView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new LookerView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new LookerView instance with only the minimum required fields. - """ - self.validate() - return LookerView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedLookerView": - """ - Create a :class:`RelatedLookerView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedLookerView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedLookerView(guid=self.guid) - return RelatedLookerView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -359,7 +300,7 @@ class LookerViewAttributes(AssetAttributes): looker_view_file_path: Union[str, None, UnsetType] = UNSET """File path of this view within the project.""" - looker_view_file_name: Union[str, None, UnsetType] = UNSET + looker_file_name: Union[str, None, UnsetType] = UNSET """File name of this view.""" looker_slug: Union[str, None, UnsetType] = UNSET @@ -418,6 +359,11 @@ class LookerViewRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -510,6 +456,7 @@ class LookerViewNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "project", "fields", @@ -536,7 +483,7 @@ def _populate_looker_view_attrs(attrs: LookerViewAttributes, obj: LookerView) -> _populate_asset_attrs(attrs, obj) attrs.project_name = obj.project_name attrs.looker_view_file_path = obj.looker_view_file_path - attrs.looker_view_file_name = obj.looker_view_file_name + attrs.looker_file_name = obj.looker_file_name attrs.looker_slug = obj.looker_slug attrs.catalog_dataset_guid = obj.catalog_dataset_guid @@ -546,7 +493,7 @@ def _extract_looker_view_attrs(attrs: LookerViewAttributes) -> dict: result = _extract_asset_attrs(attrs) result["project_name"] = attrs.project_name 
result["looker_view_file_path"] = attrs.looker_view_file_path - result["looker_view_file_name"] = attrs.looker_view_file_name + result["looker_file_name"] = attrs.looker_file_name result["looker_slug"] = attrs.looker_slug result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -585,9 +532,6 @@ def _looker_view_to_nested(looker_view: LookerView) -> LookerViewNested: is_incomplete=looker_view.is_incomplete, provenance_type=looker_view.provenance_type, home_id=looker_view.home_id, - depth=looker_view.depth, - immediate_upstream=looker_view.immediate_upstream, - immediate_downstream=looker_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -619,6 +563,7 @@ def _looker_view_from_nested(nested: LookerViewNested) -> LookerView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -627,9 +572,6 @@ def _looker_view_from_nested(nested: LookerViewNested) -> LookerView: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_looker_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -656,9 +598,7 @@ def _looker_view_from_nested_bytes(data: bytes, serde: Serde) -> LookerView: LookerView.LOOKER_VIEW_FILE_PATH = KeywordField( "lookerViewFilePath", "lookerViewFilePath" ) -LookerView.LOOKER_VIEW_FILE_NAME = KeywordField( - "lookerViewFileName", "lookerViewFileName" -) +LookerView.LOOKER_FILE_NAME = KeywordField("lookerFileName", "lookerFileName") LookerView.LOOKER_SLUG = KeywordField("lookerSlug", "lookerSlug") LookerView.CATALOG_DATASET_GUID = KeywordField( "catalogDatasetGuid", 
"catalogDatasetGuid" @@ -677,6 +617,9 @@ def _looker_view_from_nested_bytes(data: bytes, serde: Serde) -> LookerView: LookerView.METRICS = RelationField("metrics") LookerView.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") LookerView.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +LookerView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) LookerView.MEANINGS = RelationField("meanings") LookerView.PROJECT = RelationField("project") LookerView.FIELDS = RelationField("fields") diff --git a/pyatlan_v9/model/assets/materialised_view.py b/pyatlan_v9/model/assets/materialised_view.py index afc3a8cc0..83c887fe2 100644 --- a/pyatlan_v9/model/assets/materialised_view.py +++ b/pyatlan_v9/model/assets/materialised_view.py @@ -49,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -64,7 +65,7 @@ RelatedSqlInsightBusinessQuestion, RelatedSqlInsightJoin, ) -from .sql_related import RelatedColumn, RelatedMaterialisedView, RelatedSchema +from .sql_related import RelatedColumn, RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -134,6 +135,7 @@ class MaterialisedView(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -157,6 +159,8 @@ class MaterialisedView(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MaterialisedView" + refresh_mode: Union[str, None, 
UnsetType] = UNSET """Refresh mode for this materialized view.""" @@ -336,6 +340,11 @@ class MaterialisedView(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -425,80 +434,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MaterialisedView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"MaterialisedView validation failed: {errors}") - - def minimize(self) -> "MaterialisedView": - """ - Return a minimal copy of this MaterialisedView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MaterialisedView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MaterialisedView instance with only the minimum required fields. - """ - self.validate() - return MaterialisedView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMaterialisedView": - """ - Create a :class:`RelatedMaterialisedView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMaterialisedView reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMaterialisedView(guid=self.guid) - return RelatedMaterialisedView(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -837,6 +772,11 @@ class MaterialisedViewRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -957,6 +897,7 @@ class MaterialisedViewNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1120,9 +1061,6 @@ def _materialised_view_to_nested( is_incomplete=materialised_view.is_incomplete, provenance_type=materialised_view.provenance_type, home_id=materialised_view.home_id, - depth=materialised_view.depth, - immediate_upstream=materialised_view.immediate_upstream, - immediate_downstream=materialised_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1156,6 +1094,7 @@ def _materialised_view_from_nested(nested: MaterialisedViewNested) -> Materialis updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1164,9 +1103,6 @@ def _materialised_view_from_nested(nested: MaterialisedViewNested) -> Materialis 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_materialised_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1290,6 +1226,9 @@ def _materialised_view_from_nested_bytes(data: bytes, serde: Serde) -> Materiali MaterialisedView.DBT_SOURCES = RelationField("dbtSources") MaterialisedView.SQL_DBT_SOURCES = RelationField("sqlDBTSources") MaterialisedView.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +MaterialisedView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MaterialisedView.MEANINGS = RelationField("meanings") MaterialisedView.MC_MONITORS = RelationField("mcMonitors") MaterialisedView.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/matillion.py b/pyatlan_v9/model/assets/matillion.py index 0633b6823..a4957f7be 100644 --- a/pyatlan_v9/model/assets/matillion.py +++ b/pyatlan_v9/model/assets/matillion.py @@ -40,8 +40,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .matillion_related import RelatedMatillion from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -79,6 +79,7 @@ class Matillion(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: 
ClassVar[Any] = None @@ -96,6 +97,8 @@ class Matillion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Matillion" + matillion_version: Union[str, None, UnsetType] = UNSET """Current point in time state of a project.""" @@ -148,6 +151,11 @@ class Matillion(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -203,66 +211,6 @@ class Matillion(Asset): def __post_init__(self) -> None: self.type_name = "Matillion" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Matillion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Matillion validation failed: {errors}") - - def minimize(self) -> "Matillion": - """ - Return a minimal copy of this Matillion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Matillion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Matillion instance with only the minimum required fields. - """ - self.validate() - return Matillion(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillion": - """ - Create a :class:`RelatedMatillion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillion reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMatillion(guid=self.guid) - return RelatedMatillion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -374,6 +322,11 @@ class MatillionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -460,6 +413,7 @@ class MatillionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -527,9 +481,6 @@ def _matillion_to_nested(matillion: Matillion) -> MatillionNested: is_incomplete=matillion.is_incomplete, provenance_type=matillion.provenance_type, home_id=matillion.home_id, - depth=matillion.depth, - immediate_upstream=matillion.immediate_upstream, - immediate_downstream=matillion.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -561,6 +512,7 @@ def _matillion_from_nested(nested: MatillionNested) -> Matillion: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -569,9 +521,6 @@ def _matillion_from_nested(nested: MatillionNested) -> Matillion: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -612,6 +561,9 @@ def _matillion_from_nested_bytes(data: bytes, serde: Serde) -> Matillion: Matillion.METRICS = RelationField("metrics") Matillion.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Matillion.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Matillion.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Matillion.MEANINGS = RelationField("meanings") Matillion.MC_MONITORS = RelationField("mcMonitors") Matillion.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/matillion_component.py b/pyatlan_v9/model/assets/matillion_component.py index f409ac304..ba756ded3 100644 --- a/pyatlan_v9/model/assets/matillion_component.py +++ b/pyatlan_v9/model/assets/matillion_component.py @@ -41,8 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .matillion_related import RelatedMatillionComponent, RelatedMatillionJob +from .matillion_related import RelatedMatillionJob from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -88,6 +89,7 @@ class MatillionComponent(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_JOB: ClassVar[Any] = None MATILLION_PROCESS: ClassVar[Any] 
= None @@ -107,6 +109,8 @@ class MatillionComponent(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MatillionComponent" + matillion_component_id: Union[str, None, UnsetType] = UNSET """Unique identifier of the component in Matillion.""" @@ -183,6 +187,11 @@ class MatillionComponent(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -252,76 +261,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MatillionComponent instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.matillion_job is UNSET: - errors.append("matillion_job is required for creation") - if self.matillion_job_name is UNSET: - errors.append("matillion_job_name is required for creation") - if self.matillion_job_qualified_name is UNSET: - errors.append("matillion_job_qualified_name is required for creation") - if errors: - raise ValueError(f"MatillionComponent validation failed: {errors}") - - def minimize(self) -> "MatillionComponent": - """ - Return a minimal copy of this MatillionComponent with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MatillionComponent with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MatillionComponent instance with only the minimum required fields. - """ - self.validate() - return MatillionComponent(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillionComponent": - """ - Create a :class:`RelatedMatillionComponent` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillionComponent reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMatillionComponent(guid=self.guid) - return RelatedMatillionComponent(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -459,6 +398,11 @@ class MatillionComponentRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -553,6 +497,7 @@ class MatillionComponentNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_job", "matillion_process", @@ -654,9 +599,6 @@ def _matillion_component_to_nested( is_incomplete=matillion_component.is_incomplete, provenance_type=matillion_component.provenance_type, home_id=matillion_component.home_id, - depth=matillion_component.depth, - immediate_upstream=matillion_component.immediate_upstream, - immediate_downstream=matillion_component.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -692,6 +634,7 @@ def _matillion_component_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -700,9 +643,6 @@ def _matillion_component_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_component_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -785,6 +725,9 @@ def _matillion_component_from_nested_bytes( MatillionComponent.METRICS = RelationField("metrics") MatillionComponent.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MatillionComponent.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MatillionComponent.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MatillionComponent.MEANINGS = RelationField("meanings") MatillionComponent.MATILLION_JOB = RelationField("matillionJob") MatillionComponent.MATILLION_PROCESS = RelationField("matillionProcess") diff --git a/pyatlan_v9/model/assets/matillion_group.py b/pyatlan_v9/model/assets/matillion_group.py index 04522cff3..710b04e77 100644 --- a/pyatlan_v9/model/assets/matillion_group.py +++ b/pyatlan_v9/model/assets/matillion_group.py @@ -40,8 +40,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .matillion_related import RelatedMatillionGroup, RelatedMatillionProject +from .matillion_related import RelatedMatillionProject from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -80,6 +81,7 @@ class MatillionGroup(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None 
MATILLION_PROJECTS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -98,6 +100,8 @@ class MatillionGroup(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MatillionGroup" + matillion_project_count: Union[int, None, UnsetType] = UNSET """Number of projects within the group.""" @@ -153,6 +157,11 @@ class MatillionGroup(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -211,66 +220,6 @@ class MatillionGroup(Asset): def __post_init__(self) -> None: self.type_name = "MatillionGroup" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MatillionGroup instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MatillionGroup validation failed: {errors}") - - def minimize(self) -> "MatillionGroup": - """ - Return a minimal copy of this MatillionGroup with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MatillionGroup with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MatillionGroup instance with only the minimum required fields. - """ - self.validate() - return MatillionGroup(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillionGroup": - """ - Create a :class:`RelatedMatillionGroup` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillionGroup reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMatillionGroup(guid=self.guid) - return RelatedMatillionGroup(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -385,6 +334,11 @@ class MatillionGroupRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -476,6 +430,7 @@ class MatillionGroupNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_projects", "mc_monitors", @@ -550,9 +505,6 @@ def _matillion_group_to_nested(matillion_group: MatillionGroup) -> MatillionGrou is_incomplete=matillion_group.is_incomplete, provenance_type=matillion_group.provenance_type, home_id=matillion_group.home_id, - depth=matillion_group.depth, - immediate_upstream=matillion_group.immediate_upstream, - immediate_downstream=matillion_group.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -586,6 +538,7 @@ def _matillion_group_from_nested(nested: MatillionGroupNested) -> MatillionGroup updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -594,9 +547,6 @@ def _matillion_group_from_nested(nested: MatillionGroupNested) -> MatillionGroup 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_group_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -650,6 +600,9 @@ def _matillion_group_from_nested_bytes(data: bytes, serde: Serde) -> MatillionGr MatillionGroup.METRICS = RelationField("metrics") MatillionGroup.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MatillionGroup.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MatillionGroup.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MatillionGroup.MEANINGS = RelationField("meanings") MatillionGroup.MATILLION_PROJECTS = RelationField("matillionProjects") MatillionGroup.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/matillion_job.py b/pyatlan_v9/model/assets/matillion_job.py index 2cc828303..3b418b263 100644 --- a/pyatlan_v9/model/assets/matillion_job.py +++ b/pyatlan_v9/model/assets/matillion_job.py @@ -41,12 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .matillion_related import ( - RelatedMatillionComponent, - RelatedMatillionJob, - RelatedMatillionProject, -) +from .matillion_related import RelatedMatillionComponent, RelatedMatillionProject from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -90,6 +87,7 @@ class MatillionJob(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None 
DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_PROJECT: ClassVar[Any] = None MATILLION_COMPONENTS: ClassVar[Any] = None @@ -109,6 +107,8 @@ class MatillionJob(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MatillionJob" + matillion_job_type: Union[str, None, UnsetType] = UNSET """Type of the job, for example: orchestration or transformation.""" @@ -179,6 +179,11 @@ class MatillionJob(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -250,78 +255,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MatillionJob instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.matillion_project is UNSET: - errors.append("matillion_project is required for creation") - if self.matillion_project_name is UNSET: - errors.append("matillion_project_name is required for creation") - if self.matillion_project_qualified_name is UNSET: - errors.append( - "matillion_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MatillionJob validation failed: {errors}") - - def minimize(self) -> "MatillionJob": - """ - Return a minimal copy of this MatillionJob with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MatillionJob with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MatillionJob instance with only the minimum required fields. - """ - self.validate() - return MatillionJob(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillionJob": - """ - Create a :class:`RelatedMatillionJob` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillionJob reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMatillionJob(guid=self.guid) - return RelatedMatillionJob(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -451,6 +384,11 @@ class MatillionJobRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -547,6 +485,7 @@ class MatillionJobNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_project", "matillion_components", @@ -630,9 +569,6 @@ def _matillion_job_to_nested(matillion_job: MatillionJob) -> MatillionJobNested: is_incomplete=matillion_job.is_incomplete, provenance_type=matillion_job.provenance_type, home_id=matillion_job.home_id, - depth=matillion_job.depth, - immediate_upstream=matillion_job.immediate_upstream, - immediate_downstream=matillion_job.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -666,6 +602,7 @@ def _matillion_job_from_nested(nested: MatillionJobNested) -> MatillionJob: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -674,9 +611,6 @@ def _matillion_job_from_nested(nested: MatillionJobNested) -> MatillionJob: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_job_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -742,6 +676,9 @@ def _matillion_job_from_nested_bytes(data: bytes, serde: Serde) -> MatillionJob: MatillionJob.METRICS = RelationField("metrics") MatillionJob.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MatillionJob.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MatillionJob.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MatillionJob.MEANINGS = RelationField("meanings") MatillionJob.MATILLION_PROJECT = RelationField("matillionProject") MatillionJob.MATILLION_COMPONENTS = RelationField("matillionComponents") diff --git a/pyatlan_v9/model/assets/matillion_project.py b/pyatlan_v9/model/assets/matillion_project.py index c1f1fad68..16244d93f 100644 --- a/pyatlan_v9/model/assets/matillion_project.py +++ b/pyatlan_v9/model/assets/matillion_project.py @@ -41,12 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .matillion_related import ( - RelatedMatillionGroup, - RelatedMatillionJob, - RelatedMatillionProject, -) +from .matillion_related import RelatedMatillionGroup, RelatedMatillionJob from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -89,6 +86,7 @@ class MatillionProject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None 
DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_GROUP: ClassVar[Any] = None MATILLION_JOBS: ClassVar[Any] = None @@ -108,6 +106,8 @@ class MatillionProject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MatillionProject" + matillion_versions: Union[List[str], None, UnsetType] = UNSET """List of versions in the project.""" @@ -175,6 +175,11 @@ class MatillionProject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -242,76 +247,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MatillionProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.matillion_group is UNSET: - errors.append("matillion_group is required for creation") - if self.matillion_group_name is UNSET: - errors.append("matillion_group_name is required for creation") - if self.matillion_group_qualified_name is UNSET: - errors.append("matillion_group_qualified_name is required for creation") - if errors: - raise ValueError(f"MatillionProject validation failed: {errors}") - - def minimize(self) -> "MatillionProject": - """ - Return a minimal copy of this MatillionProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MatillionProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MatillionProject instance with only the minimum required fields. - """ - self.validate() - return MatillionProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMatillionProject": - """ - Create a :class:`RelatedMatillionProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMatillionProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMatillionProject(guid=self.guid) - return RelatedMatillionProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -440,6 +375,11 @@ class MatillionProjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -534,6 +474,7 @@ class MatillionProjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_group", "matillion_jobs", @@ -619,9 +560,6 @@ def _matillion_project_to_nested( is_incomplete=matillion_project.is_incomplete, provenance_type=matillion_project.provenance_type, home_id=matillion_project.home_id, - depth=matillion_project.depth, - immediate_upstream=matillion_project.immediate_upstream, - immediate_downstream=matillion_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -655,6 +593,7 @@ def _matillion_project_from_nested(nested: MatillionProjectNested) -> MatillionP updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -663,9 +602,6 @@ def _matillion_project_from_nested(nested: MatillionProjectNested) -> MatillionP 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_matillion_project_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -736,6 +672,9 @@ def _matillion_project_from_nested_bytes(data: bytes, serde: Serde) -> Matillion MatillionProject.METRICS = RelationField("metrics") MatillionProject.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MatillionProject.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MatillionProject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MatillionProject.MEANINGS = RelationField("meanings") MatillionProject.MATILLION_GROUP = RelationField("matillionGroup") MatillionProject.MATILLION_JOBS = RelationField("matillionJobs") diff --git a/pyatlan_v9/model/assets/mc_incident.py b/pyatlan_v9/model/assets/mc_incident.py index ed36a129c..477a10954 100644 --- a/pyatlan_v9/model/assets/mc_incident.py +++ b/pyatlan_v9/model/assets/mc_incident.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -89,6 +90,7 @@ class MCIncident(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITOR: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -108,6 +110,8 @@ class MCIncident(Asset): INPUT_TO_SPARK_JOBS: 
ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MCIncident" + mc_incident_id: Union[str, None, UnsetType] = UNSET """Identifier of this incident, from Monte Carlo.""" @@ -187,6 +191,11 @@ class MCIncident(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -254,72 +263,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MCIncident instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mc_monitor is UNSET: - errors.append("mc_monitor is required for creation") - if errors: - raise ValueError(f"MCIncident validation failed: {errors}") - - def minimize(self) -> "MCIncident": - """ - Return a minimal copy of this MCIncident with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MCIncident with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MCIncident instance with only the minimum required fields. - """ - self.validate() - return MCIncident(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMCIncident": - """ - Create a :class:`RelatedMCIncident` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMCIncident reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMCIncident(guid=self.guid) - return RelatedMCIncident(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -458,6 +401,11 @@ class MCIncidentRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -550,6 +498,7 @@ class MCIncidentNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitor", "mc_monitors", @@ -637,9 +586,6 @@ def _mc_incident_to_nested(mc_incident: MCIncident) -> MCIncidentNested: is_incomplete=mc_incident.is_incomplete, provenance_type=mc_incident.provenance_type, home_id=mc_incident.home_id, - depth=mc_incident.depth, - immediate_upstream=mc_incident.immediate_upstream, - immediate_downstream=mc_incident.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -671,6 +617,7 @@ def _mc_incident_from_nested(nested: MCIncidentNested) -> MCIncident: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -679,9 +626,6 @@ def _mc_incident_from_nested(nested: MCIncidentNested) -> MCIncident: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mc_incident_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -747,6 +691,9 @@ def _mc_incident_from_nested_bytes(data: bytes, serde: Serde) -> MCIncident: MCIncident.METRICS = RelationField("metrics") MCIncident.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MCIncident.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MCIncident.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MCIncident.MEANINGS = RelationField("meanings") MCIncident.MC_MONITOR = RelationField("mcMonitor") MCIncident.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/mc_monitor.py b/pyatlan_v9/model/assets/mc_monitor.py index d01ae588e..108b969b2 100644 --- a/pyatlan_v9/model/assets/mc_monitor.py +++ b/pyatlan_v9/model/assets/mc_monitor.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -102,6 +103,7 @@ class MCMonitor(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None MC_MONITOR_ASSETS: ClassVar[Any] = None @@ -120,6 +122,8 @@ class MCMonitor(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MCMonitor" + mc_monitor_id: 
Union[str, None, UnsetType] = UNSET """Unique identifier for this monitor, from Monte Carlo.""" @@ -241,6 +245,11 @@ class MCMonitor(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -299,66 +308,6 @@ class MCMonitor(Asset): def __post_init__(self) -> None: self.type_name = "MCMonitor" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MCMonitor instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MCMonitor validation failed: {errors}") - - def minimize(self) -> "MCMonitor": - """ - Return a minimal copy of this MCMonitor with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MCMonitor with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MCMonitor instance with only the minimum required fields. - """ - self.validate() - return MCMonitor(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMCMonitor": - """ - Create a :class:`RelatedMCMonitor` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMCMonitor reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMCMonitor(guid=self.guid) - return RelatedMCMonitor(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -539,6 +488,11 @@ class MCMonitorRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -628,6 +582,7 @@ class MCMonitorNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_incidents", "mc_monitor_assets", @@ -752,9 +707,6 @@ def _mc_monitor_to_nested(mc_monitor: MCMonitor) -> MCMonitorNested: is_incomplete=mc_monitor.is_incomplete, provenance_type=mc_monitor.provenance_type, home_id=mc_monitor.home_id, - depth=mc_monitor.depth, - 
immediate_upstream=mc_monitor.immediate_upstream, - immediate_downstream=mc_monitor.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -786,6 +738,7 @@ def _mc_monitor_from_nested(nested: MCMonitorNested) -> MCMonitor: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -794,9 +747,6 @@ def _mc_monitor_from_nested(nested: MCMonitorNested) -> MCMonitor: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mc_monitor_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -900,6 +850,9 @@ def _mc_monitor_from_nested_bytes(data: bytes, serde: Serde) -> MCMonitor: MCMonitor.METRICS = RelationField("metrics") MCMonitor.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MCMonitor.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MCMonitor.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MCMonitor.MEANINGS = RelationField("meanings") MCMonitor.MC_INCIDENTS = RelationField("mcIncidents") MCMonitor.MC_MONITOR_ASSETS = RelationField("mcMonitorAssets") diff --git a/pyatlan_v9/model/assets/metabase.py b/pyatlan_v9/model/assets/metabase.py index fc080bf76..d54ddec6d 100644 --- a/pyatlan_v9/model/assets/metabase.py +++ b/pyatlan_v9/model/assets/metabase.py @@ -40,8 +40,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related 
import RelatedAtlasGlossaryTerm -from .metabase_related import RelatedMetabase from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -80,6 +80,7 @@ class Metabase(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +98,8 @@ class Metabase(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Metabase" + metabase_collection_name: Union[str, None, UnsetType] = UNSET """Simple name of the Metabase collection in which this asset exists.""" @@ -152,6 +155,11 @@ class Metabase(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -207,66 +215,6 @@ class Metabase(Asset): def __post_init__(self) -> None: self.type_name = "Metabase" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Metabase instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Metabase validation failed: {errors}") - - def minimize(self) -> "Metabase": - """ - Return a minimal copy of this Metabase with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Metabase with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Metabase instance with only the minimum required fields. - """ - self.validate() - return Metabase(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetabase": - """ - Create a :class:`RelatedMetabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetabase reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMetabase(guid=self.guid) - return RelatedMetabase(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -381,6 +329,11 @@ class MetabaseRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -467,6 +420,7 @@ class MetabaseNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -538,9 +492,6 @@ def _metabase_to_nested(metabase: Metabase) -> MetabaseNested: is_incomplete=metabase.is_incomplete, provenance_type=metabase.provenance_type, home_id=metabase.home_id, - depth=metabase.depth, - immediate_upstream=metabase.immediate_upstream, - immediate_downstream=metabase.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -572,6 +523,7 @@ def _metabase_from_nested(nested: MetabaseNested) -> Metabase: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -580,9 +532,6 @@ def _metabase_from_nested(nested: MetabaseNested) -> Metabase: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metabase_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -632,6 +581,9 @@ def _metabase_from_nested_bytes(data: bytes, serde: Serde) -> Metabase: Metabase.METRICS = RelationField("metrics") Metabase.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Metabase.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Metabase.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Metabase.MEANINGS = RelationField("meanings") Metabase.MC_MONITORS = RelationField("mcMonitors") Metabase.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/metabase_collection.py b/pyatlan_v9/model/assets/metabase_collection.py index b6605af99..079c3b48b 100644 --- a/pyatlan_v9/model/assets/metabase_collection.py +++ b/pyatlan_v9/model/assets/metabase_collection.py @@ -40,12 +40,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .metabase_related import ( - RelatedMetabaseCollection, - RelatedMetabaseDashboard, - RelatedMetabaseQuestion, -) +from .metabase_related import RelatedMetabaseDashboard, RelatedMetabaseQuestion from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -88,6 +85,7 @@ class MetabaseCollection(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None METABASE_DASHBOARDS: 
ClassVar[Any] = None METABASE_QUESTIONS: ClassVar[Any] = None @@ -107,6 +105,8 @@ class MetabaseCollection(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MetabaseCollection" + metabase_slug: Union[str, None, UnsetType] = UNSET """""" @@ -174,6 +174,11 @@ class MetabaseCollection(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -235,66 +240,6 @@ class MetabaseCollection(Asset): def __post_init__(self) -> None: self.type_name = "MetabaseCollection" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MetabaseCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MetabaseCollection validation failed: {errors}") - - def minimize(self) -> "MetabaseCollection": - """ - Return a minimal copy of this MetabaseCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MetabaseCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MetabaseCollection instance with only the minimum required fields. - """ - self.validate() - return MetabaseCollection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetabaseCollection": - """ - Create a :class:`RelatedMetabaseCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetabaseCollection reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMetabaseCollection(guid=self.guid) - return RelatedMetabaseCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -423,6 +368,11 @@ class MetabaseCollectionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -517,6 +467,7 @@ class MetabaseCollectionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "metabase_dashboards", "metabase_questions", @@ -604,9 +555,6 @@ def _metabase_collection_to_nested( is_incomplete=metabase_collection.is_incomplete, provenance_type=metabase_collection.provenance_type, home_id=metabase_collection.home_id, - depth=metabase_collection.depth, - immediate_upstream=metabase_collection.immediate_upstream, - immediate_downstream=metabase_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -642,6 +590,7 @@ def _metabase_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -650,9 +599,6 @@ def _metabase_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metabase_collection_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -725,6 +671,9 @@ def _metabase_collection_from_nested_bytes( MetabaseCollection.METRICS = RelationField("metrics") MetabaseCollection.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MetabaseCollection.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MetabaseCollection.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MetabaseCollection.MEANINGS = RelationField("meanings") MetabaseCollection.METABASE_DASHBOARDS = RelationField("metabaseDashboards") MetabaseCollection.METABASE_QUESTIONS = RelationField("metabaseQuestions") diff --git a/pyatlan_v9/model/assets/metabase_dashboard.py b/pyatlan_v9/model/assets/metabase_dashboard.py index 7bb8c91ed..e7d65cf63 100644 --- a/pyatlan_v9/model/assets/metabase_dashboard.py +++ b/pyatlan_v9/model/assets/metabase_dashboard.py @@ -41,12 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .metabase_related import ( - RelatedMetabaseCollection, - RelatedMetabaseDashboard, - RelatedMetabaseQuestion, -) +from .metabase_related import RelatedMetabaseCollection, RelatedMetabaseQuestion from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -86,6 +83,7 @@ class MetabaseDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None METABASE_COLLECTION: ClassVar[Any] = None METABASE_QUESTIONS: ClassVar[Any] = None @@ -105,6 +103,8 @@ class MetabaseDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MetabaseDashboard" + metabase_question_count: Union[int, None, UnsetType] = UNSET """""" @@ -163,6 +163,11 @@ class MetabaseDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -230,78 +235,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MetabaseDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.metabase_collection is UNSET: - errors.append("metabase_collection is required for creation") - if self.metabase_collection_name is UNSET: - errors.append("metabase_collection_name is required for creation") - if self.metabase_collection_qualified_name is UNSET: - errors.append( - "metabase_collection_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MetabaseDashboard validation failed: {errors}") - - def minimize(self) -> "MetabaseDashboard": - """ - Return a minimal copy of this MetabaseDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MetabaseDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MetabaseDashboard instance with only the minimum required fields. - """ - self.validate() - return MetabaseDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetabaseDashboard": - """ - Create a :class:`RelatedMetabaseDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetabaseDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMetabaseDashboard(guid=self.guid) - return RelatedMetabaseDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -421,6 +354,11 @@ class MetabaseDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -515,6 +453,7 @@ class MetabaseDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "metabase_collection", "metabase_questions", @@ -596,9 +535,6 @@ def _metabase_dashboard_to_nested( is_incomplete=metabase_dashboard.is_incomplete, provenance_type=metabase_dashboard.provenance_type, home_id=metabase_dashboard.home_id, - depth=metabase_dashboard.depth, - immediate_upstream=metabase_dashboard.immediate_upstream, - immediate_downstream=metabase_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -634,6 +570,7 @@ def _metabase_dashboard_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -642,9 +579,6 @@ def _metabase_dashboard_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metabase_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -708,6 +642,9 @@ def _metabase_dashboard_from_nested_bytes( MetabaseDashboard.METRICS = RelationField("metrics") MetabaseDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MetabaseDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MetabaseDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MetabaseDashboard.MEANINGS = RelationField("meanings") MetabaseDashboard.METABASE_COLLECTION = RelationField("metabaseCollection") MetabaseDashboard.METABASE_QUESTIONS = RelationField("metabaseQuestions") diff --git a/pyatlan_v9/model/assets/metabase_question.py b/pyatlan_v9/model/assets/metabase_question.py index 34aeb6a3f..36235a63e 100644 --- a/pyatlan_v9/model/assets/metabase_question.py +++ b/pyatlan_v9/model/assets/metabase_question.py @@ -41,12 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .metabase_related import ( - RelatedMetabaseCollection, - RelatedMetabaseDashboard, - RelatedMetabaseQuestion, -) +from .metabase_related import RelatedMetabaseCollection, RelatedMetabaseDashboard from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -88,6 +85,7 @@ class MetabaseQuestion(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None METABASE_COLLECTION: ClassVar[Any] = None METABASE_DASHBOARDS: ClassVar[Any] = None @@ -107,6 +105,8 @@ class MetabaseQuestion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MetabaseQuestion" + metabase_dashboard_count: Union[int, None, UnsetType] = UNSET """""" @@ -171,6 +171,11 @@ class MetabaseQuestion(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -238,78 +243,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MetabaseQuestion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.metabase_collection is UNSET: - errors.append("metabase_collection is required for creation") - if self.metabase_collection_name is UNSET: - errors.append("metabase_collection_name is required for creation") - if self.metabase_collection_qualified_name is UNSET: - errors.append( - "metabase_collection_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MetabaseQuestion validation failed: {errors}") - - def minimize(self) -> "MetabaseQuestion": - """ - Return a minimal copy of this MetabaseQuestion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MetabaseQuestion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MetabaseQuestion instance with only the minimum required fields. - """ - self.validate() - return MetabaseQuestion(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetabaseQuestion": - """ - Create a :class:`RelatedMetabaseQuestion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetabaseQuestion reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMetabaseQuestion(guid=self.guid) - return RelatedMetabaseQuestion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -435,6 +368,11 @@ class MetabaseQuestionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -529,6 +467,7 @@ class MetabaseQuestionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "metabase_collection", "metabase_dashboards", @@ -614,9 +553,6 @@ def _metabase_question_to_nested( is_incomplete=metabase_question.is_incomplete, provenance_type=metabase_question.provenance_type, home_id=metabase_question.home_id, - depth=metabase_question.depth, - immediate_upstream=metabase_question.immediate_upstream, - immediate_downstream=metabase_question.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -650,6 +586,7 @@ def _metabase_question_from_nested(nested: MetabaseQuestionNested) -> MetabaseQu updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -658,9 +595,6 @@ def _metabase_question_from_nested(nested: MetabaseQuestionNested) -> MetabaseQu 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metabase_question_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -726,6 +660,9 @@ def _metabase_question_from_nested_bytes(data: bytes, serde: Serde) -> MetabaseQ MetabaseQuestion.METRICS = RelationField("metrics") MetabaseQuestion.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MetabaseQuestion.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MetabaseQuestion.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MetabaseQuestion.MEANINGS = RelationField("meanings") MetabaseQuestion.METABASE_COLLECTION = RelationField("metabaseCollection") MetabaseQuestion.METABASE_DASHBOARDS = RelationField("metabaseDashboards") diff --git a/pyatlan_v9/model/assets/metric.py b/pyatlan_v9/model/assets/metric.py index 0f80a9708..4e57f3c43 100644 --- a/pyatlan_v9/model/assets/metric.py +++ b/pyatlan_v9/model/assets/metric.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -88,6 +89,7 @@ class Metric(Asset): METRIC_DIMENSION_COLUMNS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +107,8 @@ class Metric(Asset): 
INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Metric" + metric_type: Union[str, None, UnsetType] = UNSET """Type of the metric.""" @@ -180,6 +184,11 @@ class Metric(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -235,66 +244,6 @@ class Metric(Asset): def __post_init__(self) -> None: self.type_name = "Metric" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Metric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Metric validation failed: {errors}") - - def minimize(self) -> "Metric": - """ - Return a minimal copy of this Metric with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Metric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Metric instance with only the minimum required fields. - """ - self.validate() - return Metric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMetric": - """ - Create a :class:`RelatedMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMetric reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMetric(guid=self.guid) - return RelatedMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -429,6 +378,11 @@ class MetricRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -518,6 +472,7 @@ class MetricNested(AssetNested): "metric_dimension_columns", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -593,9 +548,6 @@ def _metric_to_nested(metric: Metric) -> MetricNested: is_incomplete=metric.is_incomplete, provenance_type=metric.provenance_type, home_id=metric.home_id, - depth=metric.depth, - immediate_upstream=metric.immediate_upstream, - immediate_downstream=metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -625,6 +577,7 @@ def _metric_from_nested(nested: MetricNested) -> Metric: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -633,9 +586,6 @@ def _metric_from_nested(nested: MetricNested) -> Metric: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_metric_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -685,6 +635,9 @@ def _metric_from_nested_bytes(data: bytes, serde: Serde) -> Metric: Metric.METRIC_DIMENSION_COLUMNS = RelationField("metricDimensionColumns") Metric.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Metric.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Metric.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Metric.MEANINGS = RelationField("meanings") Metric.MC_MONITORS = RelationField("mcMonitors") Metric.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/micro_strategy.py b/pyatlan_v9/model/assets/micro_strategy.py index 8247f2b30..cb386dfec 100644 --- a/pyatlan_v9/model/assets/micro_strategy.py +++ b/pyatlan_v9/model/assets/micro_strategy.py @@ -40,8 +40,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .micro_strategy_related import RelatedMicroStrategy from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -88,6 +88,7 @@ class MicroStrategy(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +106,8 @@ class MicroStrategy(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: 
ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategy" + micro_strategy_project_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the project in which this asset exists.""" @@ -184,6 +187,11 @@ class MicroStrategy(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -239,66 +247,6 @@ class MicroStrategy(Asset): def __post_init__(self) -> None: self.type_name = "MicroStrategy" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategy instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MicroStrategy validation failed: {errors}") - - def minimize(self) -> "MicroStrategy": - """ - Return a minimal copy of this MicroStrategy with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategy with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategy instance with only the minimum required fields. - """ - self.validate() - return MicroStrategy(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategy": - """ - Create a :class:`RelatedMicroStrategy` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategy reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategy(guid=self.guid) - return RelatedMicroStrategy(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -437,6 +385,11 @@ class MicroStrategyRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -525,6 +478,7 @@ class MicroStrategyNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -622,9 +576,6 @@ def _micro_strategy_to_nested(micro_strategy: MicroStrategy) -> MicroStrategyNes is_incomplete=micro_strategy.is_incomplete, provenance_type=micro_strategy.provenance_type, home_id=micro_strategy.home_id, - depth=micro_strategy.depth, - immediate_upstream=micro_strategy.immediate_upstream, - immediate_downstream=micro_strategy.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -658,6 +609,7 @@ def _micro_strategy_from_nested(nested: MicroStrategyNested) -> MicroStrategy: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -666,9 +618,6 @@ def _micro_strategy_from_nested(nested: MicroStrategyNested) -> MicroStrategy: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -756,6 +705,9 @@ def _micro_strategy_from_nested_bytes(data: bytes, serde: Serde) -> MicroStrateg MicroStrategy.METRICS = RelationField("metrics") MicroStrategy.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MicroStrategy.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MicroStrategy.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategy.MEANINGS = RelationField("meanings") MicroStrategy.MC_MONITORS = RelationField("mcMonitors") MicroStrategy.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/micro_strategy_attribute.py b/pyatlan_v9/model/assets/micro_strategy_attribute.py index bda52416b..1a4ed286e 100644 --- a/pyatlan_v9/model/assets/micro_strategy_attribute.py +++ b/pyatlan_v9/model/assets/micro_strategy_attribute.py @@ -41,9 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( - RelatedMicroStrategyAttribute, RelatedMicroStrategyColumn, RelatedMicroStrategyCube, RelatedMicroStrategyMetric, @@ -97,6 +97,7 @@ class MicroStrategyAttribute(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_PROJECT: ClassVar[Any] = None MICRO_STRATEGY_METRICS: ClassVar[Any] = None 
@@ -119,6 +120,8 @@ class MicroStrategyAttribute(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyAttribute" + micro_strategy_attribute_forms: Union[str, None, UnsetType] = UNSET """JSON string specifying the attribute's name, description, displayFormat, etc.""" @@ -201,6 +204,11 @@ class MicroStrategyAttribute(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -283,80 +291,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyAttribute instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyAttribute validation failed: {errors}") - - def minimize(self) -> "MicroStrategyAttribute": - """ - Return a minimal copy of this MicroStrategyAttribute with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyAttribute with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyAttribute instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyAttribute( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedMicroStrategyAttribute": - """ - Create a :class:`RelatedMicroStrategyAttribute` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedMicroStrategyAttribute reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMicroStrategyAttribute(guid=self.guid) - return RelatedMicroStrategyAttribute(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -500,6 +434,11 @@ class MicroStrategyAttributeRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -609,6 +548,7 @@ class MicroStrategyAttributeNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_project", "micro_strategy_metrics", @@ -719,9 +659,6 @@ def _micro_strategy_attribute_to_nested( is_incomplete=micro_strategy_attribute.is_incomplete, provenance_type=micro_strategy_attribute.provenance_type, home_id=micro_strategy_attribute.home_id, - depth=micro_strategy_attribute.depth, - immediate_upstream=micro_strategy_attribute.immediate_upstream, - immediate_downstream=micro_strategy_attribute.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -757,6 +694,7 @@ def _micro_strategy_attribute_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ 
-765,9 +703,6 @@ def _micro_strategy_attribute_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_attribute_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -870,6 +805,9 @@ def _micro_strategy_attribute_from_nested_bytes( MicroStrategyAttribute.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +MicroStrategyAttribute.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyAttribute.MEANINGS = RelationField("meanings") MicroStrategyAttribute.MICRO_STRATEGY_PROJECT = RelationField("microStrategyProject") MicroStrategyAttribute.MICRO_STRATEGY_METRICS = RelationField("microStrategyMetrics") diff --git a/pyatlan_v9/model/assets/micro_strategy_column.py b/pyatlan_v9/model/assets/micro_strategy_column.py index 00f0ed761..757767f6b 100644 --- a/pyatlan_v9/model/assets/micro_strategy_column.py +++ b/pyatlan_v9/model/assets/micro_strategy_column.py @@ -41,10 +41,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyAttribute, - RelatedMicroStrategyColumn, RelatedMicroStrategyCube, RelatedMicroStrategyDocument, RelatedMicroStrategyDossier, @@ -110,6 +110,7 @@ class MicroStrategyColumn(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_ATTRIBUTE: ClassVar[Any] = None MICRO_STRATEGY_CUBE: 
ClassVar[Any] = None @@ -134,6 +135,8 @@ class MicroStrategyColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyColumn" + micro_strategy_column_id: Union[str, None, UnsetType] = UNSET """Unique identifier of the column in MicroStrategy.""" @@ -249,6 +252,11 @@ class MicroStrategyColumn(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -337,78 +345,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_attribute is UNSET: - errors.append("micro_strategy_attribute is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyColumn validation failed: {errors}") - - def minimize(self) -> "MicroStrategyColumn": - """ - Return a minimal copy of this MicroStrategyColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyColumn instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyColumn": - """ - Create a :class:`RelatedMicroStrategyColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyColumn(guid=self.guid) - return RelatedMicroStrategyColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -585,6 +521,11 @@ class MicroStrategyColumnRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -698,6 +639,7 @@ class MicroStrategyColumnNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_attribute", "micro_strategy_cube", @@ -858,9 +800,6 @@ def _micro_strategy_column_to_nested( is_incomplete=micro_strategy_column.is_incomplete, provenance_type=micro_strategy_column.provenance_type, home_id=micro_strategy_column.home_id, - depth=micro_strategy_column.depth, - immediate_upstream=micro_strategy_column.immediate_upstream, - immediate_downstream=micro_strategy_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -896,6 +835,7 @@ def _micro_strategy_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -904,9 +844,6 @@ def _micro_strategy_column_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1052,6 +989,9 @@ def _micro_strategy_column_from_nested_bytes( MicroStrategyColumn.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +MicroStrategyColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyColumn.MEANINGS = RelationField("meanings") MicroStrategyColumn.MICRO_STRATEGY_ATTRIBUTE = RelationField("microStrategyAttribute") MicroStrategyColumn.MICRO_STRATEGY_CUBE = RelationField("microStrategyCube") diff --git a/pyatlan_v9/model/assets/micro_strategy_cube.py b/pyatlan_v9/model/assets/micro_strategy_cube.py index 69e046435..2ed58c757 100644 --- a/pyatlan_v9/model/assets/micro_strategy_cube.py +++ b/pyatlan_v9/model/assets/micro_strategy_cube.py @@ -41,11 +41,11 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyAttribute, RelatedMicroStrategyColumn, - RelatedMicroStrategyCube, RelatedMicroStrategyMetric, RelatedMicroStrategyProject, ) @@ -97,6 +97,7 @@ class MicroStrategyCube(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_PROJECT: ClassVar[Any] = None MICRO_STRATEGY_METRICS: ClassVar[Any] = None @@ -118,6 +119,8 @@ class MicroStrategyCube(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyCube" + micro_strategy_cube_type: Union[str, None, UnsetType] = UNSET """Type of cube, for example: OLAP or MTDI.""" @@ -203,6 +206,11 @@ class MicroStrategyCube(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -282,78 +290,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyCube instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyCube validation failed: {errors}") - - def minimize(self) -> "MicroStrategyCube": - """ - Return a minimal copy of this MicroStrategyCube with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyCube with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyCube instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyCube(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyCube": - """ - Create a :class:`RelatedMicroStrategyCube` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyCube reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyCube(guid=self.guid) - return RelatedMicroStrategyCube(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -500,6 +436,11 @@ class MicroStrategyCubeRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -606,6 +547,7 @@ class MicroStrategyCubeNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_project", "micro_strategy_metrics", @@ -715,9 +657,6 @@ def _micro_strategy_cube_to_nested( is_incomplete=micro_strategy_cube.is_incomplete, provenance_type=micro_strategy_cube.provenance_type, home_id=micro_strategy_cube.home_id, - depth=micro_strategy_cube.depth, - immediate_upstream=micro_strategy_cube.immediate_upstream, - immediate_downstream=micro_strategy_cube.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -753,6 +692,7 @@ def _micro_strategy_cube_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -761,9 +701,6 @@ def _micro_strategy_cube_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_cube_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -861,6 +798,9 @@ def _micro_strategy_cube_from_nested_bytes( MicroStrategyCube.METRICS = RelationField("metrics") MicroStrategyCube.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MicroStrategyCube.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MicroStrategyCube.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyCube.MEANINGS = RelationField("meanings") MicroStrategyCube.MICRO_STRATEGY_PROJECT = RelationField("microStrategyProject") MicroStrategyCube.MICRO_STRATEGY_METRICS = RelationField("microStrategyMetrics") diff --git a/pyatlan_v9/model/assets/micro_strategy_document.py b/pyatlan_v9/model/assets/micro_strategy_document.py index 5e2146fc6..013ea32af 100644 --- a/pyatlan_v9/model/assets/micro_strategy_document.py +++ b/pyatlan_v9/model/assets/micro_strategy_document.py @@ -41,10 +41,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyColumn, - RelatedMicroStrategyDocument, RelatedMicroStrategyProject, ) from .model_related import RelatedModelAttribute, RelatedModelEntity @@ -93,6 +93,7 @@ class MicroStrategyDocument(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_PROJECT: ClassVar[Any] = None MICRO_STRATEGY_COLUMNS: ClassVar[Any] = 
None @@ -112,6 +113,8 @@ class MicroStrategyDocument(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyDocument" + micro_strategy_project_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the project in which this asset exists.""" @@ -191,6 +194,11 @@ class MicroStrategyDocument(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -260,78 +268,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyDocument instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyDocument validation failed: {errors}") - - def minimize(self) -> "MicroStrategyDocument": - """ - Return a minimal copy of this MicroStrategyDocument with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyDocument with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyDocument instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyDocument(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyDocument": - """ - Create a :class:`RelatedMicroStrategyDocument` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedMicroStrategyDocument reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMicroStrategyDocument(guid=self.guid) - return RelatedMicroStrategyDocument(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -472,6 +408,11 @@ class MicroStrategyDocumentRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -568,6 +509,7 @@ class MicroStrategyDocumentNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_project", "micro_strategy_columns", @@ -673,9 +615,6 @@ def _micro_strategy_document_to_nested( is_incomplete=micro_strategy_document.is_incomplete, provenance_type=micro_strategy_document.provenance_type, home_id=micro_strategy_document.home_id, - depth=micro_strategy_document.depth, - immediate_upstream=micro_strategy_document.immediate_upstream, - immediate_downstream=micro_strategy_document.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -711,6 +650,7 @@ def _micro_strategy_document_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -719,9 +659,6 
@@ def _micro_strategy_document_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_document_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -821,6 +758,9 @@ def _micro_strategy_document_from_nested_bytes( MicroStrategyDocument.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +MicroStrategyDocument.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyDocument.MEANINGS = RelationField("meanings") MicroStrategyDocument.MICRO_STRATEGY_PROJECT = RelationField("microStrategyProject") MicroStrategyDocument.MICRO_STRATEGY_COLUMNS = RelationField("microStrategyColumns") diff --git a/pyatlan_v9/model/assets/micro_strategy_dossier.py b/pyatlan_v9/model/assets/micro_strategy_dossier.py index a196fc420..bedba19c7 100644 --- a/pyatlan_v9/model/assets/micro_strategy_dossier.py +++ b/pyatlan_v9/model/assets/micro_strategy_dossier.py @@ -41,10 +41,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyColumn, - RelatedMicroStrategyDossier, RelatedMicroStrategyProject, RelatedMicroStrategyVisualization, ) @@ -95,6 +95,7 @@ class MicroStrategyDossier(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_PROJECT: ClassVar[Any] = None MICRO_STRATEGY_VISUALIZATIONS: ClassVar[Any] = None @@ -115,6 
+116,8 @@ class MicroStrategyDossier(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyDossier" + micro_strategy_dossier_chapter_names: Union[List[str], None, UnsetType] = UNSET """List of chapter names in this dossier.""" @@ -197,6 +200,11 @@ class MicroStrategyDossier(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -271,78 +279,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyDossier instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyDossier validation failed: {errors}") - - def minimize(self) -> "MicroStrategyDossier": - """ - Return a minimal copy of this MicroStrategyDossier with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyDossier with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyDossier instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyDossier(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyDossier": - """ - Create a :class:`RelatedMicroStrategyDossier` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedMicroStrategyDossier reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMicroStrategyDossier(guid=self.guid) - return RelatedMicroStrategyDossier(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -486,6 +422,11 @@ class MicroStrategyDossierRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -587,6 +528,7 @@ class MicroStrategyDossierNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_project", "micro_strategy_visualizations", @@ -699,9 +641,6 @@ def _micro_strategy_dossier_to_nested( is_incomplete=micro_strategy_dossier.is_incomplete, provenance_type=micro_strategy_dossier.provenance_type, home_id=micro_strategy_dossier.home_id, - depth=micro_strategy_dossier.depth, - immediate_upstream=micro_strategy_dossier.immediate_upstream, - immediate_downstream=micro_strategy_dossier.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -737,6 +676,7 @@ def _micro_strategy_dossier_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -745,9 +685,6 @@ def 
_micro_strategy_dossier_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_dossier_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -846,6 +783,9 @@ def _micro_strategy_dossier_from_nested_bytes( MicroStrategyDossier.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +MicroStrategyDossier.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyDossier.MEANINGS = RelationField("meanings") MicroStrategyDossier.MICRO_STRATEGY_PROJECT = RelationField("microStrategyProject") MicroStrategyDossier.MICRO_STRATEGY_VISUALIZATIONS = RelationField( diff --git a/pyatlan_v9/model/assets/micro_strategy_fact.py b/pyatlan_v9/model/assets/micro_strategy_fact.py index 6fa9fd82e..7e7c3140d 100644 --- a/pyatlan_v9/model/assets/micro_strategy_fact.py +++ b/pyatlan_v9/model/assets/micro_strategy_fact.py @@ -41,10 +41,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyColumn, - RelatedMicroStrategyFact, RelatedMicroStrategyMetric, RelatedMicroStrategyProject, ) @@ -95,6 +95,7 @@ class MicroStrategyFact(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_PROJECT: ClassVar[Any] = None MICRO_STRATEGY_METRICS: ClassVar[Any] = None @@ -115,6 +116,8 @@ class MicroStrategyFact(Asset): INPUT_TO_SPARK_JOBS: 
ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyFact" + micro_strategy_fact_expressions: Union[List[str], None, UnsetType] = UNSET """List of expressions for this fact.""" @@ -197,6 +200,11 @@ class MicroStrategyFact(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -271,78 +279,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyFact instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyFact validation failed: {errors}") - - def minimize(self) -> "MicroStrategyFact": - """ - Return a minimal copy of this MicroStrategyFact with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyFact with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyFact instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyFact(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyFact": - """ - Create a :class:`RelatedMicroStrategyFact` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyFact reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyFact(guid=self.guid) - return RelatedMicroStrategyFact(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -486,6 +422,11 @@ class MicroStrategyFactRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -587,6 +528,7 @@ class MicroStrategyFactNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_project", "micro_strategy_metrics", @@ -693,9 +635,6 @@ def _micro_strategy_fact_to_nested( is_incomplete=micro_strategy_fact.is_incomplete, provenance_type=micro_strategy_fact.provenance_type, home_id=micro_strategy_fact.home_id, - depth=micro_strategy_fact.depth, - immediate_upstream=micro_strategy_fact.immediate_upstream, - immediate_downstream=micro_strategy_fact.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -731,6 +670,7 @@ def _micro_strategy_fact_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -739,9 +679,6 @@ def _micro_strategy_fact_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_fact_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -836,6 +773,9 @@ def _micro_strategy_fact_from_nested_bytes( MicroStrategyFact.METRICS = RelationField("metrics") MicroStrategyFact.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MicroStrategyFact.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MicroStrategyFact.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyFact.MEANINGS = RelationField("meanings") MicroStrategyFact.MICRO_STRATEGY_PROJECT = RelationField("microStrategyProject") MicroStrategyFact.MICRO_STRATEGY_METRICS = RelationField("microStrategyMetrics") diff --git a/pyatlan_v9/model/assets/micro_strategy_metric.py b/pyatlan_v9/model/assets/micro_strategy_metric.py index ee758609d..659318c3e 100644 --- a/pyatlan_v9/model/assets/micro_strategy_metric.py +++ b/pyatlan_v9/model/assets/micro_strategy_metric.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyAttribute, @@ -104,6 +105,7 @@ class MicroStrategyMetric(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_PROJECT: ClassVar[Any] = None MICRO_STRATEGY_ATTRIBUTES: ClassVar[Any] = None @@ -129,6 +131,8 @@ class MicroStrategyMetric(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] 
= None + type_name: Union[str, UnsetType] = "MicroStrategyMetric" + micro_strategy_metric_expression: Union[str, None, UnsetType] = UNSET """Text specifiying this metric's expression.""" @@ -231,6 +235,11 @@ class MicroStrategyMetric(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -326,78 +335,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyMetric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyMetric validation failed: {errors}") - - def minimize(self) -> "MicroStrategyMetric": - """ - Return a minimal copy of this MicroStrategyMetric with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyMetric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyMetric instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyMetric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyMetric": - """ - Create a :class:`RelatedMicroStrategyMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyMetric reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyMetric(guid=self.guid) - return RelatedMicroStrategyMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -561,6 +498,11 @@ class MicroStrategyMetricRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -683,6 +625,7 @@ class MicroStrategyMetricNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_project", "micro_strategy_attributes", @@ -818,9 +761,6 @@ def _micro_strategy_metric_to_nested( is_incomplete=micro_strategy_metric.is_incomplete, provenance_type=micro_strategy_metric.provenance_type, home_id=micro_strategy_metric.home_id, - depth=micro_strategy_metric.depth, - immediate_upstream=micro_strategy_metric.immediate_upstream, - immediate_downstream=micro_strategy_metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -856,6 +796,7 @@ def _micro_strategy_metric_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -864,9 +805,6 @@ def _micro_strategy_metric_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_metric_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -989,6 +927,9 @@ def _micro_strategy_metric_from_nested_bytes( MicroStrategyMetric.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +MicroStrategyMetric.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyMetric.MEANINGS = RelationField("meanings") MicroStrategyMetric.MICRO_STRATEGY_PROJECT = RelationField("microStrategyProject") MicroStrategyMetric.MICRO_STRATEGY_ATTRIBUTES = RelationField("microStrategyAttributes") diff --git a/pyatlan_v9/model/assets/micro_strategy_project.py b/pyatlan_v9/model/assets/micro_strategy_project.py index 9d3c45c5e..bd80e9a24 100644 --- a/pyatlan_v9/model/assets/micro_strategy_project.py +++ b/pyatlan_v9/model/assets/micro_strategy_project.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyAttribute, @@ -48,7 +49,6 @@ RelatedMicroStrategyDossier, RelatedMicroStrategyFact, RelatedMicroStrategyMetric, - RelatedMicroStrategyProject, RelatedMicroStrategyReport, RelatedMicroStrategyVisualization, ) @@ -98,6 +98,7 @@ class MicroStrategyProject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_METRICS: ClassVar[Any] = None MICRO_STRATEGY_REPORTS: ClassVar[Any] = None @@ 
-123,6 +124,8 @@ class MicroStrategyProject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyProject" + micro_strategy_project_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the project in which this asset exists.""" @@ -202,6 +205,11 @@ class MicroStrategyProject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -293,66 +301,6 @@ class MicroStrategyProject(Asset): def __post_init__(self) -> None: self.type_name = "MicroStrategyProject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MicroStrategyProject validation failed: {errors}") - - def minimize(self) -> "MicroStrategyProject": - """ - Return a minimal copy of this MicroStrategyProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyProject instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyProject": - """ - Create a :class:`RelatedMicroStrategyProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyProject(guid=self.guid) - return RelatedMicroStrategyProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,6 +441,11 @@ class MicroStrategyProjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -617,6 +570,7 @@ class MicroStrategyProjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_metrics", "micro_strategy_reports", @@ -728,9 +682,6 @@ def _micro_strategy_project_to_nested( is_incomplete=micro_strategy_project.is_incomplete, provenance_type=micro_strategy_project.provenance_type, home_id=micro_strategy_project.home_id, - depth=micro_strategy_project.depth, - immediate_upstream=micro_strategy_project.immediate_upstream, - immediate_downstream=micro_strategy_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -766,6 +717,7 @@ def _micro_strategy_project_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -774,9 +726,6 @@ def _micro_strategy_project_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_project_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -872,6 +821,9 @@ def _micro_strategy_project_from_nested_bytes( MicroStrategyProject.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +MicroStrategyProject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyProject.MEANINGS = RelationField("meanings") MicroStrategyProject.MICRO_STRATEGY_METRICS = RelationField("microStrategyMetrics") MicroStrategyProject.MICRO_STRATEGY_REPORTS = RelationField("microStrategyReports") diff --git a/pyatlan_v9/model/assets/micro_strategy_report.py b/pyatlan_v9/model/assets/micro_strategy_report.py index 0979e0d05..05e18a676 100644 --- a/pyatlan_v9/model/assets/micro_strategy_report.py +++ b/pyatlan_v9/model/assets/micro_strategy_report.py @@ -41,13 +41,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyAttribute, RelatedMicroStrategyColumn, RelatedMicroStrategyMetric, RelatedMicroStrategyProject, - RelatedMicroStrategyReport, ) from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -96,6 +96,7 @@ class MicroStrategyReport(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_PROJECT: ClassVar[Any] = 
None MICRO_STRATEGY_METRICS: ClassVar[Any] = None @@ -117,6 +118,8 @@ class MicroStrategyReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyReport" + micro_strategy_report_type: Union[str, None, UnsetType] = UNSET """Type of report, for example: Grid or Chart.""" @@ -199,6 +202,11 @@ class MicroStrategyReport(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -278,78 +286,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyReport validation failed: {errors}") - - def minimize(self) -> "MicroStrategyReport": - """ - Return a minimal copy of this MicroStrategyReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyReport instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMicroStrategyReport": - """ - Create a :class:`RelatedMicroStrategyReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMicroStrategyReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMicroStrategyReport(guid=self.guid) - return RelatedMicroStrategyReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,6 +429,11 @@ class MicroStrategyReportRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -599,6 +540,7 @@ class MicroStrategyReportNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_project", "micro_strategy_metrics", @@ -706,9 +648,6 @@ def _micro_strategy_report_to_nested( is_incomplete=micro_strategy_report.is_incomplete, provenance_type=micro_strategy_report.provenance_type, home_id=micro_strategy_report.home_id, - depth=micro_strategy_report.depth, - immediate_upstream=micro_strategy_report.immediate_upstream, - immediate_downstream=micro_strategy_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -744,6 +683,7 @@ def _micro_strategy_report_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -752,9 +692,6 @@ def _micro_strategy_report_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_report_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -853,6 +790,9 @@ def _micro_strategy_report_from_nested_bytes( MicroStrategyReport.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +MicroStrategyReport.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyReport.MEANINGS = RelationField("meanings") MicroStrategyReport.MICRO_STRATEGY_PROJECT = RelationField("microStrategyProject") MicroStrategyReport.MICRO_STRATEGY_METRICS = RelationField("microStrategyMetrics") diff --git a/pyatlan_v9/model/assets/micro_strategy_visualization.py b/pyatlan_v9/model/assets/micro_strategy_visualization.py index b32c5981d..dd40bd247 100644 --- a/pyatlan_v9/model/assets/micro_strategy_visualization.py +++ b/pyatlan_v9/model/assets/micro_strategy_visualization.py @@ -41,11 +41,11 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .micro_strategy_related import ( RelatedMicroStrategyDossier, RelatedMicroStrategyProject, - RelatedMicroStrategyVisualization, ) from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -96,6 +96,7 @@ class MicroStrategyVisualization(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MICRO_STRATEGY_PROJECT: ClassVar[Any] = None 
MICRO_STRATEGY_DOSSIER: ClassVar[Any] = None @@ -115,6 +116,8 @@ class MicroStrategyVisualization(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MicroStrategyVisualization" + micro_strategy_visualization_type: Union[str, None, UnsetType] = UNSET """Type of visualization.""" @@ -203,6 +206,11 @@ class MicroStrategyVisualization(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -270,80 +278,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MicroStrategyVisualization instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.micro_strategy_project is UNSET: - errors.append("micro_strategy_project is required for creation") - if self.micro_strategy_project_name is UNSET: - errors.append("micro_strategy_project_name is required for creation") - if self.micro_strategy_project_qualified_name is UNSET: - errors.append( - "micro_strategy_project_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"MicroStrategyVisualization validation failed: {errors}") - - def minimize(self) -> "MicroStrategyVisualization": - """ - Return a minimal copy of this MicroStrategyVisualization with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MicroStrategyVisualization with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MicroStrategyVisualization instance with only the minimum required fields. - """ - self.validate() - return MicroStrategyVisualization( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedMicroStrategyVisualization": - """ - Create a :class:`RelatedMicroStrategyVisualization` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedMicroStrategyVisualization reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMicroStrategyVisualization(guid=self.guid) - return RelatedMicroStrategyVisualization(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,6 +427,11 @@ class MicroStrategyVisualizationRelationshipAttributes(AssetRelationshipAttribut ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -587,6 +526,7 @@ class MicroStrategyVisualizationNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "micro_strategy_project", "micro_strategy_dossier", @@ -704,9 +644,6 @@ def _micro_strategy_visualization_to_nested( is_incomplete=micro_strategy_visualization.is_incomplete, provenance_type=micro_strategy_visualization.provenance_type, home_id=micro_strategy_visualization.home_id, - depth=micro_strategy_visualization.depth, - immediate_upstream=micro_strategy_visualization.immediate_upstream, - immediate_downstream=micro_strategy_visualization.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -742,6 +679,7 @@ def _micro_strategy_visualization_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -750,9 +688,6 @@ def _micro_strategy_visualization_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_micro_strategy_visualization_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -869,6 +804,9 @@ def _micro_strategy_visualization_from_nested_bytes( MicroStrategyVisualization.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +MicroStrategyVisualization.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MicroStrategyVisualization.MEANINGS = RelationField("meanings") MicroStrategyVisualization.MICRO_STRATEGY_PROJECT = RelationField( "microStrategyProject" diff --git a/pyatlan_v9/model/assets/mode.py b/pyatlan_v9/model/assets/mode.py index 22f493370..dbe665e4e 100644 --- a/pyatlan_v9/model/assets/mode.py +++ b/pyatlan_v9/model/assets/mode.py @@ -40,8 +40,8 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import RelatedMode from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -87,6 +87,7 @@ class Mode(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +105,8 @@ class 
Mode(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Mode" + mode_id: Union[str, None, UnsetType] = UNSET """Unique identifier for the Mode asset.""" @@ -180,6 +183,11 @@ class Mode(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -235,66 +243,6 @@ class Mode(Asset): def __post_init__(self) -> None: self.type_name = "Mode" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Mode instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Mode validation failed: {errors}") - - def minimize(self) -> "Mode": - """ - Return a minimal copy of this Mode with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Mode with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Mode instance with only the minimum required fields. - """ - self.validate() - return Mode(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMode": - """ - Create a :class:`RelatedMode` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMode reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMode(guid=self.guid) - return RelatedMode(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -430,6 +378,11 @@ class ModeRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -512,6 +465,7 @@ class ModeNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -595,9 +549,6 @@ def _mode_to_nested(mode: Mode) -> ModeNested: is_incomplete=mode.is_incomplete, provenance_type=mode.provenance_type, home_id=mode.home_id, - depth=mode.depth, - immediate_upstream=mode.immediate_upstream, - immediate_downstream=mode.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -627,6 +578,7 @@ def _mode_from_nested(nested: ModeNested) -> Mode: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -635,9 +587,6 @@ def _mode_from_nested(nested: ModeNested) -> Mode: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_mode_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -698,6 +647,9 @@ def _mode_from_nested_bytes(data: bytes, serde: Serde) -> Mode: Mode.METRICS = RelationField("metrics") Mode.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Mode.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Mode.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Mode.MEANINGS = RelationField("meanings") Mode.MC_MONITORS = RelationField("mcMonitors") Mode.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/mode_chart.py b/pyatlan_v9/model/assets/mode_chart.py index dd41da2b8..5ae3426d1 100644 --- a/pyatlan_v9/model/assets/mode_chart.py +++ b/pyatlan_v9/model/assets/mode_chart.py @@ -41,8 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import RelatedModeChart, RelatedModeQuery +from .mode_related import RelatedModeQuery from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -89,6 +90,7 @@ class ModeChart(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MODE_QUERY: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -107,6 +109,8 @@ class ModeChart(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeChart" + 
mode_chart_type: Union[str, None, UnsetType] = UNSET """Type of chart.""" @@ -186,6 +190,11 @@ class ModeChart(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -252,84 +261,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeChart instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mode_query is UNSET: - errors.append("mode_query is required for creation") - if self.mode_query_name is UNSET: - errors.append("mode_query_name is required for creation") - if self.mode_query_qualified_name is UNSET: - errors.append("mode_query_qualified_name is required for creation") - if self.mode_report_name is UNSET: - errors.append("mode_report_name is required for creation") - if self.mode_report_qualified_name is UNSET: - errors.append("mode_report_qualified_name is required for creation") - if self.mode_workspace_name is UNSET: - errors.append("mode_workspace_name is required for creation") - if self.mode_workspace_qualified_name is UNSET: - errors.append("mode_workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"ModeChart validation failed: {errors}") - - def minimize(self) -> "ModeChart": - """ - Return a minimal copy of this ModeChart with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeChart with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeChart instance with only the minimum required fields. 
- """ - self.validate() - return ModeChart(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeChart": - """ - Create a :class:`RelatedModeChart` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModeChart reference to this asset. - """ - if self.guid is not UNSET: - return RelatedModeChart(guid=self.guid) - return RelatedModeChart(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -468,6 +399,11 @@ class ModeChartRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -557,6 +493,7 @@ class ModeChartNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mode_query", "mc_monitors", @@ -643,9 +580,6 @@ def _mode_chart_to_nested(mode_chart: ModeChart) -> ModeChartNested: is_incomplete=mode_chart.is_incomplete, provenance_type=mode_chart.provenance_type, home_id=mode_chart.home_id, - depth=mode_chart.depth, - immediate_upstream=mode_chart.immediate_upstream, - immediate_downstream=mode_chart.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -677,6 +611,7 @@ def _mode_chart_from_nested(nested: ModeChartNested) -> ModeChart: 
updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -685,9 +620,6 @@ def _mode_chart_from_nested(nested: ModeChartNested) -> ModeChart: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_chart_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -751,6 +683,9 @@ def _mode_chart_from_nested_bytes(data: bytes, serde: Serde) -> ModeChart: ModeChart.METRICS = RelationField("metrics") ModeChart.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ModeChart.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ModeChart.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModeChart.MEANINGS = RelationField("meanings") ModeChart.MODE_QUERY = RelationField("modeQuery") ModeChart.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/mode_collection.py b/pyatlan_v9/model/assets/mode_collection.py index 89550340b..01ff94640 100644 --- a/pyatlan_v9/model/assets/mode_collection.py +++ b/pyatlan_v9/model/assets/mode_collection.py @@ -41,8 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import RelatedModeCollection, RelatedModeReport, RelatedModeWorkspace +from .mode_related import RelatedModeReport, RelatedModeWorkspace from .model_related import RelatedModelAttribute, RelatedModelEntity from 
.monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -90,6 +91,7 @@ class ModeCollection(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MODE_WORKSPACE: ClassVar[Any] = None MODE_REPORTS: ClassVar[Any] = None @@ -109,6 +111,8 @@ class ModeCollection(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeCollection" + mode_collection_type: Union[str, None, UnsetType] = UNSET """Type of this collection.""" @@ -191,6 +195,11 @@ class ModeCollection(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -258,76 +267,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mode_workspace is UNSET: - errors.append("mode_workspace is required for creation") - if self.mode_workspace_name is UNSET: - errors.append("mode_workspace_name is required for creation") - if self.mode_workspace_qualified_name is UNSET: - errors.append("mode_workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"ModeCollection validation failed: {errors}") - - def minimize(self) -> "ModeCollection": - """ - Return a minimal copy of this ModeCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeCollection instance with only the minimum required fields. - """ - self.validate() - return ModeCollection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeCollection": - """ - Create a :class:`RelatedModeCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModeCollection reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModeCollection(guid=self.guid) - return RelatedModeCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -469,6 +408,11 @@ class ModeCollectionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -563,6 +507,7 @@ class ModeCollectionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mode_workspace", "mode_reports", @@ -656,9 +601,6 @@ def _mode_collection_to_nested(mode_collection: ModeCollection) -> ModeCollectio is_incomplete=mode_collection.is_incomplete, provenance_type=mode_collection.provenance_type, home_id=mode_collection.home_id, - depth=mode_collection.depth, - immediate_upstream=mode_collection.immediate_upstream, - immediate_downstream=mode_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -692,6 +634,7 @@ def _mode_collection_from_nested(nested: ModeCollectionNested) -> ModeCollection updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -700,9 +643,6 @@ def _mode_collection_from_nested(nested: ModeCollectionNested) -> ModeCollection 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_collection_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -779,6 +719,9 @@ def _mode_collection_from_nested_bytes(data: bytes, serde: Serde) -> ModeCollect ModeCollection.METRICS = RelationField("metrics") ModeCollection.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ModeCollection.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ModeCollection.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModeCollection.MEANINGS = RelationField("meanings") ModeCollection.MODE_WORKSPACE = RelationField("modeWorkspace") ModeCollection.MODE_REPORTS = RelationField("modeReports") diff --git a/pyatlan_v9/model/assets/mode_query.py b/pyatlan_v9/model/assets/mode_query.py index ab37b8301..f5dff34a4 100644 --- a/pyatlan_v9/model/assets/mode_query.py +++ b/pyatlan_v9/model/assets/mode_query.py @@ -41,8 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import RelatedModeChart, RelatedModeQuery, RelatedModeReport +from .mode_related import RelatedModeChart, RelatedModeReport from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -90,6 +91,7 @@ class ModeQuery(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MODE_REPORT: ClassVar[Any] = None MODE_CHARTS: ClassVar[Any] = None @@ -109,6 +111,8 @@ class ModeQuery(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeQuery" + mode_raw_query: Union[str, None, UnsetType] = UNSET """Raw query for the Mode asset.""" @@ -191,6 +195,11 @@ class ModeQuery(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -260,80 +269,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeQuery instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mode_report is UNSET: - errors.append("mode_report is required for creation") - if self.mode_report_name is UNSET: - errors.append("mode_report_name is required for creation") - if self.mode_report_qualified_name is UNSET: - errors.append("mode_report_qualified_name is required for creation") - if self.mode_workspace_name is UNSET: - errors.append("mode_workspace_name is required for creation") - if self.mode_workspace_qualified_name is UNSET: - errors.append("mode_workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"ModeQuery validation failed: {errors}") - - def minimize(self) -> "ModeQuery": - """ - Return a minimal copy of this ModeQuery with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeQuery with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeQuery instance with only the minimum required fields. - """ - self.validate() - return ModeQuery(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeQuery": - """ - Create a :class:`RelatedModeQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedModeQuery reference to this asset. - """ - if self.guid is not UNSET: - return RelatedModeQuery(guid=self.guid) - return RelatedModeQuery(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -475,6 +410,11 @@ class ModeQueryRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -567,6 +507,7 @@ class ModeQueryNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mode_report", "mode_charts", @@ -656,9 +597,6 @@ def _mode_query_to_nested(mode_query: ModeQuery) -> ModeQueryNested: is_incomplete=mode_query.is_incomplete, provenance_type=mode_query.provenance_type, home_id=mode_query.home_id, - depth=mode_query.depth, - immediate_upstream=mode_query.immediate_upstream, - immediate_downstream=mode_query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -690,6 +628,7 @@ def _mode_query_from_nested(nested: ModeQueryNested) -> ModeQuery: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -698,9 +637,6 @@ def _mode_query_from_nested(nested: ModeQueryNested) -> ModeQuery: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_query_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -768,6 +704,9 @@ def _mode_query_from_nested_bytes(data: bytes, serde: Serde) -> ModeQuery: ModeQuery.METRICS = RelationField("metrics") ModeQuery.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ModeQuery.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ModeQuery.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModeQuery.MEANINGS = RelationField("meanings") ModeQuery.MODE_REPORT = RelationField("modeReport") ModeQuery.MODE_CHARTS = RelationField("modeCharts") diff --git a/pyatlan_v9/model/assets/mode_report.py b/pyatlan_v9/model/assets/mode_report.py index 3db1f1563..7928f9242 100644 --- a/pyatlan_v9/model/assets/mode_report.py +++ b/pyatlan_v9/model/assets/mode_report.py @@ -41,8 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import RelatedModeCollection, RelatedModeQuery, RelatedModeReport +from .mode_related import RelatedModeCollection, RelatedModeQuery from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -96,6 +97,7 @@ class ModeReport(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MODE_QUERIES: 
ClassVar[Any] = None MODE_COLLECTIONS: ClassVar[Any] = None @@ -115,6 +117,8 @@ class ModeReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeReport" + mode_collection_token: Union[str, None, UnsetType] = UNSET """Token for the Mode collection.""" @@ -215,6 +219,11 @@ class ModeReport(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -284,76 +293,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mode_collections is UNSET: - errors.append("mode_collections is required for creation") - if self.mode_workspace_name is UNSET: - errors.append("mode_workspace_name is required for creation") - if self.mode_workspace_qualified_name is UNSET: - errors.append("mode_workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"ModeReport validation failed: {errors}") - - def minimize(self) -> "ModeReport": - """ - Return a minimal copy of this ModeReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeReport instance with only the minimum required fields. - """ - self.validate() - return ModeReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeReport": - """ - Create a :class:`RelatedModeReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModeReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModeReport(guid=self.guid) - return RelatedModeReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -513,6 +452,11 @@ class ModeReportRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -605,6 +549,7 @@ class ModeReportNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mode_queries", "mode_collections", @@ -706,9 +651,6 @@ def _mode_report_to_nested(mode_report: ModeReport) -> ModeReportNested: is_incomplete=mode_report.is_incomplete, provenance_type=mode_report.provenance_type, home_id=mode_report.home_id, - depth=mode_report.depth, - immediate_upstream=mode_report.immediate_upstream, - immediate_downstream=mode_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -740,6 +682,7 @@ def _mode_report_from_nested(nested: ModeReportNested) -> ModeReport: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -748,9 +691,6 @@ def _mode_report_from_nested(nested: ModeReportNested) -> ModeReport: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_report_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -827,6 +767,9 @@ def _mode_report_from_nested_bytes(data: bytes, serde: Serde) -> ModeReport: ModeReport.METRICS = RelationField("metrics") ModeReport.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ModeReport.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ModeReport.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModeReport.MEANINGS = RelationField("meanings") ModeReport.MODE_QUERIES = RelationField("modeQueries") ModeReport.MODE_COLLECTIONS = RelationField("modeCollections") diff --git a/pyatlan_v9/model/assets/mode_workspace.py b/pyatlan_v9/model/assets/mode_workspace.py index b9881ff3b..d32de680e 100644 --- a/pyatlan_v9/model/assets/mode_workspace.py +++ b/pyatlan_v9/model/assets/mode_workspace.py @@ -40,8 +40,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .mode_related import RelatedModeCollection, RelatedModeWorkspace +from .mode_related import RelatedModeCollection from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -88,6 +89,7 @@ class ModeWorkspace(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None 
MODE_COLLECTIONS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -106,6 +108,8 @@ class ModeWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModeWorkspace" + mode_collection_count: Union[int, None, UnsetType] = UNSET """Number of collections in this workspace.""" @@ -185,6 +189,11 @@ class ModeWorkspace(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -243,66 +252,6 @@ class ModeWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "ModeWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModeWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ModeWorkspace validation failed: {errors}") - - def minimize(self) -> "ModeWorkspace": - """ - Return a minimal copy of this ModeWorkspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModeWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModeWorkspace instance with only the minimum required fields. - """ - self.validate() - return ModeWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModeWorkspace": - """ - Create a :class:`RelatedModeWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModeWorkspace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModeWorkspace(guid=self.guid) - return RelatedModeWorkspace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -441,6 +390,11 @@ class ModeWorkspaceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -532,6 +486,7 @@ class ModeWorkspaceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mode_collections", "mc_monitors", @@ -620,9 +575,6 @@ def _mode_workspace_to_nested(mode_workspace: ModeWorkspace) -> ModeWorkspaceNes is_incomplete=mode_workspace.is_incomplete, provenance_type=mode_workspace.provenance_type, home_id=mode_workspace.home_id, - depth=mode_workspace.depth, - immediate_upstream=mode_workspace.immediate_upstream, - immediate_downstream=mode_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -656,6 +608,7 @@ def _mode_workspace_from_nested(nested: ModeWorkspaceNested) -> ModeWorkspace: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -664,9 +617,6 @@ def _mode_workspace_from_nested(nested: ModeWorkspaceNested) -> ModeWorkspace: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mode_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -739,6 +689,9 @@ def _mode_workspace_from_nested_bytes(data: bytes, serde: Serde) -> ModeWorkspac ModeWorkspace.METRICS = RelationField("metrics") ModeWorkspace.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ModeWorkspace.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ModeWorkspace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModeWorkspace.MEANINGS = RelationField("meanings") ModeWorkspace.MODE_COLLECTIONS = RelationField("modeCollections") ModeWorkspace.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/model.py b/pyatlan_v9/model/assets/model.py index 793d402f8..1ce94451c 100644 --- a/pyatlan_v9/model/assets/model.py +++ b/pyatlan_v9/model/assets/model.py @@ -40,8 +40,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .model_related import RelatedModel, RelatedModelAttribute, RelatedModelEntity +from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -91,6 +92,7 @@ class Model(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -108,6 +110,8 @@ class Model(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Model" + model_name: Union[str, None, UnsetType] = UNSET """Simple name of the model in which this asset exists, or empty if it is itself a data model.""" @@ -199,6 +203,11 @@ class Model(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -254,66 +263,6 @@ class Model(Asset): def __post_init__(self) -> None: self.type_name = "Model" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Model instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Model validation failed: {errors}") - - def minimize(self) -> "Model": - """ - Return a minimal copy of this Model with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Model with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Model instance with only the minimum required fields. - """ - self.validate() - return Model(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModel": - """ - Create a :class:`RelatedModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModel(guid=self.guid) - return RelatedModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -464,6 +413,11 @@ class ModelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -550,6 +504,7 @@ class ModelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -647,9 +602,6 @@ def _model_to_nested(model: Model) -> ModelNested: is_incomplete=model.is_incomplete, provenance_type=model.provenance_type, home_id=model.home_id, - depth=model.depth, - immediate_upstream=model.immediate_upstream, - immediate_downstream=model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -679,6 +631,7 @@ def _model_from_nested(nested: ModelNested) -> Model: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -687,9 +640,6 @@ def _model_from_nested(nested: ModelNested) -> Model: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -762,6 +712,9 @@ def _model_from_nested_bytes(data: bytes, serde: Serde) -> Model: Model.METRICS = RelationField("metrics") Model.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Model.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Model.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Model.MEANINGS = RelationField("meanings") Model.MC_MONITORS = RelationField("mcMonitors") Model.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/model_attribute.py b/pyatlan_v9/model/assets/model_attribute.py index a8ef122c2..bdf7d7de6 100644 --- a/pyatlan_v9/model/assets/model_attribute.py +++ b/pyatlan_v9/model/assets/model_attribute.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import ( RelatedModelAttribute, @@ -110,6 +111,7 @@ class ModelAttribute(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -127,6 +129,8 @@ class ModelAttribute(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelAttribute" + model_attribute_is_nullable: Union[bool, None, UnsetType] = UNSET """When true, the values in this attribute can be null.""" @@ -270,6 +274,11 @@ class ModelAttribute(Asset): ) """Rules where this dataset is referenced.""" 
+ gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -325,66 +334,6 @@ class ModelAttribute(Asset): def __post_init__(self) -> None: self.type_name = "ModelAttribute" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelAttribute instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ModelAttribute validation failed: {errors}") - - def minimize(self) -> "ModelAttribute": - """ - Return a minimal copy of this ModelAttribute with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelAttribute with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new ModelAttribute instance with only the minimum required fields. - """ - self.validate() - return ModelAttribute(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModelAttribute": - """ - Create a :class:`RelatedModelAttribute` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelAttribute reference to this asset. - """ - if self.guid is not UNSET: - return RelatedModelAttribute(guid=self.guid) - return RelatedModelAttribute(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -587,6 +536,11 @@ class ModelAttributeRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -681,6 +635,7 @@ class ModelAttributeNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -800,9 +755,6 @@ def _model_attribute_to_nested(model_attribute: ModelAttribute) -> ModelAttribut is_incomplete=model_attribute.is_incomplete, provenance_type=model_attribute.provenance_type, home_id=model_attribute.home_id, - depth=model_attribute.depth, - immediate_upstream=model_attribute.immediate_upstream, - immediate_downstream=model_attribute.immediate_downstream, attributes=attrs, 
relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -836,6 +788,7 @@ def _model_attribute_from_nested(nested: ModelAttributeNested) -> ModelAttribute updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -844,9 +797,6 @@ def _model_attribute_from_nested(nested: ModelAttributeNested) -> ModelAttribute is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_attribute_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -974,6 +924,9 @@ def _model_attribute_from_nested_bytes(data: bytes, serde: Serde) -> ModelAttrib ModelAttribute.METRICS = RelationField("metrics") ModelAttribute.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ModelAttribute.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ModelAttribute.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModelAttribute.MEANINGS = RelationField("meanings") ModelAttribute.MC_MONITORS = RelationField("mcMonitors") ModelAttribute.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/model_attribute_association.py b/pyatlan_v9/model/assets/model_attribute_association.py index 62f9455dd..90e88ac33 100644 --- a/pyatlan_v9/model/assets/model_attribute_association.py +++ b/pyatlan_v9/model/assets/model_attribute_association.py @@ -41,12 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType 
from .gtc_related import RelatedAtlasGlossaryTerm -from .model_related import ( - RelatedModelAttribute, - RelatedModelAttributeAssociation, - RelatedModelEntity, -) +from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -101,6 +98,7 @@ class ModelAttributeAssociation(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -118,6 +116,8 @@ class ModelAttributeAssociation(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelAttributeAssociation" + model_attribute_association_to_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the association to which this attribute is related.""" @@ -228,6 +228,11 @@ class ModelAttributeAssociation(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -289,74 +294,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelAttributeAssociation instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model_attribute_association_to is UNSET: - errors.append("model_attribute_association_to is required for creation") - if errors: - raise ValueError(f"ModelAttributeAssociation validation failed: {errors}") - - def minimize(self) -> "ModelAttributeAssociation": - """ - Return a minimal copy of this ModelAttributeAssociation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelAttributeAssociation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelAttributeAssociation instance with only the minimum required fields. 
- """ - self.validate() - return ModelAttributeAssociation( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedModelAttributeAssociation": - """ - Create a :class:`RelatedModelAttributeAssociation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelAttributeAssociation reference to this asset. - """ - if self.guid is not UNSET: - return RelatedModelAttributeAssociation(guid=self.guid) - return RelatedModelAttributeAssociation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -528,6 +465,11 @@ class ModelAttributeAssociationRelationshipAttributes(AssetRelationshipAttribute ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -618,6 +560,7 @@ class ModelAttributeAssociationNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -741,9 +684,6 @@ def _model_attribute_association_to_nested( is_incomplete=model_attribute_association.is_incomplete, provenance_type=model_attribute_association.provenance_type, home_id=model_attribute_association.home_id, - depth=model_attribute_association.depth, - immediate_upstream=model_attribute_association.immediate_upstream, - 
immediate_downstream=model_attribute_association.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -779,6 +719,7 @@ def _model_attribute_association_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -787,9 +728,6 @@ def _model_attribute_association_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_attribute_association_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -911,6 +849,9 @@ def _model_attribute_association_from_nested_bytes( ModelAttributeAssociation.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +ModelAttributeAssociation.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModelAttributeAssociation.MEANINGS = RelationField("meanings") ModelAttributeAssociation.MC_MONITORS = RelationField("mcMonitors") ModelAttributeAssociation.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/model_data_model.py b/pyatlan_v9/model/assets/model_data_model.py index a662a2729..1fb4b7094 100644 --- a/pyatlan_v9/model/assets/model_data_model.py +++ b/pyatlan_v9/model/assets/model_data_model.py @@ -40,10 +40,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import ( RelatedModelAttribute, - 
RelatedModelDataModel, RelatedModelEntity, RelatedModelVersion, ) @@ -99,6 +99,7 @@ class ModelDataModel(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -116,6 +117,8 @@ class ModelDataModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelDataModel" + model_version_count: Union[int, None, UnsetType] = UNSET """Number of versions of the data model.""" @@ -216,6 +219,11 @@ class ModelDataModel(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -271,66 +279,6 @@ class ModelDataModel(Asset): def __post_init__(self) -> None: self.type_name = "ModelDataModel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelDataModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ModelDataModel validation failed: {errors}") - - def minimize(self) -> "ModelDataModel": - """ - Return a minimal copy of this ModelDataModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelDataModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelDataModel instance with only the minimum required fields. - """ - self.validate() - return ModelDataModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModelDataModel": - """ - Create a :class:`RelatedModelDataModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelDataModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModelDataModel(guid=self.guid) - return RelatedModelDataModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -490,6 +438,11 @@ class ModelDataModelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -579,6 +532,7 @@ class ModelDataModelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -686,9 +640,6 @@ def _model_data_model_to_nested( is_incomplete=model_data_model.is_incomplete, provenance_type=model_data_model.provenance_type, home_id=model_data_model.home_id, - depth=model_data_model.depth, - immediate_upstream=model_data_model.immediate_upstream, - immediate_downstream=model_data_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -722,6 +673,7 @@ def _model_data_model_from_nested(nested: ModelDataModelNested) -> ModelDataMode updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -730,9 +682,6 @@ def _model_data_model_from_nested(nested: ModelDataModelNested) -> ModelDataMode is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_data_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -824,6 +773,9 @@ def _model_data_model_from_nested_bytes(data: bytes, serde: Serde) -> ModelDataM ModelDataModel.METRICS = RelationField("metrics") ModelDataModel.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ModelDataModel.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ModelDataModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModelDataModel.MEANINGS = RelationField("meanings") ModelDataModel.MC_MONITORS = RelationField("mcMonitors") ModelDataModel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/model_entity.py b/pyatlan_v9/model/assets/model_entity.py index 0b14eca34..db6d37fed 100644 --- a/pyatlan_v9/model/assets/model_entity.py +++ b/pyatlan_v9/model/assets/model_entity.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import ( RelatedModelAttribute, @@ -111,6 +112,7 @@ class ModelEntity(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -128,6 +130,8 @@ class ModelEntity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelEntity" + 
model_entity_attribute_count: Union[int, None, UnsetType] = UNSET """Number of attributes in the entity.""" @@ -272,6 +276,11 @@ class ModelEntity(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -333,70 +342,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"ModelEntity validation failed: {errors}") - - def minimize(self) -> "ModelEntity": - """ - Return a minimal copy of this ModelEntity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelEntity instance with only the minimum required fields. - """ - self.validate() - return ModelEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModelEntity": - """ - Create a :class:`RelatedModelEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelEntity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModelEntity(guid=self.guid) - return RelatedModelEntity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -600,6 +545,11 @@ class ModelEntityRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -695,6 +645,7 @@ class ModelEntityNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -806,9 +757,6 @@ def _model_entity_to_nested(model_entity: ModelEntity) -> ModelEntityNested: is_incomplete=model_entity.is_incomplete, provenance_type=model_entity.provenance_type, home_id=model_entity.home_id, - depth=model_entity.depth, - immediate_upstream=model_entity.immediate_upstream, - immediate_downstream=model_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -840,6 +788,7 @@ def _model_entity_from_nested(nested: ModelEntityNested) -> ModelEntity: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -848,9 +797,6 @@ def _model_entity_from_nested(nested: ModelEntityNested) -> ModelEntity: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_entity_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -968,6 +914,9 @@ def _model_entity_from_nested_bytes(data: bytes, serde: Serde) -> ModelEntity: ModelEntity.METRICS = RelationField("metrics") ModelEntity.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ModelEntity.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ModelEntity.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModelEntity.MEANINGS = RelationField("meanings") ModelEntity.MC_MONITORS = RelationField("mcMonitors") ModelEntity.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/model_entity_association.py b/pyatlan_v9/model/assets/model_entity_association.py index 5d1d16f3e..f90e1ba09 100644 --- a/pyatlan_v9/model/assets/model_entity_association.py +++ b/pyatlan_v9/model/assets/model_entity_association.py @@ -41,12 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm -from .model_related import ( - RelatedModelAttribute, - RelatedModelEntity, - RelatedModelEntityAssociation, -) +from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -108,6 +105,7 @@ class ModelEntityAssociation(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -125,6 +123,8 @@ class ModelEntityAssociation(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelEntityAssociation" + model_entity_association_cardinality: Union[str, None, UnsetType] = UNSET """(Deprecated) Cardinality of the data entity association.""" @@ -252,6 +252,11 @@ class ModelEntityAssociation(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -315,78 +320,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelEntityAssociation instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model_entity_association_to is UNSET: - errors.append("model_entity_association_to is required for creation") - if self.model_entity_name is UNSET: - errors.append("model_entity_name is required for creation") - if self.model_entity_qualified_name is UNSET: - errors.append("model_entity_qualified_name is required for creation") - if errors: - raise ValueError(f"ModelEntityAssociation validation failed: {errors}") - - def minimize(self) -> "ModelEntityAssociation": - """ - Return a minimal copy of this ModelEntityAssociation with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelEntityAssociation with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelEntityAssociation instance with only the minimum required fields. - """ - self.validate() - return ModelEntityAssociation( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedModelEntityAssociation": - """ - Create a :class:`RelatedModelEntityAssociation` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelEntityAssociation reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModelEntityAssociation(guid=self.guid) - return RelatedModelEntityAssociation(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -575,6 +508,11 @@ class ModelEntityAssociationRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -665,6 +603,7 @@ class ModelEntityAssociationNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -822,9 +761,6 @@ def _model_entity_association_to_nested( is_incomplete=model_entity_association.is_incomplete, provenance_type=model_entity_association.provenance_type, home_id=model_entity_association.home_id, - depth=model_entity_association.depth, - immediate_upstream=model_entity_association.immediate_upstream, - immediate_downstream=model_entity_association.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -860,6 +796,7 @@ def _model_entity_association_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -868,9 +805,6 @@ def _model_entity_association_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_entity_association_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1007,6 +941,9 @@ def _model_entity_association_from_nested_bytes( ModelEntityAssociation.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +ModelEntityAssociation.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModelEntityAssociation.MEANINGS = RelationField("meanings") ModelEntityAssociation.MC_MONITORS = RelationField("mcMonitors") ModelEntityAssociation.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/model_version.py b/pyatlan_v9/model/assets/model_version.py index 6387beb2b..873d71ec0 100644 --- a/pyatlan_v9/model/assets/model_version.py +++ b/pyatlan_v9/model/assets/model_version.py @@ -41,12 +41,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import ( RelatedModelAttribute, RelatedModelDataModel, RelatedModelEntity, - RelatedModelVersion, ) from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject @@ -100,6 +100,7 @@ class ModelVersion(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -117,6 +118,8 @@ class ModelVersion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ModelVersion" + model_version_entity_count: Union[int, None, UnsetType] = UNSET """Number of entities in the version.""" @@ -217,6 +220,11 @@ class ModelVersion(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -278,72 +286,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ModelVersion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.model_data_model is UNSET: - errors.append("model_data_model is required for creation") - if errors: - raise ValueError(f"ModelVersion validation failed: {errors}") - - def minimize(self) -> "ModelVersion": - """ - Return a minimal copy of this ModelVersion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ModelVersion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ModelVersion instance with only the minimum required fields. - """ - self.validate() - return ModelVersion(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedModelVersion": - """ - Create a :class:`RelatedModelVersion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedModelVersion reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedModelVersion(guid=self.guid) - return RelatedModelVersion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -503,6 +445,11 @@ class ModelVersionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -593,6 +540,7 @@ class ModelVersionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -694,9 +642,6 @@ def _model_version_to_nested(model_version: ModelVersion) -> ModelVersionNested: is_incomplete=model_version.is_incomplete, provenance_type=model_version.provenance_type, home_id=model_version.home_id, - depth=model_version.depth, - immediate_upstream=model_version.immediate_upstream, - immediate_downstream=model_version.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -730,6 +675,7 @@ def _model_version_from_nested(nested: ModelVersionNested) -> ModelVersion: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -738,9 +684,6 @@ def _model_version_from_nested(nested: ModelVersionNested) -> ModelVersion: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_model_version_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -828,6 +771,9 @@ def _model_version_from_nested_bytes(data: bytes, serde: Serde) -> ModelVersion: ModelVersion.METRICS = RelationField("metrics") ModelVersion.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ModelVersion.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ModelVersion.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ModelVersion.MEANINGS = RelationField("meanings") ModelVersion.MC_MONITORS = RelationField("mcMonitors") ModelVersion.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/mongo_db.py b/pyatlan_v9/model/assets/mongo_db.py index 09386c3e7..4868a3aa2 100644 --- a/pyatlan_v9/model/assets/mongo_db.py +++ b/pyatlan_v9/model/assets/mongo_db.py @@ -41,9 +41,9 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity -from .mongo_db_related import RelatedMongoDB from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -80,6 +80,7 @@ class MongoDB(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = 
None @@ -97,6 +98,8 @@ class MongoDB(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MongoDB" + no_sql_schema_definition: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="noSQLSchemaDefinition" ) @@ -151,6 +154,11 @@ class MongoDB(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -206,66 +214,6 @@ class MongoDB(Asset): def __post_init__(self) -> None: self.type_name = "MongoDB" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MongoDB instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MongoDB validation failed: {errors}") - - def minimize(self) -> "MongoDB": - """ - Return a minimal copy of this MongoDB with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MongoDB with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MongoDB instance with only the minimum required fields. - """ - self.validate() - return MongoDB(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMongoDB": - """ - Create a :class:`RelatedMongoDB` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMongoDB reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMongoDB(guid=self.guid) - return RelatedMongoDB(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -379,6 +327,11 @@ class MongoDBRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -465,6 +418,7 @@ class MongoDBNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -532,9 +486,6 @@ def _mongo_db_to_nested(mongo_db: MongoDB) -> MongoDBNested: is_incomplete=mongo_db.is_incomplete, provenance_type=mongo_db.provenance_type, home_id=mongo_db.home_id, - depth=mongo_db.depth, - immediate_upstream=mongo_db.immediate_upstream, - immediate_downstream=mongo_db.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -564,6 +515,7 @@ def _mongo_db_from_nested(nested: MongoDBNested) -> MongoDB: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -572,9 +524,6 @@ def _mongo_db_from_nested(nested: MongoDBNested) -> MongoDB: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mongo_db_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -615,6 +564,9 @@ def _mongo_db_from_nested_bytes(data: bytes, serde: Serde) -> MongoDB: MongoDB.METRICS = RelationField("metrics") MongoDB.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MongoDB.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MongoDB.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MongoDB.MEANINGS = RelationField("meanings") MongoDB.MC_MONITORS = RelationField("mcMonitors") MongoDB.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/mongo_db_collection.py b/pyatlan_v9/model/assets/mongo_db_collection.py index 45289007c..0174a4150 100644 --- a/pyatlan_v9/model/assets/mongo_db_collection.py +++ b/pyatlan_v9/model/assets/mongo_db_collection.py @@ -48,9 +48,10 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity -from .mongo_db_related import RelatedMongoDBCollection, RelatedMongoDBDatabase +from .mongo_db_related import RelatedMongoDBDatabase from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -84,16 +85,16 @@ class MongoDBCollection(Asset): """ MONGO_DB_COLLECTION_SUBTYPE: ClassVar[Any] = None - MONGO_DB_COLLECTION_IS_CAPPED: ClassVar[Any] = None + MONGO_DB_IS_CAPPED: ClassVar[Any] = None MONGO_DB_COLLECTION_TIME_FIELD: ClassVar[Any] = None - MONGO_DB_COLLECTION_TIME_GRANULARITY: ClassVar[Any] = None - MONGO_DB_COLLECTION_EXPIRE_AFTER_SECONDS: ClassVar[Any] = None - MONGO_DB_COLLECTION_MAXIMUM_DOCUMENT_COUNT: ClassVar[Any] = None - 
MONGO_DB_COLLECTION_MAX_SIZE: ClassVar[Any] = None - MONGO_DB_COLLECTION_NUM_ORPHAN_DOCS: ClassVar[Any] = None - MONGO_DB_COLLECTION_NUM_INDEXES: ClassVar[Any] = None - MONGO_DB_COLLECTION_TOTAL_INDEX_SIZE: ClassVar[Any] = None - MONGO_DB_COLLECTION_AVERAGE_OBJECT_SIZE: ClassVar[Any] = None + MONGO_DB_TIME_GRANULARITY: ClassVar[Any] = None + MONGO_DB_EXPIRE_AFTER_SECONDS: ClassVar[Any] = None + MONGO_DB_MAXIMUM_DOCUMENT_COUNT: ClassVar[Any] = None + MONGO_DB_MAX_SIZE: ClassVar[Any] = None + MONGO_DB_NUM_ORPHAN_DOCS: ClassVar[Any] = None + MONGO_DB_NUM_INDEXES: ClassVar[Any] = None + MONGO_DB_TOTAL_INDEX_SIZE: ClassVar[Any] = None + MONGO_DB_AVERAGE_OBJECT_SIZE: ClassVar[Any] = None MONGO_DB_COLLECTION_SCHEMA_DEFINITION: ClassVar[Any] = None NO_SQL_SCHEMA_DEFINITION: ClassVar[Any] = None CATALOG_DATASET_GUID: ClassVar[Any] = None @@ -168,6 +169,7 @@ class MongoDBCollection(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MONGO_DB_DATABASE: ClassVar[Any] = None MONGO_DB_COLUMNS: ClassVar[Any] = None @@ -197,13 +199,15 @@ class MongoDBCollection(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MongoDBCollection" + mongo_db_collection_subtype: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="mongoDBCollectionSubtype" ) """Subtype of a MongoDB collection, for example: Capped, Time Series, etc.""" - mongo_db_collection_is_capped: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionIsCapped" + mongo_db_is_capped: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBIsCapped" ) """Whether the collection is capped (true) or not (false).""" @@ -212,43 +216,43 @@ class MongoDBCollection(Asset): ) """Name of the field containing the date in 
each time series document.""" - mongo_db_collection_time_granularity: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTimeGranularity" + mongo_db_time_granularity: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTimeGranularity" ) """Closest match to the time span between consecutive incoming measurements.""" - mongo_db_collection_expire_after_seconds: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionExpireAfterSeconds") + mongo_db_expire_after_seconds: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBExpireAfterSeconds" ) """Seconds after which documents in a time series collection or clustered collection expire.""" - mongo_db_collection_maximum_document_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionMaximumDocumentCount") + mongo_db_maximum_document_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaximumDocumentCount" ) """Maximum number of documents allowed in a capped collection.""" - mongo_db_collection_max_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionMaxSize" + mongo_db_max_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaxSize" ) """Maximum size allowed in a capped collection.""" - mongo_db_collection_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumOrphanDocs" + mongo_db_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumOrphanDocs" ) """Number of orphaned documents in the collection.""" - mongo_db_collection_num_indexes: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumIndexes" + mongo_db_num_indexes: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumIndexes" ) """Number of indexes on the collection.""" - 
mongo_db_collection_total_index_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTotalIndexSize" + mongo_db_total_index_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTotalIndexSize" ) """Total size of all indexes.""" - mongo_db_collection_average_object_size: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionAverageObjectSize") + mongo_db_average_object_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBAverageObjectSize" ) """Average size of an object in the collection.""" @@ -486,6 +490,11 @@ class MongoDBCollection(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -595,76 +604,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MongoDBCollection instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.mongo_db_database is UNSET: - errors.append("mongo_db_database is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"MongoDBCollection validation failed: {errors}") - - def minimize(self) -> "MongoDBCollection": - """ - Return a minimal copy of this MongoDBCollection with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MongoDBCollection with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MongoDBCollection instance with only the minimum required fields. - """ - self.validate() - return MongoDBCollection(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMongoDBCollection": - """ - Create a :class:`RelatedMongoDBCollection` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMongoDBCollection reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMongoDBCollection(guid=self.guid) - return RelatedMongoDBCollection(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -727,8 +666,8 @@ class MongoDBCollectionAttributes(AssetAttributes): ) """Subtype of a MongoDB collection, for example: Capped, Time Series, etc.""" - mongo_db_collection_is_capped: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionIsCapped" + mongo_db_is_capped: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBIsCapped" ) """Whether the collection is capped (true) or not (false).""" @@ -737,43 +676,43 @@ class MongoDBCollectionAttributes(AssetAttributes): ) """Name of the field containing the date in each time series document.""" - mongo_db_collection_time_granularity: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTimeGranularity" + mongo_db_time_granularity: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTimeGranularity" ) """Closest match to the time span between consecutive incoming measurements.""" - mongo_db_collection_expire_after_seconds: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionExpireAfterSeconds") + mongo_db_expire_after_seconds: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBExpireAfterSeconds" ) """Seconds after which documents in a time series collection or clustered collection expire.""" - mongo_db_collection_maximum_document_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionMaximumDocumentCount") + mongo_db_maximum_document_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaximumDocumentCount" ) """Maximum 
number of documents allowed in a capped collection.""" - mongo_db_collection_max_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionMaxSize" + mongo_db_max_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaxSize" ) """Maximum size allowed in a capped collection.""" - mongo_db_collection_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumOrphanDocs" + mongo_db_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumOrphanDocs" ) """Number of orphaned documents in the collection.""" - mongo_db_collection_num_indexes: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumIndexes" + mongo_db_num_indexes: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumIndexes" ) """Number of indexes on the collection.""" - mongo_db_collection_total_index_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTotalIndexSize" + mongo_db_total_index_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBTotalIndexSize" ) """Total size of all indexes.""" - mongo_db_collection_average_object_size: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionAverageObjectSize") + mongo_db_average_object_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBAverageObjectSize" ) """Average size of an object in the collection.""" @@ -1015,6 +954,11 @@ class MongoDBCollectionRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: 
Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1157,6 +1101,7 @@ class MongoDBCollectionNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mongo_db_database", "mongo_db_columns", @@ -1194,26 +1139,16 @@ def _populate_mongo_db_collection_attrs( """Populate MongoDBCollection-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.mongo_db_collection_subtype = obj.mongo_db_collection_subtype - attrs.mongo_db_collection_is_capped = obj.mongo_db_collection_is_capped + attrs.mongo_db_is_capped = obj.mongo_db_is_capped attrs.mongo_db_collection_time_field = obj.mongo_db_collection_time_field - attrs.mongo_db_collection_time_granularity = ( - obj.mongo_db_collection_time_granularity - ) - attrs.mongo_db_collection_expire_after_seconds = ( - obj.mongo_db_collection_expire_after_seconds - ) - attrs.mongo_db_collection_maximum_document_count = ( - obj.mongo_db_collection_maximum_document_count - ) - attrs.mongo_db_collection_max_size = obj.mongo_db_collection_max_size - attrs.mongo_db_collection_num_orphan_docs = obj.mongo_db_collection_num_orphan_docs - attrs.mongo_db_collection_num_indexes = obj.mongo_db_collection_num_indexes - attrs.mongo_db_collection_total_index_size = ( - obj.mongo_db_collection_total_index_size - ) - attrs.mongo_db_collection_average_object_size = ( - obj.mongo_db_collection_average_object_size - ) + attrs.mongo_db_time_granularity = obj.mongo_db_time_granularity + attrs.mongo_db_expire_after_seconds = obj.mongo_db_expire_after_seconds + attrs.mongo_db_maximum_document_count = obj.mongo_db_maximum_document_count + attrs.mongo_db_max_size = obj.mongo_db_max_size + attrs.mongo_db_num_orphan_docs = obj.mongo_db_num_orphan_docs + attrs.mongo_db_num_indexes = obj.mongo_db_num_indexes + attrs.mongo_db_total_index_size = obj.mongo_db_total_index_size + 
attrs.mongo_db_average_object_size = obj.mongo_db_average_object_size attrs.mongo_db_collection_schema_definition = ( obj.mongo_db_collection_schema_definition ) @@ -1280,28 +1215,16 @@ def _extract_mongo_db_collection_attrs(attrs: MongoDBCollectionAttributes) -> di """Extract all MongoDBCollection attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) result["mongo_db_collection_subtype"] = attrs.mongo_db_collection_subtype - result["mongo_db_collection_is_capped"] = attrs.mongo_db_collection_is_capped + result["mongo_db_is_capped"] = attrs.mongo_db_is_capped result["mongo_db_collection_time_field"] = attrs.mongo_db_collection_time_field - result["mongo_db_collection_time_granularity"] = ( - attrs.mongo_db_collection_time_granularity - ) - result["mongo_db_collection_expire_after_seconds"] = ( - attrs.mongo_db_collection_expire_after_seconds - ) - result["mongo_db_collection_maximum_document_count"] = ( - attrs.mongo_db_collection_maximum_document_count - ) - result["mongo_db_collection_max_size"] = attrs.mongo_db_collection_max_size - result["mongo_db_collection_num_orphan_docs"] = ( - attrs.mongo_db_collection_num_orphan_docs - ) - result["mongo_db_collection_num_indexes"] = attrs.mongo_db_collection_num_indexes - result["mongo_db_collection_total_index_size"] = ( - attrs.mongo_db_collection_total_index_size - ) - result["mongo_db_collection_average_object_size"] = ( - attrs.mongo_db_collection_average_object_size - ) + result["mongo_db_time_granularity"] = attrs.mongo_db_time_granularity + result["mongo_db_expire_after_seconds"] = attrs.mongo_db_expire_after_seconds + result["mongo_db_maximum_document_count"] = attrs.mongo_db_maximum_document_count + result["mongo_db_max_size"] = attrs.mongo_db_max_size + result["mongo_db_num_orphan_docs"] = attrs.mongo_db_num_orphan_docs + result["mongo_db_num_indexes"] = attrs.mongo_db_num_indexes + result["mongo_db_total_index_size"] = attrs.mongo_db_total_index_size + 
result["mongo_db_average_object_size"] = attrs.mongo_db_average_object_size result["mongo_db_collection_schema_definition"] = ( attrs.mongo_db_collection_schema_definition ) @@ -1408,9 +1331,6 @@ def _mongo_db_collection_to_nested( is_incomplete=mongo_db_collection.is_incomplete, provenance_type=mongo_db_collection.provenance_type, home_id=mongo_db_collection.home_id, - depth=mongo_db_collection.depth, - immediate_upstream=mongo_db_collection.immediate_upstream, - immediate_downstream=mongo_db_collection.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1446,6 +1366,7 @@ def _mongo_db_collection_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1454,9 +1375,6 @@ def _mongo_db_collection_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mongo_db_collection_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1494,35 +1412,33 @@ def _mongo_db_collection_from_nested_bytes( "mongoDBCollectionSubtype", "mongoDBCollectionSubtype.text", ) -MongoDBCollection.MONGO_DB_COLLECTION_IS_CAPPED = BooleanField( - "mongoDBCollectionIsCapped", "mongoDBCollectionIsCapped" +MongoDBCollection.MONGO_DB_IS_CAPPED = BooleanField( + "mongoDBIsCapped", "mongoDBIsCapped" ) MongoDBCollection.MONGO_DB_COLLECTION_TIME_FIELD = KeywordField( "mongoDBCollectionTimeField", "mongoDBCollectionTimeField" ) -MongoDBCollection.MONGO_DB_COLLECTION_TIME_GRANULARITY = KeywordField( - "mongoDBCollectionTimeGranularity", "mongoDBCollectionTimeGranularity" +MongoDBCollection.MONGO_DB_TIME_GRANULARITY = 
KeywordField( + "mongoDBTimeGranularity", "mongoDBTimeGranularity" ) -MongoDBCollection.MONGO_DB_COLLECTION_EXPIRE_AFTER_SECONDS = NumericField( - "mongoDBCollectionExpireAfterSeconds", "mongoDBCollectionExpireAfterSeconds" +MongoDBCollection.MONGO_DB_EXPIRE_AFTER_SECONDS = NumericField( + "mongoDBExpireAfterSeconds", "mongoDBExpireAfterSeconds" ) -MongoDBCollection.MONGO_DB_COLLECTION_MAXIMUM_DOCUMENT_COUNT = NumericField( - "mongoDBCollectionMaximumDocumentCount", "mongoDBCollectionMaximumDocumentCount" +MongoDBCollection.MONGO_DB_MAXIMUM_DOCUMENT_COUNT = NumericField( + "mongoDBMaximumDocumentCount", "mongoDBMaximumDocumentCount" ) -MongoDBCollection.MONGO_DB_COLLECTION_MAX_SIZE = NumericField( - "mongoDBCollectionMaxSize", "mongoDBCollectionMaxSize" +MongoDBCollection.MONGO_DB_MAX_SIZE = NumericField("mongoDBMaxSize", "mongoDBMaxSize") +MongoDBCollection.MONGO_DB_NUM_ORPHAN_DOCS = NumericField( + "mongoDBNumOrphanDocs", "mongoDBNumOrphanDocs" ) -MongoDBCollection.MONGO_DB_COLLECTION_NUM_ORPHAN_DOCS = NumericField( - "mongoDBCollectionNumOrphanDocs", "mongoDBCollectionNumOrphanDocs" +MongoDBCollection.MONGO_DB_NUM_INDEXES = NumericField( + "mongoDBNumIndexes", "mongoDBNumIndexes" ) -MongoDBCollection.MONGO_DB_COLLECTION_NUM_INDEXES = NumericField( - "mongoDBCollectionNumIndexes", "mongoDBCollectionNumIndexes" +MongoDBCollection.MONGO_DB_TOTAL_INDEX_SIZE = NumericField( + "mongoDBTotalIndexSize", "mongoDBTotalIndexSize" ) -MongoDBCollection.MONGO_DB_COLLECTION_TOTAL_INDEX_SIZE = NumericField( - "mongoDBCollectionTotalIndexSize", "mongoDBCollectionTotalIndexSize" -) -MongoDBCollection.MONGO_DB_COLLECTION_AVERAGE_OBJECT_SIZE = NumericField( - "mongoDBCollectionAverageObjectSize", "mongoDBCollectionAverageObjectSize" +MongoDBCollection.MONGO_DB_AVERAGE_OBJECT_SIZE = NumericField( + "mongoDBAverageObjectSize", "mongoDBAverageObjectSize" ) MongoDBCollection.MONGO_DB_COLLECTION_SCHEMA_DEFINITION = KeywordField( "mongoDBCollectionSchemaDefinition", 
"mongoDBCollectionSchemaDefinition" @@ -1667,6 +1583,9 @@ def _mongo_db_collection_from_nested_bytes( MongoDBCollection.DBT_SOURCES = RelationField("dbtSources") MongoDBCollection.SQL_DBT_SOURCES = RelationField("sqlDBTSources") MongoDBCollection.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +MongoDBCollection.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MongoDBCollection.MEANINGS = RelationField("meanings") MongoDBCollection.MONGO_DB_DATABASE = RelationField("mongoDBDatabase") MongoDBCollection.MONGO_DB_COLUMNS = RelationField("mongoDBColumns") diff --git a/pyatlan_v9/model/assets/mongo_db_database.py b/pyatlan_v9/model/assets/mongo_db_database.py index 094004b9d..da246a1f3 100644 --- a/pyatlan_v9/model/assets/mongo_db_database.py +++ b/pyatlan_v9/model/assets/mongo_db_database.py @@ -48,9 +48,10 @@ RelatedDbtTest, ) from .fabric_related import RelatedFabricWorkspace +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity -from .mongo_db_related import RelatedMongoDBCollection, RelatedMongoDBDatabase +from .mongo_db_related import RelatedMongoDBCollection from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess @@ -126,6 +127,7 @@ class MongoDBDatabase(Asset): SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None FABRIC_WORKSPACE: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MONGO_DB_COLLECTIONS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -149,6 +151,8 @@ class MongoDBDatabase(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MongoDBDatabase" + 
mongo_db_database_collection_count: Union[int, None, UnsetType] = msgspec.field( default=UNSET, name="mongoDBDatabaseCollectionCount" ) @@ -308,6 +312,11 @@ class MongoDBDatabase(Asset): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -391,66 +400,6 @@ class MongoDBDatabase(Asset): def __post_init__(self) -> None: self.type_name = "MongoDBDatabase" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MongoDBDatabase instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MongoDBDatabase validation failed: {errors}") - - def minimize(self) -> "MongoDBDatabase": - """ - Return a minimal copy of this MongoDBDatabase with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MongoDBDatabase with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MongoDBDatabase instance with only the minimum required fields. - """ - self.validate() - return MongoDBDatabase(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMongoDBDatabase": - """ - Create a :class:`RelatedMongoDBDatabase` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMongoDBDatabase reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMongoDBDatabase(guid=self.guid) - return RelatedMongoDBDatabase(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -671,6 +620,11 @@ class MongoDBDatabaseRelationshipAttributes(AssetRelationshipAttributes): fabric_workspace: Union[RelatedFabricWorkspace, None, UnsetType] = UNSET """Workspace containing the database.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -794,6 +748,7 @@ class MongoDBDatabaseNested(AssetNested): "sql_dbt_sources", "dbt_seed_assets", "fabric_workspace", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mongo_db_collections", "mc_monitors", @@ -941,9 +896,6 @@ def _mongo_db_database_to_nested( is_incomplete=mongo_db_database.is_incomplete, provenance_type=mongo_db_database.provenance_type, home_id=mongo_db_database.home_id, - depth=mongo_db_database.depth, - immediate_upstream=mongo_db_database.immediate_upstream, - immediate_downstream=mongo_db_database.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -977,6 +929,7 @@ def _mongo_db_database_from_nested(nested: MongoDBDatabaseNested) -> MongoDBData updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -985,9 +938,6 @@ def _mongo_db_database_from_nested(nested: 
MongoDBDatabaseNested) -> MongoDBData is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_mongo_db_database_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1105,6 +1055,9 @@ def _mongo_db_database_from_nested_bytes(data: bytes, serde: Serde) -> MongoDBDa MongoDBDatabase.SQL_DBT_SOURCES = RelationField("sqlDBTSources") MongoDBDatabase.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") MongoDBDatabase.FABRIC_WORKSPACE = RelationField("fabricWorkspace") +MongoDBDatabase.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MongoDBDatabase.MEANINGS = RelationField("meanings") MongoDBDatabase.MONGO_DB_COLLECTIONS = RelationField("mongoDBCollections") MongoDBDatabase.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/mongo_db_related.py b/pyatlan_v9/model/assets/mongo_db_related.py index 52354d7c1..3dce83c5e 100644 --- a/pyatlan_v9/model/assets/mongo_db_related.py +++ b/pyatlan_v9/model/assets/mongo_db_related.py @@ -76,8 +76,8 @@ class RelatedMongoDBCollection(RelatedMongoDB): ) """Subtype of a MongoDB collection, for example: Capped, Time Series, etc.""" - mongo_db_collection_is_capped: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionIsCapped" + mongo_db_is_capped: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBIsCapped" ) """Whether the collection is capped (true) or not (false).""" @@ -86,43 +86,43 @@ class RelatedMongoDBCollection(RelatedMongoDB): ) """Name of the field containing the date in each time series document.""" - mongo_db_collection_time_granularity: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTimeGranularity" + mongo_db_time_granularity: Union[str, None, UnsetType] = 
msgspec.field( + default=UNSET, name="mongoDBTimeGranularity" ) """Closest match to the time span between consecutive incoming measurements.""" - mongo_db_collection_expire_after_seconds: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionExpireAfterSeconds") + mongo_db_expire_after_seconds: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBExpireAfterSeconds" ) """Seconds after which documents in a time series collection or clustered collection expire.""" - mongo_db_collection_maximum_document_count: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionMaximumDocumentCount") + mongo_db_maximum_document_count: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaximumDocumentCount" ) """Maximum number of documents allowed in a capped collection.""" - mongo_db_collection_max_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionMaxSize" + mongo_db_max_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBMaxSize" ) """Maximum size allowed in a capped collection.""" - mongo_db_collection_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumOrphanDocs" + mongo_db_num_orphan_docs: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumOrphanDocs" ) """Number of orphaned documents in the collection.""" - mongo_db_collection_num_indexes: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionNumIndexes" + mongo_db_num_indexes: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBNumIndexes" ) """Number of indexes on the collection.""" - mongo_db_collection_total_index_size: Union[int, None, UnsetType] = msgspec.field( - default=UNSET, name="mongoDBCollectionTotalIndexSize" + mongo_db_total_index_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, 
name="mongoDBTotalIndexSize" ) """Total size of all indexes.""" - mongo_db_collection_average_object_size: Union[int, None, UnsetType] = ( - msgspec.field(default=UNSET, name="mongoDBCollectionAverageObjectSize") + mongo_db_average_object_size: Union[int, None, UnsetType] = msgspec.field( + default=UNSET, name="mongoDBAverageObjectSize" ) """Average size of an object in the collection.""" diff --git a/pyatlan_v9/model/assets/monte_carlo.py b/pyatlan_v9/model/assets/monte_carlo.py index 89f70616b..41f5800ac 100644 --- a/pyatlan_v9/model/assets/monte_carlo.py +++ b/pyatlan_v9/model/assets/monte_carlo.py @@ -40,9 +40,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity -from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor, RelatedMonteCarlo +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable @@ -80,6 +81,7 @@ class MonteCarlo(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +99,8 @@ class MonteCarlo(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MonteCarlo" + mc_labels: Union[List[str], None, UnsetType] = UNSET """List of labels for this Monte Carlo asset.""" @@ -155,6 +159,11 @@ class MonteCarlo(Asset): ) """Rules where 
this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -210,66 +219,6 @@ class MonteCarlo(Asset): def __post_init__(self) -> None: self.type_name = "MonteCarlo" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MonteCarlo instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MonteCarlo validation failed: {errors}") - - def minimize(self) -> "MonteCarlo": - """ - Return a minimal copy of this MonteCarlo with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MonteCarlo with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new MonteCarlo instance with only the minimum required fields. - """ - self.validate() - return MonteCarlo(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedMonteCarlo": - """ - Create a :class:`RelatedMonteCarlo` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMonteCarlo reference to this asset. - """ - if self.guid is not UNSET: - return RelatedMonteCarlo(guid=self.guid) - return RelatedMonteCarlo(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -387,6 +336,11 @@ class MonteCarloRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -473,6 +427,7 @@ class MonteCarloNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -544,9 +499,6 @@ def _monte_carlo_to_nested(monte_carlo: MonteCarlo) -> MonteCarloNested: is_incomplete=monte_carlo.is_incomplete, provenance_type=monte_carlo.provenance_type, home_id=monte_carlo.home_id, - depth=monte_carlo.depth, - immediate_upstream=monte_carlo.immediate_upstream, - immediate_downstream=monte_carlo.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ 
-578,6 +530,7 @@ def _monte_carlo_from_nested(nested: MonteCarloNested) -> MonteCarlo: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -586,9 +539,6 @@ def _monte_carlo_from_nested(nested: MonteCarloNested) -> MonteCarlo: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_monte_carlo_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -639,6 +589,9 @@ def _monte_carlo_from_nested_bytes(data: bytes, serde: Serde) -> MonteCarlo: MonteCarlo.METRICS = RelationField("metrics") MonteCarlo.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") MonteCarlo.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +MonteCarlo.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MonteCarlo.MEANINGS = RelationField("meanings") MonteCarlo.MC_MONITORS = RelationField("mcMonitors") MonteCarlo.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/multi_dimensional_dataset.py b/pyatlan_v9/model/assets/multi_dimensional_dataset.py index 53b72b76c..9f46af855 100644 --- a/pyatlan_v9/model/assets/multi_dimensional_dataset.py +++ b/pyatlan_v9/model/assets/multi_dimensional_dataset.py @@ -37,10 +37,11 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .cube_related import RelatedCubeDimension, RelatedMultiDimensionalDataset +from .cube_related import RelatedCubeDimension from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import 
RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -84,6 +85,7 @@ class MultiDimensionalDataset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class MultiDimensionalDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "MultiDimensionalDataset" + cube_name: Union[str, None, UnsetType] = UNSET """Simple name of the cube in which this asset exists, or empty if it is itself a cube.""" @@ -169,6 +173,11 @@ class MultiDimensionalDataset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -227,68 +236,6 @@ class MultiDimensionalDataset(Asset): def __post_init__(self) -> None: self.type_name = "MultiDimensionalDataset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this MultiDimensionalDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"MultiDimensionalDataset validation failed: {errors}") - - def minimize(self) -> "MultiDimensionalDataset": - """ - Return a minimal copy of this MultiDimensionalDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new MultiDimensionalDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new MultiDimensionalDataset instance with only the minimum required fields. - """ - self.validate() - return MultiDimensionalDataset( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedMultiDimensionalDataset": - """ - Create a :class:`RelatedMultiDimensionalDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedMultiDimensionalDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedMultiDimensionalDataset(guid=self.guid) - return RelatedMultiDimensionalDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -417,6 +364,11 @@ class MultiDimensionalDatasetRelationshipAttributes(AssetRelationshipAttributes) ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -508,6 +460,7 @@ class MultiDimensionalDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -594,9 +547,6 @@ def _multi_dimensional_dataset_to_nested( is_incomplete=multi_dimensional_dataset.is_incomplete, provenance_type=multi_dimensional_dataset.provenance_type, home_id=multi_dimensional_dataset.home_id, - depth=multi_dimensional_dataset.depth, - immediate_upstream=multi_dimensional_dataset.immediate_upstream, - immediate_downstream=multi_dimensional_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -632,6 +582,7 @@ def _multi_dimensional_dataset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -640,9 +591,6 @@ def _multi_dimensional_dataset_from_nested( 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_multi_dimensional_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -722,6 +670,9 @@ def _multi_dimensional_dataset_from_nested_bytes( MultiDimensionalDataset.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +MultiDimensionalDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) MultiDimensionalDataset.MEANINGS = RelationField("meanings") MultiDimensionalDataset.MC_MONITORS = RelationField("mcMonitors") MultiDimensionalDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/namespace.py b/pyatlan_v9/model/assets/namespace.py index 32e92b847..91fcb107b 100644 --- a/pyatlan_v9/model/assets/namespace.py +++ b/pyatlan_v9/model/assets/namespace.py @@ -39,9 +39,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor -from .namespace_related import RelatedFolder, RelatedNamespace +from .namespace_related import RelatedFolder from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -69,6 +70,7 @@ class Namespace(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None 
MC_INCIDENTS: ClassVar[Any] = None @@ -82,6 +84,8 @@ class Namespace(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Namespace" + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET """Checks that run on this asset.""" @@ -114,6 +118,11 @@ class Namespace(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -157,66 +166,6 @@ class Namespace(Asset): def __post_init__(self) -> None: self.type_name = "Namespace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Namespace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Namespace validation failed: {errors}") - - def minimize(self) -> "Namespace": - """ - Return a minimal copy of this Namespace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Namespace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Namespace instance with only the minimum required fields. - """ - self.validate() - return Namespace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedNamespace": - """ - Create a :class:`RelatedNamespace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedNamespace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedNamespace(guid=self.guid) - return RelatedNamespace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -310,6 +259,11 @@ class NamespaceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -380,6 +334,7 @@ class NamespaceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -438,9 +393,6 @@ def _namespace_to_nested(namespace: Namespace) -> NamespaceNested: is_incomplete=namespace.is_incomplete, provenance_type=namespace.provenance_type, home_id=namespace.home_id, - depth=namespace.depth, - immediate_upstream=namespace.immediate_upstream, - immediate_downstream=namespace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -472,6 +424,7 @@ def _namespace_from_nested(nested: NamespaceNested) -> Namespace: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -480,9 +433,6 @@ def _namespace_from_nested(nested: NamespaceNested) -> Namespace: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_namespace_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -515,6 +465,9 @@ def _namespace_from_nested_bytes(data: bytes, serde: Serde) -> Namespace: Namespace.METRICS = RelationField("metrics") Namespace.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Namespace.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Namespace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Namespace.MEANINGS = RelationField("meanings") Namespace.MC_MONITORS = RelationField("mcMonitors") Namespace.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/no_sql.py b/pyatlan_v9/model/assets/no_sql.py index d3371dde8..531b6f8cd 100644 --- a/pyatlan_v9/model/assets/no_sql.py +++ b/pyatlan_v9/model/assets/no_sql.py @@ -38,10 +38,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedNoSQL from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,6 +80,7 @@ class NoSQL(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +98,8 @@ class NoSQL(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "NoSQL" + 
no_sql_schema_definition: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="noSQLSchemaDefinition" ) @@ -151,6 +154,11 @@ class NoSQL(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -206,66 +214,6 @@ class NoSQL(Asset): def __post_init__(self) -> None: self.type_name = "NoSQL" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this NoSQL instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"NoSQL validation failed: {errors}") - - def minimize(self) -> "NoSQL": - """ - Return a minimal copy of this NoSQL with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new NoSQL with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new NoSQL instance with only the minimum required fields. - """ - self.validate() - return NoSQL(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedNoSQL": - """ - Create a :class:`RelatedNoSQL` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedNoSQL reference to this asset. - """ - if self.guid is not UNSET: - return RelatedNoSQL(guid=self.guid) - return RelatedNoSQL(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -379,6 +327,11 @@ class NoSQLRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -465,6 +418,7 @@ class NoSQLNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -532,9 +486,6 @@ def _no_sql_to_nested(no_sql: NoSQL) -> NoSQLNested: is_incomplete=no_sql.is_incomplete, provenance_type=no_sql.provenance_type, home_id=no_sql.home_id, - depth=no_sql.depth, - immediate_upstream=no_sql.immediate_upstream, - immediate_downstream=no_sql.immediate_downstream, 
attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -564,6 +515,7 @@ def _no_sql_from_nested(nested: NoSQLNested) -> NoSQL: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -572,9 +524,6 @@ def _no_sql_from_nested(nested: NoSQLNested) -> NoSQL: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_no_sql_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -615,6 +564,9 @@ def _no_sql_from_nested_bytes(data: bytes, serde: Serde) -> NoSQL: NoSQL.METRICS = RelationField("metrics") NoSQL.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") NoSQL.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +NoSQL.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) NoSQL.MEANINGS = RelationField("meanings") NoSQL.MC_MONITORS = RelationField("mcMonitors") NoSQL.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/notebook.py b/pyatlan_v9/model/assets/notebook.py index 8523273c9..035c3ac3b 100644 --- a/pyatlan_v9/model/assets/notebook.py +++ b/pyatlan_v9/model/assets/notebook.py @@ -40,10 +40,10 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor -from 
.notebook_related import RelatedNotebook from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable @@ -78,6 +78,7 @@ class Notebook(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class Notebook(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Notebook" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class Notebook(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class Notebook(Asset): def __post_init__(self) -> None: self.type_name = "Notebook" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Notebook instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Notebook validation failed: {errors}") - - def minimize(self) -> "Notebook": - """ - Return a minimal copy of this Notebook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Notebook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Notebook instance with only the minimum required fields. - """ - self.validate() - return Notebook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedNotebook": - """ - Create a :class:`RelatedNotebook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedNotebook reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedNotebook(guid=self.guid) - return RelatedNotebook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class NotebookRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class NotebookNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _notebook_to_nested(notebook: Notebook) -> NotebookNested: is_incomplete=notebook.is_incomplete, provenance_type=notebook.provenance_type, home_id=notebook.home_id, - depth=notebook.depth, - immediate_upstream=notebook.immediate_upstream, - immediate_downstream=notebook.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -552,6 +503,7 @@ def _notebook_from_nested(nested: NotebookNested) -> Notebook: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -560,9 +512,6 @@ def _notebook_from_nested(nested: NotebookNested) -> Notebook: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_notebook_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -600,6 +549,9 @@ def _notebook_from_nested_bytes(data: bytes, serde: Serde) -> Notebook: Notebook.METRICS = RelationField("metrics") Notebook.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Notebook.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Notebook.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Notebook.MEANINGS = RelationField("meanings") Notebook.MC_MONITORS = RelationField("mcMonitors") Notebook.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/object_store.py b/pyatlan_v9/model/assets/object_store.py index b81995095..d3d8b68bd 100644 --- a/pyatlan_v9/model/assets/object_store.py +++ b/pyatlan_v9/model/assets/object_store.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedObjectStore from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +78,7 @@ class ObjectStore(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class ObjectStore(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = 
"ObjectStore" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class ObjectStore(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class ObjectStore(Asset): def __post_init__(self) -> None: self.type_name = "ObjectStore" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ObjectStore instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ObjectStore validation failed: {errors}") - - def minimize(self) -> "ObjectStore": - """ - Return a minimal copy of this ObjectStore with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ObjectStore with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ObjectStore instance with only the minimum required fields. - """ - self.validate() - return ObjectStore(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedObjectStore": - """ - Create a :class:`RelatedObjectStore` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedObjectStore reference to this asset. - """ - if self.guid is not UNSET: - return RelatedObjectStore(guid=self.guid) - return RelatedObjectStore(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class ObjectStoreRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class ObjectStoreNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -520,9 +474,6 @@ def _object_store_to_nested(object_store: ObjectStore) -> ObjectStoreNested: is_incomplete=object_store.is_incomplete, provenance_type=object_store.provenance_type, home_id=object_store.home_id, - 
depth=object_store.depth, - immediate_upstream=object_store.immediate_upstream, - immediate_downstream=object_store.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -554,6 +505,7 @@ def _object_store_from_nested(nested: ObjectStoreNested) -> ObjectStore: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -562,9 +514,6 @@ def _object_store_from_nested(nested: ObjectStoreNested) -> ObjectStore: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_object_store_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -606,6 +555,9 @@ def _object_store_from_nested_bytes(data: bytes, serde: Serde) -> ObjectStore: ObjectStore.METRICS = RelationField("metrics") ObjectStore.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ObjectStore.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ObjectStore.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ObjectStore.MEANINGS = RelationField("meanings") ObjectStore.MC_MONITORS = RelationField("mcMonitors") ObjectStore.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/partial.py b/pyatlan_v9/model/assets/partial.py index 8da66e162..b4c6f816e 100644 --- a/pyatlan_v9/model/assets/partial.py +++ b/pyatlan_v9/model/assets/partial.py @@ -41,10 +41,11 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related 
import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor -from .partial_related import RelatedPartial, RelatedPartialField, RelatedPartialObject +from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -83,6 +84,7 @@ class Partial(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +102,8 @@ class Partial(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Partial" + partial_structure_json: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="partialStructureJSON" ) @@ -166,6 +170,11 @@ class Partial(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -221,66 +230,6 @@ class Partial(Asset): def __post_init__(self) -> None: self.type_name = "Partial" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Partial instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Partial validation failed: {errors}") - - def minimize(self) -> "Partial": - """ - Return a minimal copy of this Partial with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Partial with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Partial instance with only the minimum required fields. - """ - self.validate() - return Partial(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPartial": - """ - Create a :class:`RelatedPartial` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPartial reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPartial(guid=self.guid) - return RelatedPartial(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -406,6 +355,11 @@ class PartialRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -492,6 +446,7 @@ class PartialNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -569,9 +524,6 @@ def _partial_to_nested(partial: Partial) -> PartialNested: is_incomplete=partial.is_incomplete, provenance_type=partial.provenance_type, home_id=partial.home_id, - depth=partial.depth, - immediate_upstream=partial.immediate_upstream, - immediate_downstream=partial.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -601,6 +553,7 @@ def _partial_from_nested(nested: PartialNested) -> Partial: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -609,9 +562,6 @@ def _partial_from_nested(nested: PartialNested) -> Partial: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_partial_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -662,6 +612,9 @@ def _partial_from_nested_bytes(data: bytes, serde: Serde) -> Partial: Partial.METRICS = RelationField("metrics") Partial.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Partial.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Partial.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Partial.MEANINGS = RelationField("meanings") Partial.MC_MONITORS = RelationField("mcMonitors") Partial.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/partial_field.py b/pyatlan_v9/model/assets/partial_field.py index 1a3705386..ecc12df46 100644 --- a/pyatlan_v9/model/assets/partial_field.py +++ b/pyatlan_v9/model/assets/partial_field.py @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +87,7 @@ class PartialField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class PartialField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PartialField" + partial_data_type: Union[str, None, UnsetType] = UNSET """Type of data captured as 
values in the field.""" @@ -173,6 +177,11 @@ class PartialField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -237,70 +246,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PartialField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"PartialField validation failed: {errors}") - - def minimize(self) -> "PartialField": - """ - Return a minimal copy of this PartialField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PartialField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PartialField instance with only the minimum required fields. - """ - self.validate() - return PartialField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPartialField": - """ - Create a :class:`RelatedPartialField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPartialField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPartialField(guid=self.guid) - return RelatedPartialField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -429,6 +374,11 @@ class PartialFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -520,6 +470,7 @@ class PartialFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -602,9 +553,6 @@ def _partial_field_to_nested(partial_field: PartialField) -> PartialFieldNested: is_incomplete=partial_field.is_incomplete, provenance_type=partial_field.provenance_type, home_id=partial_field.home_id, - depth=partial_field.depth, - immediate_upstream=partial_field.immediate_upstream, - immediate_downstream=partial_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -638,6 +586,7 @@ def _partial_field_from_nested(nested: PartialFieldNested) -> PartialField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -646,9 +595,6 @@ def _partial_field_from_nested(nested: PartialFieldNested) -> PartialField: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_partial_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -706,6 +652,9 @@ def _partial_field_from_nested_bytes(data: bytes, serde: Serde) -> PartialField: PartialField.METRICS = RelationField("metrics") PartialField.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PartialField.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PartialField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PartialField.MEANINGS = RelationField("meanings") PartialField.MC_MONITORS = RelationField("mcMonitors") PartialField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/partial_object.py b/pyatlan_v9/model/assets/partial_object.py index 533f1a0b5..309dec892 100644 --- a/pyatlan_v9/model/assets/partial_object.py +++ b/pyatlan_v9/model/assets/partial_object.py @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class PartialObject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class PartialObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] 
= None + type_name: Union[str, UnsetType] = "PartialObject" + partial_structure_json: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="partialStructureJSON" ) @@ -169,6 +173,11 @@ class PartialObject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -233,70 +242,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PartialObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"PartialObject validation failed: {errors}") - - def minimize(self) -> "PartialObject": - """ - Return a minimal copy of this PartialObject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PartialObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PartialObject instance with only the minimum required fields. - """ - self.validate() - return PartialObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPartialObject": - """ - Create a :class:`RelatedPartialObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPartialObject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPartialObject(guid=self.guid) - return RelatedPartialObject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -422,6 +367,11 @@ class PartialObjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -513,6 +463,7 @@ class PartialObjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -593,9 +544,6 @@ def _partial_object_to_nested(partial_object: PartialObject) -> PartialObjectNes is_incomplete=partial_object.is_incomplete, provenance_type=partial_object.provenance_type, home_id=partial_object.home_id, - depth=partial_object.depth, - immediate_upstream=partial_object.immediate_upstream, - immediate_downstream=partial_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -629,6 +577,7 @@ def _partial_object_from_nested(nested: PartialObjectNested) -> PartialObject: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -637,9 +586,6 @@ def _partial_object_from_nested(nested: PartialObjectNested) -> PartialObject: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_partial_object_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -698,6 +644,9 @@ def _partial_object_from_nested_bytes(data: bytes, serde: Serde) -> PartialObjec PartialObject.METRICS = RelationField("metrics") PartialObject.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PartialObject.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PartialObject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PartialObject.MEANINGS = RelationField("meanings") PartialObject.MC_MONITORS = RelationField("mcMonitors") PartialObject.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/persona.py b/pyatlan_v9/model/assets/persona.py index 31cfd4ccd..e64da2b9c 100644 --- a/pyatlan_v9/model/assets/persona.py +++ b/pyatlan_v9/model/assets/persona.py @@ -1,12 +1,20 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 # SPDX-License-Identifier: Apache-2.0 -# Copyright 2026 Atlan Pte. Ltd. +# Copyright 2024 Atlan Pte. Ltd. -"""Persona asset model for pyatlan_v9.""" +""" +Persona asset model with flattened inheritance. 
+ +This module provides: +- Persona: Flat asset class (easy to use) +- PersonaAttributes: Nested attributes struct (extends AssetAttributes) +- PersonaNested: Nested API format struct +""" from __future__ import annotations -from typing import Any, ClassVar, Set, Union -from warnings import warn +from typing import Any, ClassVar, List, Set, Union from msgspec import UNSET, UnsetType @@ -20,45 +28,204 @@ PersonaMetadataAction, ) from pyatlan_v9.model.conversion_utils import ( - build_attributes_kwargs, - build_flat_kwargs, + categorize_relationships, merge_relationships, ) from pyatlan_v9.model.serde import Serde, get_serde from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .asset import Asset, AssetAttributes, AssetNested -from .auth_policy import AuthPolicy +from .access_control_related import RelatedAuthPolicy +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= @register_asset class 
Persona(Asset): - """Persona asset in Atlan — an access-control construct scoping - visibility for users/groups across connections and glossaries.""" + """ + Atlan Type representing a Persona model + """ PERSONA_GROUPS: ClassVar[Any] = None PERSONA_USERS: ClassVar[Any] = None ROLE_ID: ClassVar[Any] = None - IS_ACCESS_CONTROL_ENABLED: ClassVar[Any] = None + CHANNEL_LINK: ClassVar[Any] = None + DEFAULT_NAVIGATION: ClassVar[Any] = None + DENY_ASSET_FILTERS: ClassVar[Any] = None + DENY_ASSET_METADATA_TYPES: ClassVar[Any] = None + DENY_ASSET_TABS: ClassVar[Any] = None + DENY_ASSET_TYPES: ClassVar[Any] = None DENY_CUSTOM_METADATA_GUIDS: ClassVar[Any] = None + DENY_NAVIGATION_PAGES: ClassVar[Any] = None + DENY_SIDEBAR_TABS: ClassVar[Any] = None + DISPLAY_PREFERENCES: ClassVar[Any] = None + IS_ACCESS_CONTROL_ENABLED: ClassVar[Any] = None + POLICIES: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None type_name: Union[str, UnsetType] = "Persona" - persona_groups: Union[Set[str], None, UnsetType] = UNSET - persona_users: Union[Set[str], None, UnsetType] = UNSET + + persona_groups: Union[List[str], None, UnsetType] = UNSET + 
"""TBC""" + + persona_users: Union[List[str], None, UnsetType] = UNSET + """TBC""" + role_id: Union[str, None, UnsetType] = UNSET - is_access_control_enabled: Union[bool, None, UnsetType] = UNSET - deny_custom_metadata_guids: Union[Set[str], None, UnsetType] = UNSET - deny_asset_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_asset_filters: Union[Set[str], None, UnsetType] = UNSET - deny_asset_types: Union[Set[str], None, UnsetType] = UNSET - deny_sidebar_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_navigation_pages: Union[Set[str], None, UnsetType] = UNSET - default_navigation: Union[str, None, UnsetType] = UNSET - display_preferences: Union[Set[str], None, UnsetType] = UNSET + """TBC""" + channel_link: Union[str, None, UnsetType] = UNSET - deny_asset_metadata_types: Union[Set[str], None, UnsetType] = UNSET - policies: Union[list[AuthPolicy], None, UnsetType] = UNSET + """TBC""" + + default_navigation: Union[str, None, UnsetType] = UNSET + """TBC""" + + deny_asset_filters: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_metadata_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_custom_metadata_guids: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_navigation_pages: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_sidebar_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + display_preferences: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + is_access_control_enabled: Union[bool, None, UnsetType] = UNSET + """TBC""" + + policies: Union[List[RelatedAuthPolicy], None, UnsetType] = UNSET + """Access control entity to which this policy applies.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, 
UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, 
UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "Persona" @classmethod @init_guid @@ -219,17 +386,46 @@ def create_domain_policy( def trim_to_required(self) -> "Persona": return Persona.updater(qualified_name=self.qualified_name, name=self.name) + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= + def to_json(self, nested: bool = True, serde: Serde | None = None) -> str: + """ + Convert to JSON string using optimized nested struct serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. 
+ + Returns: + JSON string representation + """ if serde is None: serde = get_serde() if nested: - return _persona_to_nested_bytes(self, serde).decode("utf-8") - return serde.encode(self).decode("utf-8") + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _persona_to_nested_bytes(self, serde) @staticmethod - def from_json( - json_data: Union[str, bytes], serde: Serde | None = None - ) -> "Persona": + def from_json(json_data: str | bytes, serde: Serde | None = None) -> Persona: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. + + Returns: + Persona instance + """ if isinstance(json_data, str): json_data = json_data.encode("utf-8") if serde is None: @@ -237,50 +433,216 @@ def from_json( return _persona_from_nested_bytes(json_data, serde) -# --------------------------------------------------------------------------- -# Deferred field descriptor initialization -# --------------------------------------------------------------------------- -from pyatlan.model.fields.atlan_fields import BooleanField, KeywordField # noqa: E402 - -Persona.PERSONA_GROUPS = KeywordField("personaGroups", "personaGroups") -Persona.PERSONA_USERS = KeywordField("personaUsers", "personaUsers") -Persona.ROLE_ID = KeywordField("roleId", "roleId") -Persona.IS_ACCESS_CONTROL_ENABLED = BooleanField( - "isAccessControlEnabled", "isAccessControlEnabled" -) -Persona.DENY_CUSTOM_METADATA_GUIDS = KeywordField( - "denyCustomMetadataGuids", "denyCustomMetadataGuids" -) - - # ============================================================================= # NESTED FORMAT CLASSES 
# ============================================================================= class PersonaAttributes(AssetAttributes): - """Persona-specific nested attributes.""" + """Persona-specific attributes for nested API format.""" + + persona_groups: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + persona_users: Union[List[str], None, UnsetType] = UNSET + """TBC""" - persona_groups: Union[Set[str], None, UnsetType] = UNSET - persona_users: Union[Set[str], None, UnsetType] = UNSET role_id: Union[str, None, UnsetType] = UNSET - is_access_control_enabled: Union[bool, None, UnsetType] = UNSET - deny_custom_metadata_guids: Union[Set[str], None, UnsetType] = UNSET - deny_asset_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_asset_filters: Union[Set[str], None, UnsetType] = UNSET - deny_asset_types: Union[Set[str], None, UnsetType] = UNSET - deny_sidebar_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_navigation_pages: Union[Set[str], None, UnsetType] = UNSET - default_navigation: Union[str, None, UnsetType] = UNSET - display_preferences: Union[Set[str], None, UnsetType] = UNSET + """TBC""" + channel_link: Union[str, None, UnsetType] = UNSET - deny_asset_metadata_types: Union[Set[str], None, UnsetType] = UNSET + """TBC""" + + default_navigation: Union[str, None, UnsetType] = UNSET + """TBC""" + + deny_asset_filters: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_metadata_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_custom_metadata_guids: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_navigation_pages: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_sidebar_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + display_preferences: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + is_access_control_enabled: 
Union[bool, None, UnsetType] = UNSET + """TBC""" + + +class PersonaRelationshipAttributes(AssetRelationshipAttributes): + """Persona-specific relationship attributes for nested API format.""" + + policies: Union[List[RelatedAuthPolicy], None, UnsetType] = UNSET + """Access control entity to which this policy applies.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: 
Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" class PersonaNested(AssetNested): - """Persona entity in nested API format.""" + """Persona in nested API format for high-performance serialization.""" attributes: Union[PersonaAttributes, UnsetType] = UNSET + relationship_attributes: Union[PersonaRelationshipAttributes, UnsetType] = UNSET + append_relationship_attributes: Union[PersonaRelationshipAttributes, UnsetType] = ( + UNSET + ) + remove_relationship_attributes: Union[PersonaRelationshipAttributes, UnsetType] = ( + UNSET + ) + + +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_PERSONA_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, + "policies", + "anomalo_checks", + "application", + "application_field", + "data_contract_latest", + "data_contract_latest_certified", + "output_port_data_products", + "input_port_data_products", + "metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + 
"mc_monitors", + "mc_incidents", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + "soda_checks", +] + + +def _populate_persona_attrs(attrs: PersonaAttributes, obj: Persona) -> None: + """Populate Persona-specific attributes on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.persona_groups = obj.persona_groups + attrs.persona_users = obj.persona_users + attrs.role_id = obj.role_id + attrs.channel_link = obj.channel_link + attrs.default_navigation = obj.default_navigation + attrs.deny_asset_filters = obj.deny_asset_filters + attrs.deny_asset_metadata_types = obj.deny_asset_metadata_types + attrs.deny_asset_tabs = obj.deny_asset_tabs + attrs.deny_asset_types = obj.deny_asset_types + attrs.deny_custom_metadata_guids = obj.deny_custom_metadata_guids + attrs.deny_navigation_pages = obj.deny_navigation_pages + attrs.deny_sidebar_tabs = obj.deny_sidebar_tabs + attrs.display_preferences = obj.display_preferences + attrs.is_access_control_enabled = obj.is_access_control_enabled + + +def _extract_persona_attrs(attrs: PersonaAttributes) -> dict: + """Extract all Persona attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["persona_groups"] = attrs.persona_groups + result["persona_users"] = attrs.persona_users + result["role_id"] = attrs.role_id + result["channel_link"] = attrs.channel_link + result["default_navigation"] = attrs.default_navigation + result["deny_asset_filters"] = attrs.deny_asset_filters + result["deny_asset_metadata_types"] = attrs.deny_asset_metadata_types + result["deny_asset_tabs"] = attrs.deny_asset_tabs + result["deny_asset_types"] = attrs.deny_asset_types + result["deny_custom_metadata_guids"] = attrs.deny_custom_metadata_guids + result["deny_navigation_pages"] = attrs.deny_navigation_pages + result["deny_sidebar_tabs"] = attrs.deny_sidebar_tabs + result["display_preferences"] = attrs.display_preferences + 
result["is_access_control_enabled"] = attrs.is_access_control_enabled + return result # ============================================================================= @@ -289,8 +651,13 @@ class PersonaNested(AssetNested): def _persona_to_nested(persona: Persona) -> PersonaNested: - attrs_kwargs = build_attributes_kwargs(persona, PersonaAttributes) - attrs = PersonaAttributes(**attrs_kwargs) + """Convert flat Persona to nested format.""" + attrs = PersonaAttributes() + _populate_persona_attrs(attrs, persona) + # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE) + replace_rels, append_rels, remove_rels = categorize_relationships( + persona, _PERSONA_REL_FIELDS, PersonaRelationshipAttributes + ) return PersonaNested( guid=persona.guid, type_name=persona.type_name, @@ -312,28 +679,112 @@ def _persona_to_nested(persona: Persona) -> PersonaNested: provenance_type=persona.provenance_type, home_id=persona.home_id, attributes=attrs, + relationship_attributes=replace_rels, + append_relationship_attributes=append_rels, + remove_relationship_attributes=remove_rels, ) def _persona_from_nested(nested: PersonaNested) -> Persona: + """Convert nested format to flat Persona.""" attrs = nested.attributes if nested.attributes is not UNSET else PersonaAttributes() + # Merge relationships from all three buckets merged_rels = merge_relationships( nested.relationship_attributes, nested.append_relationship_attributes, nested.remove_relationship_attributes, - [], - object, + _PERSONA_REL_FIELDS, + PersonaRelationshipAttributes, ) - kwargs = build_flat_kwargs( - nested, attrs, merged_rels, AssetNested, PersonaAttributes + return Persona( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + created_by=nested.created_by, + updated_by=nested.updated_by, + classifications=nested.classifications, + classification_names=nested.classification_names, + 
meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + custom_attributes=nested.custom_attributes, + pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_persona_attrs(attrs), + # Merged relationship attributes + **merged_rels, ) - return Persona(**kwargs) def _persona_to_nested_bytes(persona: Persona, serde: Serde) -> bytes: + """Convert flat Persona to nested JSON bytes.""" return serde.encode(_persona_to_nested(persona)) def _persona_from_nested_bytes(data: bytes, serde: Serde) -> Persona: + """Convert nested JSON bytes to flat Persona.""" nested = serde.decode(data, PersonaNested) return _persona_from_nested(nested) + + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# --------------------------------------------------------------------------- +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, + KeywordField, + RelationField, +) + +Persona.PERSONA_GROUPS = KeywordField("personaGroups", "personaGroups") +Persona.PERSONA_USERS = KeywordField("personaUsers", "personaUsers") +Persona.ROLE_ID = KeywordField("roleId", "roleId") +Persona.CHANNEL_LINK = KeywordField("channelLink", "channelLink") +Persona.DEFAULT_NAVIGATION = KeywordField("defaultNavigation", "defaultNavigation") +Persona.DENY_ASSET_FILTERS = KeywordField("denyAssetFilters", "denyAssetFilters") +Persona.DENY_ASSET_METADATA_TYPES = KeywordField( + "denyAssetMetadataTypes", "denyAssetMetadataTypes" +) +Persona.DENY_ASSET_TABS = KeywordField("denyAssetTabs", "denyAssetTabs") +Persona.DENY_ASSET_TYPES = KeywordField("denyAssetTypes", "denyAssetTypes") +Persona.DENY_CUSTOM_METADATA_GUIDS = KeywordField( + "denyCustomMetadataGuids", "denyCustomMetadataGuids" +) +Persona.DENY_NAVIGATION_PAGES = KeywordField( + "denyNavigationPages", 
"denyNavigationPages" +) +Persona.DENY_SIDEBAR_TABS = KeywordField("denySidebarTabs", "denySidebarTabs") +Persona.DISPLAY_PREFERENCES = KeywordField("displayPreferences", "displayPreferences") +Persona.IS_ACCESS_CONTROL_ENABLED = BooleanField( + "isAccessControlEnabled", "isAccessControlEnabled" +) +Persona.POLICIES = RelationField("policies") +Persona.ANOMALO_CHECKS = RelationField("anomaloChecks") +Persona.APPLICATION = RelationField("application") +Persona.APPLICATION_FIELD = RelationField("applicationField") +Persona.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +Persona.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +Persona.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +Persona.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +Persona.METRICS = RelationField("metrics") +Persona.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +Persona.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Persona.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) +Persona.MEANINGS = RelationField("meanings") +Persona.MC_MONITORS = RelationField("mcMonitors") +Persona.MC_INCIDENTS = RelationField("mcIncidents") +Persona.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +Persona.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +Persona.FILES = RelationField("files") +Persona.LINKS = RelationField("links") +Persona.README = RelationField("readme") +Persona.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +Persona.SODA_CHECKS = RelationField("sodaChecks") diff --git a/pyatlan_v9/model/assets/power_bi.py b/pyatlan_v9/model/assets/power_bi.py index cb86f3314..b3373254f 100644 --- a/pyatlan_v9/model/assets/power_bi.py +++ b/pyatlan_v9/model/assets/power_bi.py @@ -41,11 +41,11 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import 
RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import RelatedPowerBI from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -85,6 +85,7 @@ class PowerBI(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +103,8 @@ class PowerBI(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBI" + power_bi_is_hidden: Union[bool, None, UnsetType] = msgspec.field( default=UNSET, name="powerBIIsHidden" ) @@ -181,6 +184,11 @@ class PowerBI(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -236,66 +244,6 @@ class PowerBI(Asset): def __post_init__(self) -> None: self.type_name = "PowerBI" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - 
""" - Dry-run validation of this PowerBI instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"PowerBI validation failed: {errors}") - - def minimize(self) -> "PowerBI": - """ - Return a minimal copy of this PowerBI with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBI with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBI instance with only the minimum required fields. - """ - self.validate() - return PowerBI(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBI": - """ - Create a :class:`RelatedPowerBI` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBI reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBI(guid=self.guid) - return RelatedPowerBI(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -434,6 +382,11 @@ class PowerBIRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -520,6 +473,7 @@ class PowerBINested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -597,9 +551,6 @@ def _power_bi_to_nested(power_bi: PowerBI) -> PowerBINested: is_incomplete=power_bi.is_incomplete, provenance_type=power_bi.provenance_type, home_id=power_bi.home_id, - depth=power_bi.depth, - immediate_upstream=power_bi.immediate_upstream, - immediate_downstream=power_bi.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -629,6 +580,7 @@ def _power_bi_from_nested(nested: PowerBINested) -> PowerBI: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -637,9 +589,6 @@ def _power_bi_from_nested(nested: PowerBINested) -> PowerBI: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -695,6 +644,9 @@ def _power_bi_from_nested_bytes(data: bytes, serde: Serde) -> PowerBI: PowerBI.METRICS = RelationField("metrics") PowerBI.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBI.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBI.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBI.MEANINGS = RelationField("meanings") PowerBI.MC_MONITORS = RelationField("mcMonitors") PowerBI.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_app.py b/pyatlan_v9/model/assets/power_bi_app.py index 370fa8b0c..bb61f9915 100644 --- a/pyatlan_v9/model/assets/power_bi_app.py +++ b/pyatlan_v9/model/assets/power_bi_app.py @@ -41,15 +41,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import ( - RelatedPowerBIApp, - RelatedPowerBIDashboard, - RelatedPowerBIReport, -) +from .power_bi_related import RelatedPowerBIDashboard, RelatedPowerBIReport from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -92,6 +89,7 @@ class PowerBIApp(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +109,8 @@ class PowerBIApp(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIApp" + power_bi_app_id: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="powerBIAppId" ) @@ -205,6 +205,11 @@ class PowerBIApp(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -270,66 +275,6 @@ class PowerBIApp(Asset): def __post_init__(self) -> None: self.type_name = "PowerBIApp" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIApp instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"PowerBIApp validation failed: {errors}") - - def minimize(self) -> "PowerBIApp": - """ - Return a minimal copy of this PowerBIApp with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIApp with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIApp instance with only the minimum required fields. - """ - self.validate() - return PowerBIApp(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIApp": - """ - Create a :class:`RelatedPowerBIApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIApp reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIApp(guid=self.guid) - return RelatedPowerBIApp(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -483,6 +428,11 @@ class PowerBIAppRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -579,6 +529,7 @@ class PowerBIAppNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -664,9 +615,6 @@ def _power_bi_app_to_nested(power_bi_app: PowerBIApp) -> PowerBIAppNested: is_incomplete=power_bi_app.is_incomplete, provenance_type=power_bi_app.provenance_type, home_id=power_bi_app.home_id, - depth=power_bi_app.depth, - immediate_upstream=power_bi_app.immediate_upstream, - immediate_downstream=power_bi_app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -698,6 +646,7 @@ def _power_bi_app_from_nested(nested: PowerBIAppNested) -> PowerBIApp: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -706,9 +655,6 @@ def _power_bi_app_from_nested(nested: PowerBIAppNested) -> PowerBIApp: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_app_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -771,6 +717,9 @@ def _power_bi_app_from_nested_bytes(data: bytes, serde: Serde) -> PowerBIApp: PowerBIApp.METRICS = RelationField("metrics") PowerBIApp.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIApp.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIApp.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIApp.MEANINGS = RelationField("meanings") PowerBIApp.MC_MONITORS = RelationField("mcMonitors") PowerBIApp.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_column.py b/pyatlan_v9/model/assets/power_bi_column.py index 616b31081..4100e4303 100644 --- a/pyatlan_v9/model/assets/power_bi_column.py +++ b/pyatlan_v9/model/assets/power_bi_column.py @@ -42,15 +42,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import ( - RelatedPowerBIColumn, - RelatedPowerBIMeasure, - RelatedPowerBITable, -) +from .power_bi_related import RelatedPowerBIMeasure, RelatedPowerBITable from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -96,6 +93,7 @@ class PowerBIColumn(Asset): 
METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -115,6 +113,8 @@ class PowerBIColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIColumn" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this column exists.""" @@ -220,6 +220,11 @@ class PowerBIColumn(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -291,78 +296,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.table is UNSET: - errors.append("table is required for creation") - if self.power_bi_table_qualified_name is UNSET: - errors.append("power_bi_table_qualified_name is required for creation") - if self.dataset_qualified_name is UNSET: - errors.append("dataset_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIColumn validation failed: {errors}") - - def minimize(self) -> "PowerBIColumn": - """ - Return a minimal copy of this PowerBIColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIColumn instance with only the minimum required fields. - """ - self.validate() - return PowerBIColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIColumn": - """ - Create a :class:`RelatedPowerBIColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIColumn(guid=self.guid) - return RelatedPowerBIColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -527,6 +460,11 @@ class PowerBIColumnRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -623,6 +561,7 @@ class PowerBIColumnNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -718,9 +657,6 @@ def _power_bi_column_to_nested(power_bi_column: PowerBIColumn) -> PowerBIColumnN is_incomplete=power_bi_column.is_incomplete, provenance_type=power_bi_column.provenance_type, home_id=power_bi_column.home_id, - depth=power_bi_column.depth, - immediate_upstream=power_bi_column.immediate_upstream, - immediate_downstream=power_bi_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -754,6 +690,7 @@ def _power_bi_column_from_nested(nested: PowerBIColumnNested) -> PowerBIColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -762,9 +699,6 @@ def _power_bi_column_from_nested(nested: PowerBIColumnNested) -> PowerBIColumn: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -850,6 +784,9 @@ def _power_bi_column_from_nested_bytes(data: bytes, serde: Serde) -> PowerBIColu PowerBIColumn.METRICS = RelationField("metrics") PowerBIColumn.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIColumn.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIColumn.MEANINGS = RelationField("meanings") PowerBIColumn.MC_MONITORS = RelationField("mcMonitors") PowerBIColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_dashboard.py b/pyatlan_v9/model/assets/power_bi_dashboard.py index 1b4af4c1f..f44028572 100644 --- a/pyatlan_v9/model/assets/power_bi_dashboard.py +++ b/pyatlan_v9/model/assets/power_bi_dashboard.py @@ -42,13 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .power_bi_related import ( RelatedPowerBIApp, - RelatedPowerBIDashboard, RelatedPowerBITile, RelatedPowerBIWorkspace, ) @@ -94,6 +94,7 @@ class PowerBIDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -114,6 +115,8 @@ class PowerBIDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIDashboard" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this dashboard exists.""" @@ -202,6 +205,11 @@ class PowerBIDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -274,74 +282,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workspace is UNSET: - errors.append("workspace is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIDashboard validation failed: {errors}") - - def minimize(self) -> "PowerBIDashboard": - """ - Return a minimal copy of this PowerBIDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDashboard instance with only the minimum required fields. - """ - self.validate() - return PowerBIDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIDashboard": - """ - Create a :class:`RelatedPowerBIDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIDashboard(guid=self.guid) - return RelatedPowerBIDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -491,6 +431,11 @@ class PowerBIDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -590,6 +535,7 @@ class PowerBIDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -682,9 +628,6 @@ def _power_bi_dashboard_to_nested( is_incomplete=power_bi_dashboard.is_incomplete, provenance_type=power_bi_dashboard.provenance_type, home_id=power_bi_dashboard.home_id, - depth=power_bi_dashboard.depth, - immediate_upstream=power_bi_dashboard.immediate_upstream, - immediate_downstream=power_bi_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -718,6 +661,7 @@ def _power_bi_dashboard_from_nested(nested: PowerBIDashboardNested) -> PowerBIDa updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -726,9 +670,6 @@ def _power_bi_dashboard_from_nested(nested: PowerBIDashboardNested) -> PowerBIDa 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -805,6 +746,9 @@ def _power_bi_dashboard_from_nested_bytes( PowerBIDashboard.METRICS = RelationField("metrics") PowerBIDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIDashboard.MEANINGS = RelationField("meanings") PowerBIDashboard.MC_MONITORS = RelationField("mcMonitors") PowerBIDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_dataflow.py b/pyatlan_v9/model/assets/power_bi_dataflow.py index e7181228d..b7e0e3fcb 100644 --- a/pyatlan_v9/model/assets/power_bi_dataflow.py +++ b/pyatlan_v9/model/assets/power_bi_dataflow.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -98,6 +99,7 @@ class PowerBIDataflow(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -123,6 +125,8 @@ class PowerBIDataflow(Asset): INPUT_TO_SPARK_JOBS: 
ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIDataflow" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this dataflow exists.""" @@ -223,6 +227,11 @@ class PowerBIDataflow(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -318,74 +327,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDataflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workspace is UNSET: - errors.append("workspace is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIDataflow validation failed: {errors}") - - def minimize(self) -> "PowerBIDataflow": - """ - Return a minimal copy of this PowerBIDataflow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDataflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDataflow instance with only the minimum required fields. - """ - self.validate() - return PowerBIDataflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIDataflow": - """ - Create a :class:`RelatedPowerBIDataflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIDataflow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIDataflow(guid=self.guid) - return RelatedPowerBIDataflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -547,6 +488,11 @@ class PowerBIDataflowRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -669,6 +615,7 @@ class PowerBIDataflowNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -782,9 +729,6 @@ def _power_bi_dataflow_to_nested( is_incomplete=power_bi_dataflow.is_incomplete, provenance_type=power_bi_dataflow.provenance_type, home_id=power_bi_dataflow.home_id, - depth=power_bi_dataflow.depth, - immediate_upstream=power_bi_dataflow.immediate_upstream, - immediate_downstream=power_bi_dataflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -818,6 +762,7 @@ def _power_bi_dataflow_from_nested(nested: PowerBIDataflowNested) -> PowerBIData updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -826,9 +771,6 @@ def _power_bi_dataflow_from_nested(nested: PowerBIDataflowNested) -> PowerBIData is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_dataflow_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -911,6 +853,9 @@ def _power_bi_dataflow_from_nested_bytes(data: bytes, serde: Serde) -> PowerBIDa PowerBIDataflow.METRICS = RelationField("metrics") PowerBIDataflow.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIDataflow.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIDataflow.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIDataflow.MEANINGS = RelationField("meanings") PowerBIDataflow.MC_MONITORS = RelationField("mcMonitors") PowerBIDataflow.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_dataflow_entity_column.py b/pyatlan_v9/model/assets/power_bi_dataflow_entity_column.py index 15f489fec..9ba2d3442 100644 --- a/pyatlan_v9/model/assets/power_bi_dataflow_entity_column.py +++ b/pyatlan_v9/model/assets/power_bi_dataflow_entity_column.py @@ -42,11 +42,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import RelatedPowerBIDataflow, RelatedPowerBIDataflowEntityColumn +from .power_bi_related import RelatedPowerBIDataflow from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, 
RelatedLink, RelatedReadme @@ -90,6 +91,7 @@ class PowerBIDataflowEntityColumn(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -108,6 +110,8 @@ class PowerBIDataflowEntityColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIDataflowEntityColumn" + power_bi_dataflow_entity_name: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="powerBIDataflowEntityName" ) @@ -207,6 +211,11 @@ class PowerBIDataflowEntityColumn(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -275,82 +284,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDataflowEntityColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.power_bi_dataflow is UNSET: - errors.append("power_bi_dataflow is required for creation") - if self.power_bi_dataflow_qualified_name is UNSET: - errors.append( - "power_bi_dataflow_qualified_name is required for creation" - ) - if self.power_bi_workspace_qualified_name is UNSET: - errors.append( - "power_bi_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"PowerBIDataflowEntityColumn validation failed: {errors}") - - def minimize(self) -> "PowerBIDataflowEntityColumn": - """ - Return a minimal copy of this PowerBIDataflowEntityColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDataflowEntityColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDataflowEntityColumn instance with only the minimum required fields. - """ - self.validate() - return PowerBIDataflowEntityColumn( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedPowerBIDataflowEntityColumn": - """ - Create a :class:`RelatedPowerBIDataflowEntityColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedPowerBIDataflowEntityColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedPowerBIDataflowEntityColumn(guid=self.guid) - return RelatedPowerBIDataflowEntityColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -511,6 +444,11 @@ class PowerBIDataflowEntityColumnRelationshipAttributes(AssetRelationshipAttribu ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -604,6 +542,7 @@ class PowerBIDataflowEntityColumnNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -706,9 +645,6 @@ def _power_bi_dataflow_entity_column_to_nested( is_incomplete=power_bi_dataflow_entity_column.is_incomplete, provenance_type=power_bi_dataflow_entity_column.provenance_type, home_id=power_bi_dataflow_entity_column.home_id, - depth=power_bi_dataflow_entity_column.depth, - immediate_upstream=power_bi_dataflow_entity_column.immediate_upstream, - immediate_downstream=power_bi_dataflow_entity_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -744,6 +680,7 @@ def _power_bi_dataflow_entity_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, 
business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -752,9 +689,6 @@ def _power_bi_dataflow_entity_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_dataflow_entity_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -854,6 +788,9 @@ def _power_bi_dataflow_entity_column_from_nested_bytes( PowerBIDataflowEntityColumn.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +PowerBIDataflowEntityColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIDataflowEntityColumn.MEANINGS = RelationField("meanings") PowerBIDataflowEntityColumn.MC_MONITORS = RelationField("mcMonitors") PowerBIDataflowEntityColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_dataset.py b/pyatlan_v9/model/assets/power_bi_dataset.py index 81cf8587a..18a129af6 100644 --- a/pyatlan_v9/model/assets/power_bi_dataset.py +++ b/pyatlan_v9/model/assets/power_bi_dataset.py @@ -42,13 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .power_bi_related import ( RelatedPowerBIDataflow, - RelatedPowerBIDataset, RelatedPowerBIDatasource, RelatedPowerBIReport, RelatedPowerBITable, @@ -96,6 +96,7 @@ class PowerBIDataset(Asset): METRICS: ClassVar[Any] = None 
DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -119,6 +120,8 @@ class PowerBIDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIDataset" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this dataset exists.""" @@ -204,6 +207,11 @@ class PowerBIDataset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -283,74 +291,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workspace is UNSET: - errors.append("workspace is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIDataset validation failed: {errors}") - - def minimize(self) -> "PowerBIDataset": - """ - Return a minimal copy of this PowerBIDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDataset instance with only the minimum required fields. - """ - self.validate() - return PowerBIDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIDataset": - """ - Create a :class:`RelatedPowerBIDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIDataset(guid=self.guid) - return RelatedPowerBIDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -495,6 +435,11 @@ class PowerBIDatasetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -601,6 +546,7 @@ class PowerBIDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -694,9 +640,6 @@ def _power_bi_dataset_to_nested( is_incomplete=power_bi_dataset.is_incomplete, provenance_type=power_bi_dataset.provenance_type, home_id=power_bi_dataset.home_id, - depth=power_bi_dataset.depth, - immediate_upstream=power_bi_dataset.immediate_upstream, - immediate_downstream=power_bi_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -730,6 +673,7 @@ def _power_bi_dataset_from_nested(nested: PowerBIDatasetNested) -> PowerBIDatase updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -738,9 +682,6 @@ def _power_bi_dataset_from_nested(nested: PowerBIDatasetNested) -> PowerBIDatase is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -814,6 +755,9 @@ def _power_bi_dataset_from_nested_bytes(data: bytes, serde: Serde) -> PowerBIDat PowerBIDataset.METRICS = RelationField("metrics") PowerBIDataset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIDataset.MEANINGS = RelationField("meanings") PowerBIDataset.MC_MONITORS = RelationField("mcMonitors") PowerBIDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_datasource.py b/pyatlan_v9/model/assets/power_bi_datasource.py index 997f17f13..b82e10410 100644 --- a/pyatlan_v9/model/assets/power_bi_datasource.py +++ b/pyatlan_v9/model/assets/power_bi_datasource.py @@ -42,15 +42,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import ( - RelatedPowerBIDataflow, - RelatedPowerBIDataset, - RelatedPowerBIDatasource, -) +from .power_bi_related import RelatedPowerBIDataflow, RelatedPowerBIDataset from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, 
RelatedReadme @@ -91,6 +88,7 @@ class PowerBIDatasource(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,6 +108,8 @@ class PowerBIDatasource(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIDatasource" + connection_details: Union[Dict[str, str], None, UnsetType] = UNSET """Connection details of the datasource.""" @@ -192,6 +192,11 @@ class PowerBIDatasource(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -263,72 +268,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIDatasource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.datasets is UNSET: - errors.append("datasets is required for creation") - if errors: - raise ValueError(f"PowerBIDatasource validation failed: {errors}") - - def minimize(self) -> "PowerBIDatasource": - """ - Return a minimal copy of this PowerBIDatasource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIDatasource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIDatasource instance with only the minimum required fields. - """ - self.validate() - return PowerBIDatasource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIDatasource": - """ - Create a :class:`RelatedPowerBIDatasource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIDatasource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIDatasource(guid=self.guid) - return RelatedPowerBIDatasource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -472,6 +411,11 @@ class PowerBIDatasourceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -568,6 +512,7 @@ class PowerBIDatasourceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -655,9 +600,6 @@ def _power_bi_datasource_to_nested( is_incomplete=power_bi_datasource.is_incomplete, provenance_type=power_bi_datasource.provenance_type, home_id=power_bi_datasource.home_id, - depth=power_bi_datasource.depth, - immediate_upstream=power_bi_datasource.immediate_upstream, - immediate_downstream=power_bi_datasource.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -693,6 +635,7 @@ def _power_bi_datasource_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -701,9 +644,6 @@ def _power_bi_datasource_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, 
- depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_datasource_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -780,6 +720,9 @@ def _power_bi_datasource_from_nested_bytes( PowerBIDatasource.METRICS = RelationField("metrics") PowerBIDatasource.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIDatasource.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIDatasource.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIDatasource.MEANINGS = RelationField("meanings") PowerBIDatasource.MC_MONITORS = RelationField("mcMonitors") PowerBIDatasource.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_measure.py b/pyatlan_v9/model/assets/power_bi_measure.py index 2c7b05b7b..ec7103ab2 100644 --- a/pyatlan_v9/model/assets/power_bi_measure.py +++ b/pyatlan_v9/model/assets/power_bi_measure.py @@ -42,15 +42,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import ( - RelatedPowerBIColumn, - RelatedPowerBIMeasure, - RelatedPowerBITable, -) +from .power_bi_related import RelatedPowerBIColumn, RelatedPowerBITable from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -94,6 +91,7 @@ class PowerBIMeasure(Asset): METRICS: ClassVar[Any] = None 
DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -113,6 +111,8 @@ class PowerBIMeasure(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIMeasure" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this measure exists.""" @@ -208,6 +208,11 @@ class PowerBIMeasure(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -279,78 +284,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIMeasure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.table is UNSET: - errors.append("table is required for creation") - if self.power_bi_table_qualified_name is UNSET: - errors.append("power_bi_table_qualified_name is required for creation") - if self.dataset_qualified_name is UNSET: - errors.append("dataset_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIMeasure validation failed: {errors}") - - def minimize(self) -> "PowerBIMeasure": - """ - Return a minimal copy of this PowerBIMeasure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIMeasure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIMeasure instance with only the minimum required fields. - """ - self.validate() - return PowerBIMeasure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIMeasure": - """ - Create a :class:`RelatedPowerBIMeasure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIMeasure reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIMeasure(guid=self.guid) - return RelatedPowerBIMeasure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -505,6 +438,11 @@ class PowerBIMeasureRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -601,6 +539,7 @@ class PowerBIMeasureNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -694,9 +633,6 @@ def _power_bi_measure_to_nested( is_incomplete=power_bi_measure.is_incomplete, provenance_type=power_bi_measure.provenance_type, home_id=power_bi_measure.home_id, - depth=power_bi_measure.depth, - immediate_upstream=power_bi_measure.immediate_upstream, - immediate_downstream=power_bi_measure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -730,6 +666,7 @@ def _power_bi_measure_from_nested(nested: PowerBIMeasureNested) -> PowerBIMeasur updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -738,9 +675,6 @@ def _power_bi_measure_from_nested(nested: PowerBIMeasureNested) -> PowerBIMeasur is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_measure_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -822,6 +756,9 @@ def _power_bi_measure_from_nested_bytes(data: bytes, serde: Serde) -> PowerBIMea PowerBIMeasure.METRICS = RelationField("metrics") PowerBIMeasure.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIMeasure.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIMeasure.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIMeasure.MEANINGS = RelationField("meanings") PowerBIMeasure.MC_MONITORS = RelationField("mcMonitors") PowerBIMeasure.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_page.py b/pyatlan_v9/model/assets/power_bi_page.py index 6995ac28c..2446ec0af 100644 --- a/pyatlan_v9/model/assets/power_bi_page.py +++ b/pyatlan_v9/model/assets/power_bi_page.py @@ -42,11 +42,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .power_bi_related import RelatedPowerBIPage, RelatedPowerBIReport +from .power_bi_related import RelatedPowerBIReport from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -88,6 +89,7 @@ class PowerBIPage(Asset): METRICS: ClassVar[Any] = None 
DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +108,8 @@ class PowerBIPage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIPage" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this page exists.""" @@ -191,6 +195,11 @@ class PowerBIPage(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -257,76 +266,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIPage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.report is UNSET: - errors.append("report is required for creation") - if self.report_qualified_name is UNSET: - errors.append("report_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIPage validation failed: {errors}") - - def minimize(self) -> "PowerBIPage": - """ - Return a minimal copy of this PowerBIPage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIPage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIPage instance with only the minimum required fields. - """ - self.validate() - return PowerBIPage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIPage": - """ - Create a :class:`RelatedPowerBIPage` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIPage reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIPage(guid=self.guid) - return RelatedPowerBIPage(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -471,6 +410,11 @@ class PowerBIPageRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -560,6 +504,7 @@ class PowerBIPageNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -644,9 +589,6 @@ def _power_bi_page_to_nested(power_bi_page: PowerBIPage) -> PowerBIPageNested: is_incomplete=power_bi_page.is_incomplete, provenance_type=power_bi_page.provenance_type, home_id=power_bi_page.home_id, - depth=power_bi_page.depth, - immediate_upstream=power_bi_page.immediate_upstream, - immediate_downstream=power_bi_page.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -678,6 +620,7 @@ def _power_bi_page_from_nested(nested: PowerBIPageNested) -> PowerBIPage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -686,9 +629,6 @@ def _power_bi_page_from_nested(nested: PowerBIPageNested) -> PowerBIPage: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_page_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -760,6 +700,9 @@ def _power_bi_page_from_nested_bytes(data: bytes, serde: Serde) -> PowerBIPage: PowerBIPage.METRICS = RelationField("metrics") PowerBIPage.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIPage.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIPage.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIPage.MEANINGS = RelationField("meanings") PowerBIPage.MC_MONITORS = RelationField("mcMonitors") PowerBIPage.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_report.py b/pyatlan_v9/model/assets/power_bi_report.py index 56560765d..9b9bd8eb7 100644 --- a/pyatlan_v9/model/assets/power_bi_report.py +++ b/pyatlan_v9/model/assets/power_bi_report.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ RelatedPowerBIApp, RelatedPowerBIDataset, RelatedPowerBIPage, - RelatedPowerBIReport, RelatedPowerBITile, RelatedPowerBIWorkspace, ) @@ -97,6 +97,7 @@ class PowerBIReport(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: 
ClassVar[Any] = None @@ -119,6 +120,8 @@ class PowerBIReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIReport" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this report exists.""" @@ -210,6 +213,11 @@ class PowerBIReport(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -288,74 +296,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workspace is UNSET: - errors.append("workspace is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBIReport validation failed: {errors}") - - def minimize(self) -> "PowerBIReport": - """ - Return a minimal copy of this PowerBIReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIReport instance with only the minimum required fields. - """ - self.validate() - return PowerBIReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIReport": - """ - Create a :class:`RelatedPowerBIReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIReport(guid=self.guid) - return RelatedPowerBIReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -506,6 +446,11 @@ class PowerBIReportRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -611,6 +556,7 @@ class PowerBIReportNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -705,9 +651,6 @@ def _power_bi_report_to_nested(power_bi_report: PowerBIReport) -> PowerBIReportN is_incomplete=power_bi_report.is_incomplete, provenance_type=power_bi_report.provenance_type, home_id=power_bi_report.home_id, - depth=power_bi_report.depth, - immediate_upstream=power_bi_report.immediate_upstream, - immediate_downstream=power_bi_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -741,6 +684,7 @@ def _power_bi_report_from_nested(nested: PowerBIReportNested) -> PowerBIReport: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -749,9 +693,6 @@ def _power_bi_report_from_nested(nested: PowerBIReportNested) -> PowerBIReport: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_report_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -827,6 +768,9 @@ def _power_bi_report_from_nested_bytes(data: bytes, serde: Serde) -> PowerBIRepo PowerBIReport.METRICS = RelationField("metrics") PowerBIReport.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIReport.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIReport.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIReport.MEANINGS = RelationField("meanings") PowerBIReport.MC_MONITORS = RelationField("mcMonitors") PowerBIReport.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_table.py b/pyatlan_v9/model/assets/power_bi_table.py index 7b643538c..36e6d1235 100644 --- a/pyatlan_v9/model/assets/power_bi_table.py +++ b/pyatlan_v9/model/assets/power_bi_table.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,7 +52,6 @@ RelatedPowerBIDataflow, RelatedPowerBIDataset, RelatedPowerBIMeasure, - RelatedPowerBITable, ) from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable @@ -98,6 +98,7 @@ class PowerBITable(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -119,6 +120,8 @@ class PowerBITable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBITable" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this table exists.""" @@ -222,6 +225,11 @@ class PowerBITable(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -297,76 +305,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBITable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dataset is UNSET: - errors.append("dataset is required for creation") - if self.dataset_qualified_name is UNSET: - errors.append("dataset_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBITable validation failed: {errors}") - - def minimize(self) -> "PowerBITable": - """ - Return a minimal copy of this PowerBITable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBITable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBITable instance with only the minimum required fields. - """ - self.validate() - return PowerBITable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBITable": - """ - Create a :class:`RelatedPowerBITable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBITable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBITable(guid=self.guid) - return RelatedPowerBITable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -529,6 +467,11 @@ class PowerBITableRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -629,6 +572,7 @@ class PowerBITableNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -726,9 +670,6 @@ def _power_bi_table_to_nested(power_bi_table: PowerBITable) -> PowerBITableNeste is_incomplete=power_bi_table.is_incomplete, provenance_type=power_bi_table.provenance_type, home_id=power_bi_table.home_id, - depth=power_bi_table.depth, - immediate_upstream=power_bi_table.immediate_upstream, - immediate_downstream=power_bi_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -762,6 +703,7 @@ def _power_bi_table_from_nested(nested: PowerBITableNested) -> PowerBITable: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -770,9 +712,6 @@ def _power_bi_table_from_nested(nested: PowerBITableNested) -> PowerBITable: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -858,6 +797,9 @@ def _power_bi_table_from_nested_bytes(data: bytes, serde: Serde) -> PowerBITable PowerBITable.METRICS = RelationField("metrics") PowerBITable.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBITable.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBITable.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBITable.MEANINGS = RelationField("meanings") PowerBITable.MC_MONITORS = RelationField("mcMonitors") PowerBITable.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_tile.py b/pyatlan_v9/model/assets/power_bi_tile.py index e3f274581..a81db1be6 100644 --- a/pyatlan_v9/model/assets/power_bi_tile.py +++ b/pyatlan_v9/model/assets/power_bi_tile.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ RelatedPowerBIDashboard, RelatedPowerBIDataset, RelatedPowerBIReport, - RelatedPowerBITile, ) from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable @@ -93,6 +93,7 @@ class PowerBITile(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: 
ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -113,6 +114,8 @@ class PowerBITile(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBITile" + workspace_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workspace in which this tile exists.""" @@ -198,6 +201,11 @@ class PowerBITile(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -270,76 +278,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBITile instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.dashboard is UNSET: - errors.append("dashboard is required for creation") - if self.dashboard_qualified_name is UNSET: - errors.append("dashboard_qualified_name is required for creation") - if self.workspace_qualified_name is UNSET: - errors.append("workspace_qualified_name is required for creation") - if errors: - raise ValueError(f"PowerBITile validation failed: {errors}") - - def minimize(self) -> "PowerBITile": - """ - Return a minimal copy of this PowerBITile with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBITile with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBITile instance with only the minimum required fields. - """ - self.validate() - return PowerBITile(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBITile": - """ - Create a :class:`RelatedPowerBITile` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBITile reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBITile(guid=self.guid) - return RelatedPowerBITile(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -484,6 +422,11 @@ class PowerBITileRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -579,6 +522,7 @@ class PowerBITileNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -665,9 +609,6 @@ def _power_bi_tile_to_nested(power_bi_tile: PowerBITile) -> PowerBITileNested: is_incomplete=power_bi_tile.is_incomplete, provenance_type=power_bi_tile.provenance_type, home_id=power_bi_tile.home_id, - depth=power_bi_tile.depth, - immediate_upstream=power_bi_tile.immediate_upstream, - immediate_downstream=power_bi_tile.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -699,6 +640,7 @@ def _power_bi_tile_from_nested(nested: PowerBITileNested) -> PowerBITile: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -707,9 +649,6 @@ def _power_bi_tile_from_nested(nested: PowerBITileNested) -> PowerBITile: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_tile_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -781,6 +720,9 @@ def _power_bi_tile_from_nested_bytes(data: bytes, serde: Serde) -> PowerBITile: PowerBITile.METRICS = RelationField("metrics") PowerBITile.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBITile.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBITile.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBITile.MEANINGS = RelationField("meanings") PowerBITile.MC_MONITORS = RelationField("mcMonitors") PowerBITile.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/power_bi_workspace.py b/pyatlan_v9/model/assets/power_bi_workspace.py index 8ed485f5d..742bf3f0c 100644 --- a/pyatlan_v9/model/assets/power_bi_workspace.py +++ b/pyatlan_v9/model/assets/power_bi_workspace.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ RelatedPowerBIDataflow, RelatedPowerBIDataset, RelatedPowerBIReport, - RelatedPowerBIWorkspace, ) from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable @@ -96,6 +96,7 @@ class PowerBIWorkspace(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None 
MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -117,6 +118,8 @@ class PowerBIWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PowerBIWorkspace" + web_url: Union[str, None, UnsetType] = UNSET """Deprecated.""" @@ -211,6 +214,11 @@ class PowerBIWorkspace(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -278,66 +286,6 @@ class PowerBIWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "PowerBIWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PowerBIWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"PowerBIWorkspace validation failed: {errors}") - - def minimize(self) -> "PowerBIWorkspace": - """ - Return a minimal copy of this PowerBIWorkspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PowerBIWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PowerBIWorkspace instance with only the minimum required fields. - """ - self.validate() - return PowerBIWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPowerBIWorkspace": - """ - Create a :class:`RelatedPowerBIWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPowerBIWorkspace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPowerBIWorkspace(guid=self.guid) - return RelatedPowerBIWorkspace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -493,6 +441,11 @@ class PowerBIWorkspaceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -593,6 +546,7 @@ class PowerBIWorkspaceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -690,9 +644,6 @@ def _power_bi_workspace_to_nested( is_incomplete=power_bi_workspace.is_incomplete, provenance_type=power_bi_workspace.provenance_type, home_id=power_bi_workspace.home_id, - depth=power_bi_workspace.depth, - immediate_upstream=power_bi_workspace.immediate_upstream, - immediate_downstream=power_bi_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -726,6 +677,7 @@ def _power_bi_workspace_from_nested(nested: PowerBIWorkspaceNested) -> PowerBIWo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -734,9 +686,6 @@ def _power_bi_workspace_from_nested(nested: PowerBIWorkspaceNested) -> PowerBIWo 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_power_bi_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -813,6 +762,9 @@ def _power_bi_workspace_from_nested_bytes( PowerBIWorkspace.METRICS = RelationField("metrics") PowerBIWorkspace.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PowerBIWorkspace.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PowerBIWorkspace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PowerBIWorkspace.MEANINGS = RelationField("meanings") PowerBIWorkspace.MC_MONITORS = RelationField("mcMonitors") PowerBIWorkspace.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/preset.py b/pyatlan_v9/model/assets/preset.py index b71ec8629..9976efb7e 100644 --- a/pyatlan_v9/model/assets/preset.py +++ b/pyatlan_v9/model/assets/preset.py @@ -40,11 +40,11 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .preset_related import RelatedPreset from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -82,6 +82,7 @@ class Preset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -99,6 +100,8 @@ class Preset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Preset" + preset_workspace_id: Union[int, None, UnsetType] = UNSET """Identifier of the workspace in which this asset exists, in Preset.""" @@ -160,6 +163,11 @@ class Preset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -215,66 +223,6 @@ class Preset(Asset): def __post_init__(self) -> None: self.type_name = "Preset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Preset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Preset validation failed: {errors}") - - def minimize(self) -> "Preset": - """ - Return a minimal copy of this Preset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Preset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Preset instance with only the minimum required fields. - """ - self.validate() - return Preset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPreset": - """ - Create a :class:`RelatedPreset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPreset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPreset(guid=self.guid) - return RelatedPreset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -395,6 +343,11 @@ class PresetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -481,6 +434,7 @@ class PresetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -554,9 +508,6 @@ def _preset_to_nested(preset: Preset) -> PresetNested: is_incomplete=preset.is_incomplete, provenance_type=preset.provenance_type, home_id=preset.home_id, - depth=preset.depth, - immediate_upstream=preset.immediate_upstream, - immediate_downstream=preset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -586,6 +537,7 @@ def _preset_from_nested(nested: PresetNested) -> Preset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -594,9 +546,6 @@ def _preset_from_nested(nested: PresetNested) -> Preset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_preset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -651,6 +600,9 @@ def _preset_from_nested_bytes(data: bytes, serde: Serde) -> Preset: Preset.METRICS = RelationField("metrics") Preset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Preset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Preset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Preset.MEANINGS = RelationField("meanings") Preset.MC_MONITORS = RelationField("mcMonitors") Preset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/preset_chart.py b/pyatlan_v9/model/assets/preset_chart.py index 31c9aca12..a356a2781 100644 --- a/pyatlan_v9/model/assets/preset_chart.py +++ b/pyatlan_v9/model/assets/preset_chart.py @@ -42,11 +42,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .preset_related import RelatedPresetChart, RelatedPresetDashboard +from .preset_related import RelatedPresetDashboard from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -86,6 +87,7 @@ class PresetChart(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class PresetChart(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PresetChart" + preset_chart_description_markdown: Union[str, None, UnsetType] = UNSET """""" @@ -171,6 +175,11 @@ class PresetChart(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -237,80 +246,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PresetChart instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.preset_dashboard is UNSET: - errors.append("preset_dashboard is required for creation") - if self.preset_dashboard_qualified_name is UNSET: - errors.append( - "preset_dashboard_qualified_name is required for creation" - ) - if self.preset_workspace_qualified_name is UNSET: - errors.append( - "preset_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"PresetChart validation failed: {errors}") - - def minimize(self) -> "PresetChart": - """ - Return a minimal copy of this PresetChart with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PresetChart with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PresetChart instance with only the minimum required fields. - """ - self.validate() - return PresetChart(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPresetChart": - """ - Create a :class:`RelatedPresetChart` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPresetChart reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPresetChart(guid=self.guid) - return RelatedPresetChart(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -466,6 +401,11 @@ class PresetChartRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -555,6 +495,7 @@ class PresetChartNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -637,9 +578,6 @@ def _preset_chart_to_nested(preset_chart: PresetChart) -> PresetChartNested: is_incomplete=preset_chart.is_incomplete, provenance_type=preset_chart.provenance_type, home_id=preset_chart.home_id, - depth=preset_chart.depth, - immediate_upstream=preset_chart.immediate_upstream, - immediate_downstream=preset_chart.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -671,6 +609,7 @@ def _preset_chart_from_nested(nested: PresetChartNested) -> PresetChart: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -679,9 +618,6 @@ def _preset_chart_from_nested(nested: PresetChartNested) -> PresetChart: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_preset_chart_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -746,6 +682,9 @@ def _preset_chart_from_nested_bytes(data: bytes, serde: Serde) -> PresetChart: PresetChart.METRICS = RelationField("metrics") PresetChart.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PresetChart.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PresetChart.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PresetChart.MEANINGS = RelationField("meanings") PresetChart.MC_MONITORS = RelationField("mcMonitors") PresetChart.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/preset_dashboard.py b/pyatlan_v9/model/assets/preset_dashboard.py index 8c239b6e1..7a0952484 100644 --- a/pyatlan_v9/model/assets/preset_dashboard.py +++ b/pyatlan_v9/model/assets/preset_dashboard.py @@ -43,13 +43,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .preset_related import ( RelatedPresetChart, - RelatedPresetDashboard, RelatedPresetDataset, RelatedPresetWorkspace, ) @@ -96,6 +96,7 @@ class PresetDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -116,6 +117,8 @@ class PresetDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: 
ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PresetDashboard" + preset_dashboard_changed_by_name: Union[str, None, UnsetType] = UNSET """""" @@ -199,6 +202,11 @@ class PresetDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -269,76 +277,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PresetDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.preset_workspace is UNSET: - errors.append("preset_workspace is required for creation") - if self.preset_workspace_qualified_name is UNSET: - errors.append( - "preset_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"PresetDashboard validation failed: {errors}") - - def minimize(self) -> "PresetDashboard": - """ - Return a minimal copy of this PresetDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PresetDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PresetDashboard instance with only the minimum required fields. - """ - self.validate() - return PresetDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPresetDashboard": - """ - Create a :class:`RelatedPresetDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPresetDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPresetDashboard(guid=self.guid) - return RelatedPresetDashboard(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -512,6 +450,11 @@ class PresetDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -609,6 +552,7 @@ class PresetDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -707,9 +651,6 @@ def _preset_dashboard_to_nested( is_incomplete=preset_dashboard.is_incomplete, provenance_type=preset_dashboard.provenance_type, home_id=preset_dashboard.home_id, - depth=preset_dashboard.depth, - immediate_upstream=preset_dashboard.immediate_upstream, - immediate_downstream=preset_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -743,6 +684,7 @@ def _preset_dashboard_from_nested(nested: PresetDashboardNested) -> PresetDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -751,9 +693,6 @@ def _preset_dashboard_from_nested(nested: PresetDashboardNested) -> PresetDashbo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_preset_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -839,6 +778,9 @@ def _preset_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> PresetDash PresetDashboard.METRICS = RelationField("metrics") PresetDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PresetDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PresetDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PresetDashboard.MEANINGS = RelationField("meanings") PresetDashboard.MC_MONITORS = RelationField("mcMonitors") PresetDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/preset_dataset.py b/pyatlan_v9/model/assets/preset_dataset.py index be4adf76a..0abec091d 100644 --- a/pyatlan_v9/model/assets/preset_dataset.py +++ b/pyatlan_v9/model/assets/preset_dataset.py @@ -42,11 +42,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .preset_related import RelatedPresetDashboard, RelatedPresetDataset +from .preset_related import RelatedPresetDashboard from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -87,6 +88,7 @@ class PresetDataset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +107,8 @@ class PresetDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PresetDataset" + preset_dataset_datasource_name: Union[str, None, UnsetType] = UNSET """""" @@ -175,6 +179,11 @@ class PresetDataset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -241,80 +250,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PresetDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.preset_dashboard is UNSET: - errors.append("preset_dashboard is required for creation") - if self.preset_dashboard_qualified_name is UNSET: - errors.append( - "preset_dashboard_qualified_name is required for creation" - ) - if self.preset_workspace_qualified_name is UNSET: - errors.append( - "preset_workspace_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"PresetDataset validation failed: {errors}") - - def minimize(self) -> "PresetDataset": - """ - Return a minimal copy of this PresetDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PresetDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PresetDataset instance with only the minimum required fields. - """ - self.validate() - return PresetDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPresetDataset": - """ - Create a :class:`RelatedPresetDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPresetDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPresetDataset(guid=self.guid) - return RelatedPresetDataset(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -473,6 +408,11 @@ class PresetDatasetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -564,6 +504,7 @@ class PresetDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -646,9 +587,6 @@ def _preset_dataset_to_nested(preset_dataset: PresetDataset) -> PresetDatasetNes is_incomplete=preset_dataset.is_incomplete, provenance_type=preset_dataset.provenance_type, home_id=preset_dataset.home_id, - depth=preset_dataset.depth, - immediate_upstream=preset_dataset.immediate_upstream, - immediate_downstream=preset_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -682,6 +620,7 @@ def _preset_dataset_from_nested(nested: PresetDatasetNested) -> PresetDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -690,9 +629,6 @@ def _preset_dataset_from_nested(nested: PresetDatasetNested) -> PresetDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_preset_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -764,6 +700,9 @@ def _preset_dataset_from_nested_bytes(data: bytes, serde: Serde) -> PresetDatase PresetDataset.METRICS = RelationField("metrics") PresetDataset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PresetDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PresetDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PresetDataset.MEANINGS = RelationField("meanings") PresetDataset.MC_MONITORS = RelationField("mcMonitors") PresetDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/preset_workspace.py b/pyatlan_v9/model/assets/preset_workspace.py index bcd6f1b79..b0d6a8044 100644 --- a/pyatlan_v9/model/assets/preset_workspace.py +++ b/pyatlan_v9/model/assets/preset_workspace.py @@ -41,11 +41,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject -from .preset_related import RelatedPresetDashboard, RelatedPresetWorkspace +from .preset_related import RelatedPresetDashboard from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -92,6 +93,7 @@ class PresetWorkspace(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,6 +112,8 @@ class PresetWorkspace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "PresetWorkspace" + preset_workspace_public_dashboards_allowed: Union[bool, None, UnsetType] = UNSET """""" @@ -198,6 +202,11 @@ class PresetWorkspace(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,66 +265,6 @@ class PresetWorkspace(Asset): def __post_init__(self) -> None: self.type_name = "PresetWorkspace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this PresetWorkspace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"PresetWorkspace validation failed: {errors}") - - def minimize(self) -> "PresetWorkspace": - """ - Return a minimal copy of this PresetWorkspace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new PresetWorkspace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new PresetWorkspace instance with only the minimum required fields. - """ - self.validate() - return PresetWorkspace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedPresetWorkspace": - """ - Create a :class:`RelatedPresetWorkspace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedPresetWorkspace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedPresetWorkspace(guid=self.guid) - return RelatedPresetWorkspace(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -494,6 +443,11 @@ class PresetWorkspaceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -585,6 +539,7 @@ class PresetWorkspaceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -691,9 +646,6 @@ def _preset_workspace_to_nested( is_incomplete=preset_workspace.is_incomplete, provenance_type=preset_workspace.provenance_type, home_id=preset_workspace.home_id, - depth=preset_workspace.depth, - immediate_upstream=preset_workspace.immediate_upstream, - immediate_downstream=preset_workspace.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -727,6 +679,7 @@ def _preset_workspace_from_nested(nested: PresetWorkspaceNested) -> PresetWorksp updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -735,9 +688,6 @@ def _preset_workspace_from_nested(nested: PresetWorkspaceNested) -> PresetWorksp is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_preset_workspace_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -832,6 +782,9 @@ def _preset_workspace_from_nested_bytes(data: bytes, serde: Serde) -> PresetWork PresetWorkspace.METRICS = RelationField("metrics") PresetWorkspace.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") PresetWorkspace.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +PresetWorkspace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) PresetWorkspace.MEANINGS = RelationField("meanings") PresetWorkspace.MC_MONITORS = RelationField("mcMonitors") PresetWorkspace.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/procedure.py b/pyatlan_v9/model/assets/procedure.py index fac52ce53..3aadcb763 100644 --- a/pyatlan_v9/model/assets/procedure.py +++ b/pyatlan_v9/model/assets/procedure.py @@ -49,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -64,7 +65,7 @@ RelatedSqlInsightBusinessQuestion, RelatedSqlInsightJoin, ) -from .sql_related import RelatedProcedure, RelatedSchema +from .sql_related import RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -134,6 +135,7 @@ class Procedure(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -157,6 +159,8 @@ class Procedure(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = 
None + type_name: Union[str, UnsetType] = "Procedure" + definition: Union[str, None, UnsetType] = UNSET """SQL definition of the procedure.""" @@ -336,6 +340,11 @@ class Procedure(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -425,82 +434,6 @@ def __post_init__(self) -> None: r"^.+/_procedures_/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Procedure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if self.definition is UNSET: - errors.append("definition is required for creation") - if errors: - raise ValueError(f"Procedure validation failed: {errors}") - - def minimize(self) -> "Procedure": - """ - Return a minimal copy of this Procedure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Procedure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Procedure instance with only the minimum required fields. - """ - self.validate() - return Procedure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedProcedure": - """ - Create a :class:`RelatedProcedure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedProcedure reference to this asset. - """ - if self.guid is not UNSET: - return RelatedProcedure(guid=self.guid) - return RelatedProcedure(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -818,6 +751,11 @@ class ProcedureRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -936,6 +874,7 @@ class ProcedureNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1093,9 +1032,6 @@ def _procedure_to_nested(procedure: Procedure) -> ProcedureNested: is_incomplete=procedure.is_incomplete, provenance_type=procedure.provenance_type, home_id=procedure.home_id, - depth=procedure.depth, - immediate_upstream=procedure.immediate_upstream, - immediate_downstream=procedure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1127,6 +1063,7 @@ def _procedure_from_nested(nested: ProcedureNested) -> Procedure: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1135,9 +1072,6 @@ def _procedure_from_nested(nested: ProcedureNested) -> Procedure: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_procedure_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1262,6 +1196,9 @@ def _procedure_from_nested_bytes(data: bytes, serde: Serde) -> Procedure: Procedure.DBT_SOURCES = RelationField("dbtSources") Procedure.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Procedure.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Procedure.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Procedure.MEANINGS = RelationField("meanings") Procedure.MC_MONITORS = RelationField("mcMonitors") Procedure.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/process.py b/pyatlan_v9/model/assets/process.py index 333cda345..450dd5b20 100644 --- a/pyatlan_v9/model/assets/process.py +++ b/pyatlan_v9/model/assets/process.py @@ -49,11 +49,12 @@ from .fabric_related import RelatedFabricActivity from .fivetran_related import RelatedFivetranConnector from .flow_related import RelatedFlowControlOperation +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .matillion_related import RelatedMatillionComponent from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .power_bi_related import RelatedPowerBIDataflow -from .process_related import RelatedColumnProcess, RelatedProcess +from .process_related import RelatedColumnProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -78,6 +79,7 @@ class Process(Asset): AST: ClassVar[Any] = None ADDITIONAL_ETL_CONTEXT: ClassVar[Any] = None AI_DATASET_TYPE: ClassVar[Any] = None + IS_PASS_THROUGH: ClassVar[Any] = None ADF_ACTIVITY: ClassVar[Any] = None AIRFLOW_TASKS: ClassVar[Any] 
= None ANOMALO_CHECKS: ClassVar[Any] = None @@ -93,6 +95,7 @@ class Process(Asset): FABRIC_ACTIVITIES: ClassVar[Any] = None FIVETRAN_CONNECTOR: ClassVar[Any] = None FLOW_ORCHESTRATED_BY: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MATILLION_COMPONENT: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -112,6 +115,8 @@ class Process(Asset): SODA_CHECKS: ClassVar[Any] = None SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Process" + code: Union[str, None, UnsetType] = UNSET """Code that ran within the process.""" @@ -130,6 +135,9 @@ class Process(Asset): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + adf_activity: Union[RelatedAdfActivity, None, UnsetType] = UNSET """ADF Activity that is associated with this lineage process.""" @@ -177,6 +185,11 @@ class Process(Asset): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -240,66 +253,6 @@ class Process(Asset): def __post_init__(self) -> None: self.type_name = "Process" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run 
validation of this Process instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Process validation failed: {errors}") - - def minimize(self) -> "Process": - """ - Return a minimal copy of this Process with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Process with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Process instance with only the minimum required fields. - """ - self.validate() - return Process(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedProcess": - """ - Create a :class:`RelatedProcess` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedProcess reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedProcess(guid=self.guid) - return RelatedProcess(qualified_name=self.qualified_name) - @staticmethod def _extract_guid(relationship: Any) -> Union[str, None]: """Extract guid from a relationship-like object.""" @@ -492,6 +445,9 @@ class ProcessAttributes(AssetAttributes): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + class ProcessRelationshipAttributes(AssetRelationshipAttributes): """Process-specific relationship attributes for nested API format.""" @@ -543,6 +499,11 @@ class ProcessRelationshipAttributes(AssetRelationshipAttributes): flow_orchestrated_by: Union[RelatedFlowControlOperation, None, UnsetType] = UNSET """Orchestrated control operation that ran these data flows (process).""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -638,6 +599,7 @@ class ProcessNested(AssetNested): "fabric_activities", "fivetran_connector", "flow_orchestrated_by", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "matillion_component", "mc_monitors", @@ -670,6 +632,7 @@ def _populate_process_attrs(attrs: ProcessAttributes, obj: Process) -> None: attrs.ast = obj.ast attrs.additional_etl_context = obj.additional_etl_context attrs.ai_dataset_type = obj.ai_dataset_type + attrs.is_pass_through = obj.is_pass_through def _extract_process_attrs(attrs: ProcessAttributes) -> dict: @@ -683,6 +646,7 @@ def _extract_process_attrs(attrs: ProcessAttributes) -> dict: result["ast"] = 
attrs.ast result["additional_etl_context"] = attrs.additional_etl_context result["ai_dataset_type"] = attrs.ai_dataset_type + result["is_pass_through"] = attrs.is_pass_through return result @@ -719,9 +683,6 @@ def _process_to_nested(process: Process) -> ProcessNested: is_incomplete=process.is_incomplete, provenance_type=process.provenance_type, home_id=process.home_id, - depth=process.depth, - immediate_upstream=process.immediate_upstream, - immediate_downstream=process.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -751,6 +712,7 @@ def _process_from_nested(nested: ProcessNested) -> Process: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -759,9 +721,6 @@ def _process_from_nested(nested: ProcessNested) -> Process: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_process_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -782,7 +741,11 @@ def _process_from_nested_bytes(data: bytes, serde: Serde) -> Process: # --------------------------------------------------------------------------- # Deferred field descriptor initialization # --------------------------------------------------------------------------- -from pyatlan.model.fields.atlan_fields import KeywordField, RelationField # noqa: E402 +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, + KeywordField, + RelationField, +) Process.CODE = KeywordField("code", "code") Process.SQL = KeywordField("sql", "sql") @@ -794,6 +757,7 @@ def _process_from_nested_bytes(data: bytes, serde: Serde) -> Process: 
"additionalEtlContext", "additionalEtlContext" ) Process.AI_DATASET_TYPE = KeywordField("aiDatasetType", "aiDatasetType") +Process.IS_PASS_THROUGH = BooleanField("isPassThrough", "isPassThrough") Process.ADF_ACTIVITY = RelationField("adfActivity") Process.AIRFLOW_TASKS = RelationField("airflowTasks") Process.ANOMALO_CHECKS = RelationField("anomaloChecks") @@ -809,6 +773,9 @@ def _process_from_nested_bytes(data: bytes, serde: Serde) -> Process: Process.FABRIC_ACTIVITIES = RelationField("fabricActivities") Process.FIVETRAN_CONNECTOR = RelationField("fivetranConnector") Process.FLOW_ORCHESTRATED_BY = RelationField("flowOrchestratedBy") +Process.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Process.MEANINGS = RelationField("meanings") Process.MATILLION_COMPONENT = RelationField("matillionComponent") Process.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/process_execution.py b/pyatlan_v9/model/assets/process_execution.py index 5a005537c..82db3171d 100644 --- a/pyatlan_v9/model/assets/process_execution.py +++ b/pyatlan_v9/model/assets/process_execution.py @@ -27,10 +27,10 @@ from .anomalo_related import RelatedAnomaloCheck from .app_related import RelatedApplication, RelatedApplicationField -from .asset_related import RelatedProcessExecution from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable import ( @@ -194,6 +194,11 @@ class ProcessExecution(Referenceable): ASSET_SODA_CHECK_STATUSES: ClassVar[Any] = None ASSET_SODA_SOURCE_URL: ClassVar[Any] = None ASSET_ICON: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_SCORE_VALUE: ClassVar[Any] = None + 
ASSET_EXTERNAL_DQ_TEST_ENTITIES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_AVG_SCORES: ClassVar[Any] = None + ASSET_EXTERNAL_DQ_TEST_MIN_SCORES: ClassVar[Any] = None ASSET_EXTERNAL_DQ_METADATA_DETAILS: ClassVar[Any] = None IS_PARTIAL: ClassVar[Any] = None IS_AI_GENERATED: ClassVar[Any] = None @@ -258,6 +263,7 @@ class ProcessExecution(Referenceable): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -269,6 +275,8 @@ class ProcessExecution(Referenceable): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ProcessExecution" + name: Union[str, None, UnsetType] = UNSET """Name of this asset. Fallback for display purposes, if displayName is empty.""" @@ -712,6 +720,31 @@ class ProcessExecution(Referenceable): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. (Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. 
Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -980,6 +1013,11 @@ class ProcessExecution(Referenceable): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1017,66 +1055,6 @@ class ProcessExecution(Referenceable): def __post_init__(self) -> None: self.type_name = "ProcessExecution" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ProcessExecution instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ProcessExecution validation failed: {errors}") - - def minimize(self) -> "ProcessExecution": - """ - Return a minimal copy of this ProcessExecution with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ProcessExecution with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ProcessExecution instance with only the minimum required fields. - """ - self.validate() - return ProcessExecution(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedProcessExecution": - """ - Create a :class:`RelatedProcessExecution` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedProcessExecution reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedProcessExecution(guid=self.guid) - return RelatedProcessExecution(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1577,6 +1555,31 @@ class ProcessExecutionAttributes(ReferenceableAttributes): asset_icon: Union[str, None, UnsetType] = UNSET """Name of the icon to use for this asset. (Only applies to glossaries, currently.)""" + asset_external_dq_score_value: Union[float, None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQScoreValue" + ) + """Single asset-level DQ score (0–100). Populated natively by tools that provide one.""" + + asset_external_dq_test_entities: Union[List[str], None, UnsetType] = msgspec.field( + default=UNSET, name="assetExternalDQTestEntities" + ) + """Ordered list of DQ test/scan names on this asset. 
Positionally aligned with the score metrics.""" + + asset_external_dq_test_latest_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestLatestScores") + ) + """List of scores of the most recent run for each DQ test.""" + + asset_external_dq_test_avg_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestAvgScores") + ) + """List of mean scores across all runs for each DQ test.""" + + asset_external_dq_test_min_scores: Union[List[float], None, UnsetType] = ( + msgspec.field(default=UNSET, name="assetExternalDQTestMinScores") + ) + """List of minimum (floor) score across all runs for each DQ test.""" + asset_external_dq_metadata_details: Union[ Dict[str, Dict[str, Any]], None, UnsetType ] = msgspec.field(default=UNSET, name="assetExternalDQMetadataDetails") @@ -1849,6 +1852,11 @@ class ProcessExecutionRelationshipAttributes(ReferenceableRelationshipAttributes ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1915,6 +1923,7 @@ class ProcessExecutionNested(ReferenceableNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -2110,6 +2119,13 @@ def _populate_process_execution_attrs( attrs.asset_soda_check_statuses = obj.asset_soda_check_statuses attrs.asset_soda_source_url = obj.asset_soda_source_url attrs.asset_icon = obj.asset_icon + attrs.asset_external_dq_score_value = obj.asset_external_dq_score_value + attrs.asset_external_dq_test_entities = obj.asset_external_dq_test_entities + attrs.asset_external_dq_test_latest_scores = ( + 
obj.asset_external_dq_test_latest_scores + ) + attrs.asset_external_dq_test_avg_scores = obj.asset_external_dq_test_avg_scores + attrs.asset_external_dq_test_min_scores = obj.asset_external_dq_test_min_scores attrs.asset_external_dq_metadata_details = obj.asset_external_dq_metadata_details attrs.is_partial = obj.is_partial attrs.is_ai_generated = obj.is_ai_generated @@ -2384,6 +2400,17 @@ def _extract_process_execution_attrs(attrs: ProcessExecutionAttributes) -> dict: result["asset_soda_check_statuses"] = attrs.asset_soda_check_statuses result["asset_soda_source_url"] = attrs.asset_soda_source_url result["asset_icon"] = attrs.asset_icon + result["asset_external_dq_score_value"] = attrs.asset_external_dq_score_value + result["asset_external_dq_test_entities"] = attrs.asset_external_dq_test_entities + result["asset_external_dq_test_latest_scores"] = ( + attrs.asset_external_dq_test_latest_scores + ) + result["asset_external_dq_test_avg_scores"] = ( + attrs.asset_external_dq_test_avg_scores + ) + result["asset_external_dq_test_min_scores"] = ( + attrs.asset_external_dq_test_min_scores + ) result["asset_external_dq_metadata_details"] = ( attrs.asset_external_dq_metadata_details ) @@ -2502,9 +2529,6 @@ def _process_execution_to_nested( is_incomplete=process_execution.is_incomplete, provenance_type=process_execution.provenance_type, home_id=process_execution.home_id, - depth=process_execution.depth, - immediate_upstream=process_execution.immediate_upstream, - immediate_downstream=process_execution.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -2538,6 +2562,7 @@ def _process_execution_from_nested(nested: ProcessExecutionNested) -> ProcessExe updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -2546,9 +2571,6 @@ def _process_execution_from_nested(nested: ProcessExecutionNested) -> ProcessExe is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_process_execution_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2932,6 +2954,21 @@ def _process_execution_from_nested_bytes(data: bytes, serde: Serde) -> ProcessEx "assetSodaSourceURL", "assetSodaSourceURL" ) ProcessExecution.ASSET_ICON = KeywordField("assetIcon", "assetIcon") +ProcessExecution.ASSET_EXTERNAL_DQ_SCORE_VALUE = NumericField( + "assetExternalDQScoreValue", "assetExternalDQScoreValue" +) +ProcessExecution.ASSET_EXTERNAL_DQ_TEST_ENTITIES = KeywordField( + "assetExternalDQTestEntities", "assetExternalDQTestEntities" +) +ProcessExecution.ASSET_EXTERNAL_DQ_TEST_LATEST_SCORES = NumericField( + "assetExternalDQTestLatestScores", "assetExternalDQTestLatestScores" +) +ProcessExecution.ASSET_EXTERNAL_DQ_TEST_AVG_SCORES = NumericField( + "assetExternalDQTestAvgScores", "assetExternalDQTestAvgScores" +) +ProcessExecution.ASSET_EXTERNAL_DQ_TEST_MIN_SCORES = NumericField( + "assetExternalDQTestMinScores", "assetExternalDQTestMinScores" +) ProcessExecution.ASSET_EXTERNAL_DQ_METADATA_DETAILS = KeywordField( "assetExternalDQMetadataDetails", "assetExternalDQMetadataDetails" ) @@ -3093,6 +3130,9 @@ def _process_execution_from_nested_bytes(data: bytes, serde: Serde) -> ProcessEx ProcessExecution.METRICS = RelationField("metrics") ProcessExecution.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ProcessExecution.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ProcessExecution.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ProcessExecution.MEANINGS = RelationField("meanings") 
ProcessExecution.MC_MONITORS = RelationField("mcMonitors") ProcessExecution.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/process_related.py b/pyatlan_v9/model/assets/process_related.py index e274164ae..1f80d5fac 100644 --- a/pyatlan_v9/model/assets/process_related.py +++ b/pyatlan_v9/model/assets/process_related.py @@ -54,6 +54,9 @@ class RelatedProcess(RelatedAsset): ai_dataset_type: Union[str, None, UnsetType] = UNSET """Dataset type for AI Model - dataset process.""" + is_pass_through: Union[bool, None, UnsetType] = UNSET + """Whether this process represents a pass-through data flow where data is moved without transformation, as opposed to a flow where data is actively modified.""" + def __post_init__(self) -> None: RelatedReferenceable.__post_init__(self) self.type_name = "Process" diff --git a/pyatlan_v9/model/assets/purpose.py b/pyatlan_v9/model/assets/purpose.py index 1d2ddbca5..8a6596aab 100644 --- a/pyatlan_v9/model/assets/purpose.py +++ b/pyatlan_v9/model/assets/purpose.py @@ -1,14 +1,21 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 # SPDX-License-Identifier: Apache-2.0 -# Copyright 2026 Atlan Pte. Ltd. +# Copyright 2024 Atlan Pte. Ltd. -"""Purpose asset model for pyatlan_v9.""" +""" +Purpose asset model with flattened inheritance. 
+ +This module provides: +- Purpose: Flat asset class (easy to use) +- PurposeAttributes: Nested attributes struct (extends AssetAttributes) +- PurposeNested: Nested API format struct +""" from __future__ import annotations -from typing import TYPE_CHECKING, Any, Optional, Set, Union -from warnings import warn +from typing import Any, ClassVar, List, Set, Union -import msgspec from msgspec import UNSET, UnsetType from pyatlan.model.enums import ( @@ -19,53 +26,197 @@ PurposeMetadataAction, ) from pyatlan_v9.model.conversion_utils import ( - build_attributes_kwargs, - build_flat_kwargs, + categorize_relationships, merge_relationships, ) from pyatlan_v9.model.core import AtlanTagName from pyatlan_v9.model.serde import Serde, get_serde -from pyatlan_v9.model.structs import SourceTagAttachment from pyatlan_v9.model.transform import register_asset from pyatlan_v9.utils import init_guid, validate_required_fields -from .asset import Asset, AssetAttributes, AssetNested -from .auth_policy import AuthPolicy +from .access_control_related import RelatedAuthPolicy +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .soda_related import RelatedSodaCheck + +# 
============================================================================= +# FLAT ASSET CLASS +# ============================================================================= -if TYPE_CHECKING: - from pyatlan_v9.client.atlan import AtlanClient +@register_asset +class Purpose(Asset): + """ + Atlan Type representing a Purpose model + """ + + PURPOSE_CLASSIFICATIONS: ClassVar[Any] = None + CHANNEL_LINK: ClassVar[Any] = None + DEFAULT_NAVIGATION: ClassVar[Any] = None + DENY_ASSET_FILTERS: ClassVar[Any] = None + DENY_ASSET_METADATA_TYPES: ClassVar[Any] = None + DENY_ASSET_TABS: ClassVar[Any] = None + DENY_ASSET_TYPES: ClassVar[Any] = None + DENY_CUSTOM_METADATA_GUIDS: ClassVar[Any] = None + DENY_NAVIGATION_PAGES: ClassVar[Any] = None + DENY_SIDEBAR_TABS: ClassVar[Any] = None + DISPLAY_PREFERENCES: ClassVar[Any] = None + IS_ACCESS_CONTROL_ENABLED: ClassVar[Any] = None + POLICIES: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None -class PurposeClassification(msgspec.Struct, kw_only=True, rename="camel"): - """Classification view used by Purpose to retain source-tag attachments.""" + type_name: Union[str, UnsetType] = 
"Purpose" - type_name: Any = None - source_tag_attachments: list[SourceTagAttachment] = msgspec.field( - default_factory=list - ) - entity_status: Union[str, None] = None + purpose_classifications: Union[List[str], None, UnsetType] = UNSET + """TBC""" + channel_link: Union[str, None, UnsetType] = UNSET + """TBC""" -@register_asset -class Purpose(Asset): - """Purpose asset in Atlan.""" + default_navigation: Union[str, None, UnsetType] = UNSET + """TBC""" - type_name: Union[str, UnsetType] = "Purpose" + deny_asset_filters: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_metadata_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_custom_metadata_guids: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_navigation_pages: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_sidebar_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + display_preferences: Union[List[str], None, UnsetType] = UNSET + """TBC""" is_access_control_enabled: Union[bool, None, UnsetType] = UNSET - deny_custom_metadata_guids: Union[Set[str], None, UnsetType] = UNSET - deny_asset_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_asset_filters: Union[Set[str], None, UnsetType] = UNSET - deny_asset_types: Union[Set[str], None, UnsetType] = UNSET - deny_sidebar_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_navigation_pages: Union[Set[str], None, UnsetType] = UNSET - default_navigation: Union[str, None, UnsetType] = UNSET - display_preferences: Union[Set[str], None, UnsetType] = UNSET - channel_link: Union[str, None, UnsetType] = UNSET - deny_asset_metadata_types: Union[Set[str], None, UnsetType] = UNSET - policies: Union[list[AuthPolicy], None, UnsetType] = UNSET - purpose_classifications: Union[list[Any], None, UnsetType] = UNSET - classifications: 
Union[list[PurposeClassification], None, UnsetType] = UNSET + """TBC""" + + policies: Union[List[RelatedAuthPolicy], None, UnsetType] = UNSET + """Access control entity to which this policy applies.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: 
Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "Purpose" @property def purpose_atlan_tags(self) -> Union[list[AtlanTagName], None]: @@ -248,19 +399,46 @@ def trim_to_required(self) -> "Purpose": """Return only required fields for updates.""" return Purpose.updater(qualified_name=self.qualified_name, name=self.name) + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= + def to_json(self, nested: bool = True, serde: Serde | None = None) -> str: - """Serialize the Purpose to JSON.""" + """ + Convert to JSON string using optimized nested struct serialization. + + Args: + nested: If True (default), use nested API format. If False, use flat format. + serde: Optional Serde instance for encoder reuse. Uses shared singleton if None. 
+ + Returns: + JSON string representation + """ if serde is None: serde = get_serde() if nested: - return _purpose_to_nested_bytes(self, serde).decode("utf-8") - return serde.encode(self).decode("utf-8") + return self.to_nested_bytes(serde).decode("utf-8") + else: + return serde.encode(self).decode("utf-8") + + def to_nested_bytes(self, serde: Serde | None = None) -> bytes: + """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate).""" + if serde is None: + serde = get_serde() + return _purpose_to_nested_bytes(self, serde) @staticmethod - def from_json( - json_data: Union[str, bytes], serde: Serde | None = None - ) -> "Purpose": - """Deserialize a Purpose from nested API JSON.""" + def from_json(json_data: str | bytes, serde: Serde | None = None) -> Purpose: + """ + Create from JSON string or bytes using optimized nested struct deserialization. + + Args: + json_data: JSON string or bytes to deserialize + serde: Optional Serde instance for decoder reuse. Uses shared singleton if None. 
+ + Returns: + Purpose instance + """ if isinstance(json_data, str): json_data = json_data.encode("utf-8") if serde is None: @@ -268,32 +446,221 @@ def from_json( return _purpose_from_nested_bytes(json_data, serde) +# ============================================================================= +# NESTED FORMAT CLASSES +# ============================================================================= + + class PurposeAttributes(AssetAttributes): - """Purpose-specific nested attributes.""" + """Purpose-specific attributes for nested API format.""" + + purpose_classifications: Union[List[str], None, UnsetType] = UNSET + """TBC""" - is_access_control_enabled: Union[bool, None, UnsetType] = UNSET - deny_custom_metadata_guids: Union[Set[str], None, UnsetType] = UNSET - deny_asset_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_asset_filters: Union[Set[str], None, UnsetType] = UNSET - deny_asset_types: Union[Set[str], None, UnsetType] = UNSET - deny_sidebar_tabs: Union[Set[str], None, UnsetType] = UNSET - deny_navigation_pages: Union[Set[str], None, UnsetType] = UNSET - default_navigation: Union[str, None, UnsetType] = UNSET - display_preferences: Union[Set[str], None, UnsetType] = UNSET channel_link: Union[str, None, UnsetType] = UNSET - deny_asset_metadata_types: Union[Set[str], None, UnsetType] = UNSET - purpose_classifications: Union[list[Any], None, UnsetType] = UNSET + """TBC""" + + default_navigation: Union[str, None, UnsetType] = UNSET + """TBC""" + + deny_asset_filters: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_metadata_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_tabs: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_asset_types: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_custom_metadata_guids: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_navigation_pages: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + deny_sidebar_tabs: Union[List[str], 
None, UnsetType] = UNSET + """TBC""" + + display_preferences: Union[List[str], None, UnsetType] = UNSET + """TBC""" + + is_access_control_enabled: Union[bool, None, UnsetType] = UNSET + """TBC""" + + +class PurposeRelationshipAttributes(AssetRelationshipAttributes): + """Purpose-specific relationship attributes for nested API format.""" + + policies: Union[List[RelatedAuthPolicy], None, UnsetType] = UNSET + """Access control entity to which this policy applies.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = ( + UNSET + ) + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: 
Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = ( + UNSET + ) + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[ + List[RelatedSchemaRegistrySubject], None, UnsetType + ] = UNSET + """Schema registry subjects associated with this asset.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" class PurposeNested(AssetNested): - """Purpose entity in nested API format.""" + """Purpose in nested API format for high-performance serialization.""" attributes: Union[PurposeAttributes, UnsetType] = UNSET + relationship_attributes: Union[PurposeRelationshipAttributes, UnsetType] = UNSET + append_relationship_attributes: Union[PurposeRelationshipAttributes, UnsetType] = ( + UNSET + ) + remove_relationship_attributes: Union[PurposeRelationshipAttributes, UnsetType] = ( + UNSET + ) + + +# ============================================================================= +# CONVERSION HELPERS & CONSTANTS +# ============================================================================= + +_PURPOSE_REL_FIELDS: List[str] = [ + *_ASSET_REL_FIELDS, + "policies", + "anomalo_checks", + "application", + "application_field", + "data_contract_latest", + "data_contract_latest_certified", + "output_port_data_products", + "input_port_data_products", + 
"metrics", + "dq_base_dataset_rules", + "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", + "meanings", + "mc_monitors", + "mc_incidents", + "user_def_relationship_to", + "user_def_relationship_from", + "files", + "links", + "readme", + "schema_registry_subjects", + "soda_checks", +] + + +def _populate_purpose_attrs(attrs: PurposeAttributes, obj: Purpose) -> None: + """Populate Purpose-specific attributes on the attrs struct.""" + _populate_asset_attrs(attrs, obj) + attrs.purpose_classifications = obj.purpose_classifications + attrs.channel_link = obj.channel_link + attrs.default_navigation = obj.default_navigation + attrs.deny_asset_filters = obj.deny_asset_filters + attrs.deny_asset_metadata_types = obj.deny_asset_metadata_types + attrs.deny_asset_tabs = obj.deny_asset_tabs + attrs.deny_asset_types = obj.deny_asset_types + attrs.deny_custom_metadata_guids = obj.deny_custom_metadata_guids + attrs.deny_navigation_pages = obj.deny_navigation_pages + attrs.deny_sidebar_tabs = obj.deny_sidebar_tabs + attrs.display_preferences = obj.display_preferences + attrs.is_access_control_enabled = obj.is_access_control_enabled + + +def _extract_purpose_attrs(attrs: PurposeAttributes) -> dict: + """Extract all Purpose attributes from the attrs struct into a flat dict.""" + result = _extract_asset_attrs(attrs) + result["purpose_classifications"] = attrs.purpose_classifications + result["channel_link"] = attrs.channel_link + result["default_navigation"] = attrs.default_navigation + result["deny_asset_filters"] = attrs.deny_asset_filters + result["deny_asset_metadata_types"] = attrs.deny_asset_metadata_types + result["deny_asset_tabs"] = attrs.deny_asset_tabs + result["deny_asset_types"] = attrs.deny_asset_types + result["deny_custom_metadata_guids"] = attrs.deny_custom_metadata_guids + result["deny_navigation_pages"] = attrs.deny_navigation_pages + result["deny_sidebar_tabs"] = attrs.deny_sidebar_tabs + result["display_preferences"] = 
attrs.display_preferences + result["is_access_control_enabled"] = attrs.is_access_control_enabled + return result + + +# ============================================================================= +# CONVERSION FUNCTIONS +# ============================================================================= def _purpose_to_nested(purpose: Purpose) -> PurposeNested: - attrs_kwargs = build_attributes_kwargs(purpose, PurposeAttributes) - attrs = PurposeAttributes(**attrs_kwargs) + """Convert flat Purpose to nested format.""" + attrs = PurposeAttributes() + _populate_purpose_attrs(attrs, purpose) + # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE) + replace_rels, append_rels, remove_rels = categorize_relationships( + purpose, _PURPOSE_REL_FIELDS, PurposeRelationshipAttributes + ) return PurposeNested( guid=purpose.guid, type_name=purpose.type_name, @@ -315,39 +682,112 @@ def _purpose_to_nested(purpose: Purpose) -> PurposeNested: provenance_type=purpose.provenance_type, home_id=purpose.home_id, attributes=attrs, + relationship_attributes=replace_rels, + append_relationship_attributes=append_rels, + remove_relationship_attributes=remove_rels, ) def _purpose_from_nested(nested: PurposeNested) -> Purpose: + """Convert nested format to flat Purpose.""" attrs = nested.attributes if nested.attributes is not UNSET else PurposeAttributes() + # Merge relationships from all three buckets merged_rels = merge_relationships( nested.relationship_attributes, nested.append_relationship_attributes, nested.remove_relationship_attributes, - [], - object, + _PURPOSE_REL_FIELDS, + PurposeRelationshipAttributes, ) - kwargs = build_flat_kwargs( - nested, attrs, merged_rels, AssetNested, PurposeAttributes + return Purpose( + guid=nested.guid, + type_name=nested.type_name, + status=nested.status, + version=nested.version, + create_time=nested.create_time, + update_time=nested.update_time, + created_by=nested.created_by, + updated_by=nested.updated_by, + 
classifications=nested.classifications, + classification_names=nested.classification_names, + meanings=nested.meanings, + labels=nested.labels, + business_attributes=nested.business_attributes, + custom_attributes=nested.custom_attributes, + pending_tasks=nested.pending_tasks, + proxy=nested.proxy, + is_incomplete=nested.is_incomplete, + provenance_type=nested.provenance_type, + home_id=nested.home_id, + **_extract_purpose_attrs(attrs), + # Merged relationship attributes + **merged_rels, ) - purpose = Purpose(**kwargs) - if ( - purpose.classifications is not UNSET - and purpose.classifications is not None - and purpose.classifications - and isinstance(purpose.classifications[0], dict) - ): - purpose.classifications = [ - msgspec.convert(classification, type=PurposeClassification) - for classification in purpose.classifications - ] - return purpose def _purpose_to_nested_bytes(purpose: Purpose, serde: Serde) -> bytes: + """Convert flat Purpose to nested JSON bytes.""" return serde.encode(_purpose_to_nested(purpose)) def _purpose_from_nested_bytes(data: bytes, serde: Serde) -> Purpose: + """Convert nested JSON bytes to flat Purpose.""" nested = serde.decode(data, PurposeNested) return _purpose_from_nested(nested) + + +# --------------------------------------------------------------------------- +# Deferred field descriptor initialization +# --------------------------------------------------------------------------- +from pyatlan.model.fields.atlan_fields import ( # noqa: E402 + BooleanField, + KeywordField, + RelationField, +) + +Purpose.PURPOSE_CLASSIFICATIONS = KeywordField( + "purposeClassifications", "purposeClassifications" +) +Purpose.CHANNEL_LINK = KeywordField("channelLink", "channelLink") +Purpose.DEFAULT_NAVIGATION = KeywordField("defaultNavigation", "defaultNavigation") +Purpose.DENY_ASSET_FILTERS = KeywordField("denyAssetFilters", "denyAssetFilters") +Purpose.DENY_ASSET_METADATA_TYPES = KeywordField( + "denyAssetMetadataTypes", "denyAssetMetadataTypes" +) 
+Purpose.DENY_ASSET_TABS = KeywordField("denyAssetTabs", "denyAssetTabs") +Purpose.DENY_ASSET_TYPES = KeywordField("denyAssetTypes", "denyAssetTypes") +Purpose.DENY_CUSTOM_METADATA_GUIDS = KeywordField( + "denyCustomMetadataGuids", "denyCustomMetadataGuids" +) +Purpose.DENY_NAVIGATION_PAGES = KeywordField( + "denyNavigationPages", "denyNavigationPages" +) +Purpose.DENY_SIDEBAR_TABS = KeywordField("denySidebarTabs", "denySidebarTabs") +Purpose.DISPLAY_PREFERENCES = KeywordField("displayPreferences", "displayPreferences") +Purpose.IS_ACCESS_CONTROL_ENABLED = BooleanField( + "isAccessControlEnabled", "isAccessControlEnabled" +) +Purpose.POLICIES = RelationField("policies") +Purpose.ANOMALO_CHECKS = RelationField("anomaloChecks") +Purpose.APPLICATION = RelationField("application") +Purpose.APPLICATION_FIELD = RelationField("applicationField") +Purpose.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +Purpose.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +Purpose.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +Purpose.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +Purpose.METRICS = RelationField("metrics") +Purpose.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +Purpose.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Purpose.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) +Purpose.MEANINGS = RelationField("meanings") +Purpose.MC_MONITORS = RelationField("mcMonitors") +Purpose.MC_INCIDENTS = RelationField("mcIncidents") +Purpose.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +Purpose.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +Purpose.FILES = RelationField("files") +Purpose.LINKS = RelationField("links") +Purpose.README = RelationField("readme") +Purpose.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +Purpose.SODA_CHECKS = 
RelationField("sodaChecks") diff --git a/pyatlan_v9/model/assets/qlik.py b/pyatlan_v9/model/assets/qlik.py index 774fef923..37ee82f0f 100644 --- a/pyatlan_v9/model/assets/qlik.py +++ b/pyatlan_v9/model/assets/qlik.py @@ -41,12 +41,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import RelatedQlik from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -87,6 +87,7 @@ class Qlik(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +105,8 @@ class Qlik(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Qlik" + qlik_id: Union[str, None, UnsetType] = UNSET """Identifier of this asset, from Qlik.""" @@ -177,6 +180,11 @@ class Qlik(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to 
this asset.""" @@ -232,66 +240,6 @@ class Qlik(Asset): def __post_init__(self) -> None: self.type_name = "Qlik" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Qlik instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Qlik validation failed: {errors}") - - def minimize(self) -> "Qlik": - """ - Return a minimal copy of this Qlik with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Qlik with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Qlik instance with only the minimum required fields. - """ - self.validate() - return Qlik(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlik": - """ - Create a :class:`RelatedQlik` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlik reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQlik(guid=self.guid) - return RelatedQlik(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -424,6 +372,11 @@ class QlikRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -506,6 +459,7 @@ class QlikNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -587,9 +541,6 @@ def _qlik_to_nested(qlik: Qlik) -> QlikNested: is_incomplete=qlik.is_incomplete, provenance_type=qlik.provenance_type, home_id=qlik.home_id, - depth=qlik.depth, - immediate_upstream=qlik.immediate_upstream, - immediate_downstream=qlik.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -619,6 +570,7 @@ def _qlik_from_nested(nested: QlikNested) -> Qlik: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -627,9 +579,6 @@ def _qlik_from_nested(nested: QlikNested) -> Qlik: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -684,6 +633,9 @@ def _qlik_from_nested_bytes(data: bytes, serde: Serde) -> Qlik: Qlik.METRICS = RelationField("metrics") Qlik.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Qlik.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Qlik.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Qlik.MEANINGS = RelationField("meanings") Qlik.MC_MONITORS = RelationField("mcMonitors") Qlik.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/qlik_app.py b/pyatlan_v9/model/assets/qlik_app.py index 4078e5b19..6cff4ab2f 100644 --- a/pyatlan_v9/model/assets/qlik_app.py +++ b/pyatlan_v9/model/assets/qlik_app.py @@ -42,12 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import RelatedQlikApp, RelatedQlikSheet, RelatedQlikSpace +from .qlik_related import RelatedQlikSheet, RelatedQlikSpace from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -93,6 +94,7 @@ class QlikApp(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = 
None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +114,8 @@ class QlikApp(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QlikApp" + qlik_has_section_access: Union[bool, None, UnsetType] = UNSET """Whether section access or data masking is enabled on the source (true) or not (false).""" @@ -200,6 +204,11 @@ class QlikApp(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -267,74 +276,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikApp instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_space is UNSET: - errors.append("qlik_space is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikApp validation failed: {errors}") - - def minimize(self) -> "QlikApp": - """ - Return a minimal copy of this QlikApp with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikApp with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikApp instance with only the minimum required fields. - """ - self.validate() - return QlikApp(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikApp": - """ - Create a :class:`RelatedQlikApp` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikApp reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlikApp(guid=self.guid) - return RelatedQlikApp(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -482,6 +423,11 @@ class QlikAppRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -574,6 +520,7 @@ class QlikAppNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -667,9 +614,6 @@ def _qlik_app_to_nested(qlik_app: QlikApp) -> QlikAppNested: is_incomplete=qlik_app.is_incomplete, provenance_type=qlik_app.provenance_type, home_id=qlik_app.home_id, - depth=qlik_app.depth, - immediate_upstream=qlik_app.immediate_upstream, - immediate_downstream=qlik_app.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -699,6 +643,7 @@ def _qlik_app_from_nested(nested: QlikAppNested) -> QlikApp: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -707,9 +652,6 @@ def _qlik_app_from_nested(nested: QlikAppNested) -> QlikApp: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_app_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -776,6 +718,9 @@ def _qlik_app_from_nested_bytes(data: bytes, serde: Serde) -> QlikApp: QlikApp.METRICS = RelationField("metrics") QlikApp.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QlikApp.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QlikApp.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QlikApp.MEANINGS = RelationField("meanings") QlikApp.MC_MONITORS = RelationField("mcMonitors") QlikApp.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/qlik_chart.py b/pyatlan_v9/model/assets/qlik_chart.py index 6515d9b4c..cf415705d 100644 --- a/pyatlan_v9/model/assets/qlik_chart.py +++ b/pyatlan_v9/model/assets/qlik_chart.py @@ -42,12 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import RelatedQlikChart, RelatedQlikColumn, RelatedQlikSheet +from .qlik_related import RelatedQlikColumn, RelatedQlikSheet from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -67,8 +68,8 @@ class QlikChart(Asset): QLIK_CHART_SUBTITLE: ClassVar[Any] = None QLIK_CHART_FOOTNOTE: ClassVar[Any] = None - QLIK_CHART_ORIENTATION: ClassVar[Any] 
= None - QLIK_CHART_TYPE: ClassVar[Any] = None + QLIK_ORIENTATION: ClassVar[Any] = None + QLIK_TYPE: ClassVar[Any] = None QLIK_ID: ClassVar[Any] = None QLIK_QRI: ClassVar[Any] = None QLIK_SPACE_ID: ClassVar[Any] = None @@ -92,6 +93,7 @@ class QlikChart(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,16 +113,18 @@ class QlikChart(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QlikChart" + qlik_chart_subtitle: Union[str, None, UnsetType] = UNSET """Subtitle of this chart.""" qlik_chart_footnote: Union[str, None, UnsetType] = UNSET """Footnote of this chart.""" - qlik_chart_orientation: Union[str, None, UnsetType] = UNSET + qlik_orientation: Union[str, None, UnsetType] = UNSET """Orientation of this chart.""" - qlik_chart_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Subtype of this chart, for example: bar, graph, pie, etc.""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -196,6 +200,11 @@ class QlikChart(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -265,76 +274,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikChart instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_sheet is UNSET: - errors.append("qlik_sheet is required for creation") - if self.qlik_app_qualified_name is UNSET: - errors.append("qlik_app_qualified_name is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikChart validation failed: {errors}") - - def minimize(self) -> "QlikChart": - """ - Return a minimal copy of this QlikChart with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikChart with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikChart instance with only the minimum required fields. 
- """ - self.validate() - return QlikChart(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikChart": - """ - Create a :class:`RelatedQlikChart` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikChart reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQlikChart(guid=self.guid) - return RelatedQlikChart(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -396,10 +335,10 @@ class QlikChartAttributes(AssetAttributes): qlik_chart_footnote: Union[str, None, UnsetType] = UNSET """Footnote of this chart.""" - qlik_chart_orientation: Union[str, None, UnsetType] = UNSET + qlik_orientation: Union[str, None, UnsetType] = UNSET """Orientation of this chart.""" - qlik_chart_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Subtype of this chart, for example: bar, graph, pie, etc.""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -479,6 +418,11 @@ class QlikChartRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -571,6 +515,7 @@ class QlikChartNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -597,8 +542,8 @@ def 
_populate_qlik_chart_attrs(attrs: QlikChartAttributes, obj: QlikChart) -> No _populate_asset_attrs(attrs, obj) attrs.qlik_chart_subtitle = obj.qlik_chart_subtitle attrs.qlik_chart_footnote = obj.qlik_chart_footnote - attrs.qlik_chart_orientation = obj.qlik_chart_orientation - attrs.qlik_chart_type = obj.qlik_chart_type + attrs.qlik_orientation = obj.qlik_orientation + attrs.qlik_type = obj.qlik_type attrs.qlik_id = obj.qlik_id attrs.qlik_qri = obj.qlik_qri attrs.qlik_space_id = obj.qlik_space_id @@ -615,8 +560,8 @@ def _extract_qlik_chart_attrs(attrs: QlikChartAttributes) -> dict: result = _extract_asset_attrs(attrs) result["qlik_chart_subtitle"] = attrs.qlik_chart_subtitle result["qlik_chart_footnote"] = attrs.qlik_chart_footnote - result["qlik_chart_orientation"] = attrs.qlik_chart_orientation - result["qlik_chart_type"] = attrs.qlik_chart_type + result["qlik_orientation"] = attrs.qlik_orientation + result["qlik_type"] = attrs.qlik_type result["qlik_id"] = attrs.qlik_id result["qlik_qri"] = attrs.qlik_qri result["qlik_space_id"] = attrs.qlik_space_id @@ -662,9 +607,6 @@ def _qlik_chart_to_nested(qlik_chart: QlikChart) -> QlikChartNested: is_incomplete=qlik_chart.is_incomplete, provenance_type=qlik_chart.provenance_type, home_id=qlik_chart.home_id, - depth=qlik_chart.depth, - immediate_upstream=qlik_chart.immediate_upstream, - immediate_downstream=qlik_chart.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -696,6 +638,7 @@ def _qlik_chart_from_nested(nested: QlikChartNested) -> QlikChart: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -704,9 +647,6 @@ def _qlik_chart_from_nested(nested: QlikChartNested) -> QlikChart: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_chart_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -736,10 +676,8 @@ def _qlik_chart_from_nested_bytes(data: bytes, serde: Serde) -> QlikChart: QlikChart.QLIK_CHART_SUBTITLE = KeywordField("qlikChartSubtitle", "qlikChartSubtitle") QlikChart.QLIK_CHART_FOOTNOTE = KeywordField("qlikChartFootnote", "qlikChartFootnote") -QlikChart.QLIK_CHART_ORIENTATION = KeywordField( - "qlikChartOrientation", "qlikChartOrientation" -) -QlikChart.QLIK_CHART_TYPE = KeywordField("qlikChartType", "qlikChartType") +QlikChart.QLIK_ORIENTATION = KeywordField("qlikOrientation", "qlikOrientation") +QlikChart.QLIK_TYPE = KeywordField("qlikType", "qlikType") QlikChart.QLIK_ID = KeywordField("qlikId", "qlikId") QlikChart.QLIK_QRI = KeywordTextField("qlikQRI", "qlikQRI", "qlikQRI.text") QlikChart.QLIK_SPACE_ID = KeywordField("qlikSpaceId", "qlikSpaceId") @@ -769,6 +707,9 @@ def _qlik_chart_from_nested_bytes(data: bytes, serde: Serde) -> QlikChart: QlikChart.METRICS = RelationField("metrics") QlikChart.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QlikChart.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QlikChart.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QlikChart.MEANINGS = RelationField("meanings") QlikChart.MC_MONITORS = RelationField("mcMonitors") QlikChart.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/qlik_column.py b/pyatlan_v9/model/assets/qlik_column.py index b961fb3ec..94afd2ea2 100644 --- a/pyatlan_v9/model/assets/qlik_column.py +++ b/pyatlan_v9/model/assets/qlik_column.py @@ -42,17 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import ( - RelatedQlikChart, - RelatedQlikColumn, - RelatedQlikDataset, - RelatedQlikSheet, -) +from .qlik_related import RelatedQlikChart, RelatedQlikDataset, RelatedQlikSheet from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -97,6 +93,7 @@ class QlikColumn(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -117,6 +114,8 @@ class QlikColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QlikColumn" + qlik_column_name: Union[str, None, UnsetType] = UNSET """Qlik Column name.""" @@ -202,6 +201,11 @@ class QlikColumn(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -274,76 +278,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this 
QlikColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_sheet is UNSET: - errors.append("qlik_sheet is required for creation") - if self.qlik_app_qualified_name is UNSET: - errors.append("qlik_app_qualified_name is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikColumn validation failed: {errors}") - - def minimize(self) -> "QlikColumn": - """ - Return a minimal copy of this QlikColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikColumn instance with only the minimum required fields. 
- """ - self.validate() - return QlikColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikColumn": - """ - Create a :class:`RelatedQlikColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQlikColumn(guid=self.guid) - return RelatedQlikColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -488,6 +422,11 @@ class QlikColumnRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -583,6 +522,7 @@ class QlikColumnNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -675,9 +615,6 @@ def _qlik_column_to_nested(qlik_column: QlikColumn) -> QlikColumnNested: is_incomplete=qlik_column.is_incomplete, provenance_type=qlik_column.provenance_type, home_id=qlik_column.home_id, - depth=qlik_column.depth, - immediate_upstream=qlik_column.immediate_upstream, - immediate_downstream=qlik_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -709,6 +646,7 @@ def _qlik_column_from_nested(nested: QlikColumnNested) -> 
QlikColumn: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -717,9 +655,6 @@ def _qlik_column_from_nested(nested: QlikColumnNested) -> QlikColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -782,6 +717,9 @@ def _qlik_column_from_nested_bytes(data: bytes, serde: Serde) -> QlikColumn: QlikColumn.METRICS = RelationField("metrics") QlikColumn.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QlikColumn.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QlikColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QlikColumn.MEANINGS = RelationField("meanings") QlikColumn.MC_MONITORS = RelationField("mcMonitors") QlikColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/qlik_dataset.py b/pyatlan_v9/model/assets/qlik_dataset.py index 7f6265f83..bc760aeeb 100644 --- a/pyatlan_v9/model/assets/qlik_dataset.py +++ b/pyatlan_v9/model/assets/qlik_dataset.py @@ -42,12 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from 
.process_related import RelatedProcess -from .qlik_related import RelatedQlikColumn, RelatedQlikDataset, RelatedQlikSpace +from .qlik_related import RelatedQlikColumn, RelatedQlikSpace from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -93,6 +94,7 @@ class QlikDataset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +114,8 @@ class QlikDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QlikDataset" + qlik_dataset_technical_name: Union[str, None, UnsetType] = UNSET """Technical name of this asset.""" @@ -200,6 +204,11 @@ class QlikDataset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -267,74 +276,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_space is UNSET: - errors.append("qlik_space is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikDataset validation failed: {errors}") - - def minimize(self) -> "QlikDataset": - """ - Return a minimal copy of this QlikDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikDataset instance with only the minimum required fields. - """ - self.validate() - return QlikDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikDataset": - """ - Create a :class:`RelatedQlikDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedQlikDataset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQlikDataset(guid=self.guid) - return RelatedQlikDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -482,6 +423,11 @@ class QlikDatasetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -574,6 +520,7 @@ class QlikDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -669,9 +616,6 @@ def _qlik_dataset_to_nested(qlik_dataset: QlikDataset) -> QlikDatasetNested: is_incomplete=qlik_dataset.is_incomplete, provenance_type=qlik_dataset.provenance_type, home_id=qlik_dataset.home_id, - depth=qlik_dataset.depth, - immediate_upstream=qlik_dataset.immediate_upstream, - immediate_downstream=qlik_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -703,6 +647,7 @@ def _qlik_dataset_from_nested(nested: QlikDatasetNested) -> QlikDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -711,9 +656,6 @@ def _qlik_dataset_from_nested(nested: QlikDatasetNested) -> 
QlikDataset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -785,6 +727,9 @@ def _qlik_dataset_from_nested_bytes(data: bytes, serde: Serde) -> QlikDataset: QlikDataset.METRICS = RelationField("metrics") QlikDataset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QlikDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QlikDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QlikDataset.MEANINGS = RelationField("meanings") QlikDataset.MC_MONITORS = RelationField("mcMonitors") QlikDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/qlik_related.py b/pyatlan_v9/model/assets/qlik_related.py index c22f845a3..d84c99c01 100644 --- a/pyatlan_v9/model/assets/qlik_related.py +++ b/pyatlan_v9/model/assets/qlik_related.py @@ -86,10 +86,10 @@ class RelatedQlikChart(RelatedQlik): qlik_chart_footnote: Union[str, None, UnsetType] = UNSET """Footnote of this chart.""" - qlik_chart_orientation: Union[str, None, UnsetType] = UNSET + qlik_orientation: Union[str, None, UnsetType] = UNSET """Orientation of this chart.""" - qlik_chart_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Subtype of this chart, for example: bar, graph, pie, etc.""" def __post_init__(self) -> None: @@ -107,7 +107,7 @@ class RelatedQlikSheet(RelatedQlik): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QlikSheet" so it serializes correctly - qlik_sheet_is_approved: Union[bool, None, UnsetType] = UNSET + qlik_is_approved: Union[bool, None, UnsetType] = UNSET """Whether this is approved (true) or not (false).""" def __post_init__(self) -> None: 
@@ -125,7 +125,7 @@ class RelatedQlikSpace(RelatedQlik): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QlikSpace" so it serializes correctly - qlik_space_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Type of this space, for exmaple: Private, Shared, etc.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/qlik_sheet.py b/pyatlan_v9/model/assets/qlik_sheet.py index 503a83470..1b70d7f4e 100644 --- a/pyatlan_v9/model/assets/qlik_sheet.py +++ b/pyatlan_v9/model/assets/qlik_sheet.py @@ -42,17 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import ( - RelatedQlikApp, - RelatedQlikChart, - RelatedQlikColumn, - RelatedQlikSheet, -) +from .qlik_related import RelatedQlikApp, RelatedQlikChart, RelatedQlikColumn from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -70,7 +66,7 @@ class QlikSheet(Asset): Instance of a Qlik sheet in Atlan. 
""" - QLIK_SHEET_IS_APPROVED: ClassVar[Any] = None + QLIK_IS_APPROVED: ClassVar[Any] = None QLIK_ID: ClassVar[Any] = None QLIK_QRI: ClassVar[Any] = None QLIK_SPACE_ID: ClassVar[Any] = None @@ -94,6 +90,7 @@ class QlikSheet(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -114,7 +111,9 @@ class QlikSheet(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - qlik_sheet_is_approved: Union[bool, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QlikSheet" + + qlik_is_approved: Union[bool, None, UnsetType] = UNSET """Whether this is approved (true) or not (false).""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -190,6 +189,11 @@ class QlikSheet(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -262,76 +266,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikSheet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.qlik_app is UNSET: - errors.append("qlik_app is required for creation") - if self.qlik_app_qualified_name is UNSET: - errors.append("qlik_app_qualified_name is required for creation") - if self.qlik_space_qualified_name is UNSET: - errors.append("qlik_space_qualified_name is required for creation") - if errors: - raise ValueError(f"QlikSheet validation failed: {errors}") - - def minimize(self) -> "QlikSheet": - """ - Return a minimal copy of this QlikSheet with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikSheet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikSheet instance with only the minimum required fields. - """ - self.validate() - return QlikSheet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikSheet": - """ - Create a :class:`RelatedQlikSheet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikSheet reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlikSheet(guid=self.guid) - return RelatedQlikSheet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -387,7 +321,7 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> QlikSheet: class QlikSheetAttributes(AssetAttributes): """QlikSheet-specific attributes for nested API format.""" - qlik_sheet_is_approved: Union[bool, None, UnsetType] = UNSET + qlik_is_approved: Union[bool, None, UnsetType] = UNSET """Whether this is approved (true) or not (false).""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -467,6 +401,11 @@ class QlikSheetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -562,6 +501,7 @@ class QlikSheetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -587,7 +527,7 @@ class QlikSheetNested(AssetNested): def _populate_qlik_sheet_attrs(attrs: QlikSheetAttributes, obj: QlikSheet) -> None: """Populate QlikSheet-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.qlik_sheet_is_approved = obj.qlik_sheet_is_approved + attrs.qlik_is_approved = obj.qlik_is_approved attrs.qlik_id = obj.qlik_id attrs.qlik_qri = obj.qlik_qri attrs.qlik_space_id = obj.qlik_space_id @@ -602,7 +542,7 @@ def _populate_qlik_sheet_attrs(attrs: QlikSheetAttributes, obj: QlikSheet) -> 
No def _extract_qlik_sheet_attrs(attrs: QlikSheetAttributes) -> dict: """Extract all QlikSheet attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["qlik_sheet_is_approved"] = attrs.qlik_sheet_is_approved + result["qlik_is_approved"] = attrs.qlik_is_approved result["qlik_id"] = attrs.qlik_id result["qlik_qri"] = attrs.qlik_qri result["qlik_space_id"] = attrs.qlik_space_id @@ -648,9 +588,6 @@ def _qlik_sheet_to_nested(qlik_sheet: QlikSheet) -> QlikSheetNested: is_incomplete=qlik_sheet.is_incomplete, provenance_type=qlik_sheet.provenance_type, home_id=qlik_sheet.home_id, - depth=qlik_sheet.depth, - immediate_upstream=qlik_sheet.immediate_upstream, - immediate_downstream=qlik_sheet.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -682,6 +619,7 @@ def _qlik_sheet_from_nested(nested: QlikSheetNested) -> QlikSheet: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -690,9 +628,6 @@ def _qlik_sheet_from_nested(nested: QlikSheetNested) -> QlikSheet: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_sheet_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -720,9 +655,7 @@ def _qlik_sheet_from_nested_bytes(data: bytes, serde: Serde) -> QlikSheet: RelationField, ) -QlikSheet.QLIK_SHEET_IS_APPROVED = BooleanField( - "qlikSheetIsApproved", "qlikSheetIsApproved" -) +QlikSheet.QLIK_IS_APPROVED = BooleanField("qlikIsApproved", "qlikIsApproved") QlikSheet.QLIK_ID = KeywordField("qlikId", "qlikId") QlikSheet.QLIK_QRI = KeywordTextField("qlikQRI", 
"qlikQRI", "qlikQRI.text") QlikSheet.QLIK_SPACE_ID = KeywordField("qlikSpaceId", "qlikSpaceId") @@ -752,6 +685,9 @@ def _qlik_sheet_from_nested_bytes(data: bytes, serde: Serde) -> QlikSheet: QlikSheet.METRICS = RelationField("metrics") QlikSheet.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QlikSheet.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QlikSheet.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QlikSheet.MEANINGS = RelationField("meanings") QlikSheet.MC_MONITORS = RelationField("mcMonitors") QlikSheet.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/qlik_space.py b/pyatlan_v9/model/assets/qlik_space.py index eb742b75d..2741db1ad 100644 --- a/pyatlan_v9/model/assets/qlik_space.py +++ b/pyatlan_v9/model/assets/qlik_space.py @@ -41,12 +41,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .qlik_related import RelatedQlikApp, RelatedQlikDataset, RelatedQlikSpace +from .qlik_related import RelatedQlikApp, RelatedQlikDataset from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -64,7 +65,7 @@ class QlikSpace(Asset): Instance of a Qlik space in Atlan. 
""" - QLIK_SPACE_TYPE: ClassVar[Any] = None + QLIK_TYPE: ClassVar[Any] = None QLIK_ID: ClassVar[Any] = None QLIK_QRI: ClassVar[Any] = None QLIK_SPACE_ID: ClassVar[Any] = None @@ -88,6 +89,7 @@ class QlikSpace(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -107,7 +109,9 @@ class QlikSpace(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - qlik_space_type: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QlikSpace" + + qlik_type: Union[str, None, UnsetType] = UNSET """Type of this space, for exmaple: Private, Shared, etc.""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -183,6 +187,11 @@ class QlikSpace(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -244,66 +253,6 @@ class QlikSpace(Asset): def __post_init__(self) -> None: self.type_name = "QlikSpace" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QlikSpace instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"QlikSpace validation failed: {errors}") - - def minimize(self) -> "QlikSpace": - """ - Return a minimal copy of this QlikSpace with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QlikSpace with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QlikSpace instance with only the minimum required fields. - """ - self.validate() - return QlikSpace(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQlikSpace": - """ - Create a :class:`RelatedQlikSpace` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQlikSpace reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQlikSpace(guid=self.guid) - return RelatedQlikSpace(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -359,7 +308,7 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> QlikSpace: class QlikSpaceAttributes(AssetAttributes): """QlikSpace-specific attributes for nested API format.""" - qlik_space_type: Union[str, None, UnsetType] = UNSET + qlik_type: Union[str, None, UnsetType] = UNSET """Type of this space, for exmaple: Private, Shared, etc.""" qlik_id: Union[str, None, UnsetType] = UNSET @@ -439,6 +388,11 @@ class QlikSpaceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -531,6 +485,7 @@ class QlikSpaceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -555,7 +510,7 @@ class QlikSpaceNested(AssetNested): def _populate_qlik_space_attrs(attrs: QlikSpaceAttributes, obj: QlikSpace) -> None: """Populate QlikSpace-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.qlik_space_type = obj.qlik_space_type + attrs.qlik_type = obj.qlik_type attrs.qlik_id = obj.qlik_id attrs.qlik_qri = obj.qlik_qri attrs.qlik_space_id = obj.qlik_space_id @@ -570,7 +525,7 @@ def _populate_qlik_space_attrs(attrs: QlikSpaceAttributes, obj: QlikSpace) -> No def 
_extract_qlik_space_attrs(attrs: QlikSpaceAttributes) -> dict: """Extract all QlikSpace attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["qlik_space_type"] = attrs.qlik_space_type + result["qlik_type"] = attrs.qlik_type result["qlik_id"] = attrs.qlik_id result["qlik_qri"] = attrs.qlik_qri result["qlik_space_id"] = attrs.qlik_space_id @@ -616,9 +571,6 @@ def _qlik_space_to_nested(qlik_space: QlikSpace) -> QlikSpaceNested: is_incomplete=qlik_space.is_incomplete, provenance_type=qlik_space.provenance_type, home_id=qlik_space.home_id, - depth=qlik_space.depth, - immediate_upstream=qlik_space.immediate_upstream, - immediate_downstream=qlik_space.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -650,6 +602,7 @@ def _qlik_space_from_nested(nested: QlikSpaceNested) -> QlikSpace: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -658,9 +611,6 @@ def _qlik_space_from_nested(nested: QlikSpaceNested) -> QlikSpace: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_qlik_space_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -688,7 +638,7 @@ def _qlik_space_from_nested_bytes(data: bytes, serde: Serde) -> QlikSpace: RelationField, ) -QlikSpace.QLIK_SPACE_TYPE = KeywordField("qlikSpaceType", "qlikSpaceType") +QlikSpace.QLIK_TYPE = KeywordField("qlikType", "qlikType") QlikSpace.QLIK_ID = KeywordField("qlikId", "qlikId") QlikSpace.QLIK_QRI = KeywordTextField("qlikQRI", "qlikQRI", "qlikQRI.text") QlikSpace.QLIK_SPACE_ID = KeywordField("qlikSpaceId", 
"qlikSpaceId") @@ -718,6 +668,9 @@ def _qlik_space_from_nested_bytes(data: bytes, serde: Serde) -> QlikSpace: QlikSpace.METRICS = RelationField("metrics") QlikSpace.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QlikSpace.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QlikSpace.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QlikSpace.MEANINGS = RelationField("meanings") QlikSpace.MC_MONITORS = RelationField("mcMonitors") QlikSpace.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/query.py b/pyatlan_v9/model/assets/query.py index d0ac34b02..c592a5ced 100644 --- a/pyatlan_v9/model/assets/query.py +++ b/pyatlan_v9/model/assets/query.py @@ -50,6 +50,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -66,7 +67,7 @@ RelatedSqlInsightBusinessQuestion, RelatedSqlInsightJoin, ) -from .sql_related import RelatedColumn, RelatedQuery, RelatedTable, RelatedView +from .sql_related import RelatedColumn, RelatedTable, RelatedView # ============================================================================= # FLAT ASSET CLASS @@ -136,6 +137,7 @@ class Query(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -161,6 +163,8 @@ class Query(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Query" + raw_query: Union[str, None, UnsetType] = UNSET """Deprecated. 
See 'longRawQuery' instead.""" @@ -340,6 +344,11 @@ class Query(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -433,72 +442,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Query instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.parent is UNSET: - errors.append("parent is required for creation") - if errors: - raise ValueError(f"Query validation failed: {errors}") - - def minimize(self) -> "Query": - """ - Return a minimal copy of this Query with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Query with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Query instance with only the minimum required fields. - """ - self.validate() - return Query(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuery": - """ - Create a :class:`RelatedQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuery reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuery(guid=self.guid) - return RelatedQuery(qualified_name=self.qualified_name) - @classmethod def creator( cls, @@ -840,6 +783,11 @@ class QueryRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -964,6 +912,7 @@ class QueryNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1123,9 +1072,6 @@ def _query_to_nested(query: Query) -> QueryNested: is_incomplete=query.is_incomplete, provenance_type=query.provenance_type, home_id=query.home_id, - depth=query.depth, - immediate_upstream=query.immediate_upstream, - immediate_downstream=query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1155,6 +1101,7 @@ def _query_from_nested(nested: QueryNested) -> Query: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1163,9 +1110,6 @@ def _query_from_nested(nested: QueryNested) -> Query: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_query_attrs(attrs), # Merged relationship attributes **merged_rels, @@ 
-1286,6 +1230,9 @@ def _query_from_nested_bytes(data: bytes, serde: Serde) -> Query: Query.DBT_SOURCES = RelationField("dbtSources") Query.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Query.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Query.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Query.MEANINGS = RelationField("meanings") Query.MC_MONITORS = RelationField("mcMonitors") Query.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/quick_sight.py b/pyatlan_v9/model/assets/quick_sight.py index 84deb8a14..a4c92cdeb 100644 --- a/pyatlan_v9/model/assets/quick_sight.py +++ b/pyatlan_v9/model/assets/quick_sight.py @@ -40,12 +40,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import RelatedQuickSight from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -81,6 +81,7 @@ class QuickSight(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -98,6 +99,8 @@ class QuickSight(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: 
Union[str, UnsetType] = "QuickSight" + quick_sight_id: Union[str, None, UnsetType] = UNSET """Unique identifier for the QuickSight asset.""" @@ -156,6 +159,11 @@ class QuickSight(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -211,66 +219,6 @@ class QuickSight(Asset): def __post_init__(self) -> None: self.type_name = "QuickSight" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSight instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"QuickSight validation failed: {errors}") - - def minimize(self) -> "QuickSight": - """ - Return a minimal copy of this QuickSight with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSight with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSight instance with only the minimum required fields. - """ - self.validate() - return QuickSight(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSight": - """ - Create a :class:`RelatedQuickSight` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSight reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQuickSight(guid=self.guid) - return RelatedQuickSight(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -388,6 +336,11 @@ class QuickSightRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -474,6 +427,7 @@ class QuickSightNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -545,9 +499,6 @@ def _quick_sight_to_nested(quick_sight: QuickSight) -> QuickSightNested: is_incomplete=quick_sight.is_incomplete, provenance_type=quick_sight.provenance_type, home_id=quick_sight.home_id, - depth=quick_sight.depth, - 
immediate_upstream=quick_sight.immediate_upstream, - immediate_downstream=quick_sight.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -579,6 +530,7 @@ def _quick_sight_from_nested(nested: QuickSightNested) -> QuickSight: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -587,9 +539,6 @@ def _quick_sight_from_nested(nested: QuickSightNested) -> QuickSight: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -638,6 +587,9 @@ def _quick_sight_from_nested_bytes(data: bytes, serde: Serde) -> QuickSight: QuickSight.METRICS = RelationField("metrics") QuickSight.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QuickSight.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QuickSight.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QuickSight.MEANINGS = RelationField("meanings") QuickSight.MC_MONITORS = RelationField("mcMonitors") QuickSight.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/quick_sight_analysis.py b/pyatlan_v9/model/assets/quick_sight_analysis.py index 4d403384d..e6cab4d0e 100644 --- a/pyatlan_v9/model/assets/quick_sight_analysis.py +++ b/pyatlan_v9/model/assets/quick_sight_analysis.py @@ -42,13 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from 
.gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .quick_sight_related import ( - RelatedQuickSightAnalysis, RelatedQuickSightAnalysisVisual, RelatedQuickSightFolder, ) @@ -69,7 +69,7 @@ class QuickSightAnalysis(Asset): Instance of a QuickSight analysis in Atlan. In QuickSight, you analyze and visualize your data in analyses, which can be published as a dashboard to share with others. """ - QUICK_SIGHT_ANALYSIS_STATUS: ClassVar[Any] = None + QUICK_SIGHT_STATUS: ClassVar[Any] = None QUICK_SIGHT_ANALYSIS_CALCULATED_FIELDS: ClassVar[Any] = None QUICK_SIGHT_ANALYSIS_PARAMETER_DECLARATIONS: ClassVar[Any] = None QUICK_SIGHT_ANALYSIS_FILTER_GROUPS: ClassVar[Any] = None @@ -91,6 +91,7 @@ class QuickSightAnalysis(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,7 +111,9 @@ class QuickSightAnalysis(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_analysis_status: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QuickSightAnalysis" + + quick_sight_status: Union[str, None, UnsetType] = UNSET """Status of this analysis, for example: CREATION_IN_PROGRESS, UPDATE_SUCCESSFUL, etc.""" quick_sight_analysis_calculated_fields: Union[List[str], None, UnsetType] = UNSET @@ -182,6 +185,11 @@ class QuickSightAnalysis(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + 
List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -253,72 +261,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightAnalysis instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_analysis_folders is UNSET: - errors.append("quick_sight_analysis_folders is required for creation") - if errors: - raise ValueError(f"QuickSightAnalysis validation failed: {errors}") - - def minimize(self) -> "QuickSightAnalysis": - """ - Return a minimal copy of this QuickSightAnalysis with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightAnalysis with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightAnalysis instance with only the minimum required fields. - """ - self.validate() - return QuickSightAnalysis(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSightAnalysis": - """ - Create a :class:`RelatedQuickSightAnalysis` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightAnalysis reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuickSightAnalysis(guid=self.guid) - return RelatedQuickSightAnalysis(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -413,7 +355,7 @@ def from_json( class QuickSightAnalysisAttributes(AssetAttributes): """QuickSightAnalysis-specific attributes for nested API format.""" - quick_sight_analysis_status: Union[str, None, UnsetType] = UNSET + quick_sight_status: Union[str, None, UnsetType] = UNSET """Status of this analysis, for example: CREATION_IN_PROGRESS, UPDATE_SUCCESSFUL, etc.""" quick_sight_analysis_calculated_fields: Union[List[str], None, UnsetType] = UNSET @@ -489,6 +431,11 @@ class QuickSightAnalysisRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -587,6 +534,7 @@ class QuickSightAnalysisNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -613,7 +561,7 @@ def _populate_quick_sight_analysis_attrs( ) -> None: """Populate QuickSightAnalysis-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_analysis_status = obj.quick_sight_analysis_status + attrs.quick_sight_status = obj.quick_sight_status attrs.quick_sight_analysis_calculated_fields = ( obj.quick_sight_analysis_calculated_fields ) @@ -630,7 +578,7 @@ def _populate_quick_sight_analysis_attrs( def _extract_quick_sight_analysis_attrs(attrs: QuickSightAnalysisAttributes) -> dict: """Extract all QuickSightAnalysis attributes from the attrs struct into a flat dict.""" result = 
_extract_asset_attrs(attrs) - result["quick_sight_analysis_status"] = attrs.quick_sight_analysis_status + result["quick_sight_status"] = attrs.quick_sight_status result["quick_sight_analysis_calculated_fields"] = ( attrs.quick_sight_analysis_calculated_fields ) @@ -684,9 +632,6 @@ def _quick_sight_analysis_to_nested( is_incomplete=quick_sight_analysis.is_incomplete, provenance_type=quick_sight_analysis.provenance_type, home_id=quick_sight_analysis.home_id, - depth=quick_sight_analysis.depth, - immediate_upstream=quick_sight_analysis.immediate_upstream, - immediate_downstream=quick_sight_analysis.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -722,6 +667,7 @@ def _quick_sight_analysis_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -730,9 +676,6 @@ def _quick_sight_analysis_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_analysis_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -763,8 +706,8 @@ def _quick_sight_analysis_from_nested_bytes( RelationField, ) -QuickSightAnalysis.QUICK_SIGHT_ANALYSIS_STATUS = KeywordField( - "quickSightAnalysisStatus", "quickSightAnalysisStatus" +QuickSightAnalysis.QUICK_SIGHT_STATUS = KeywordField( + "quickSightStatus", "quickSightStatus" ) QuickSightAnalysis.QUICK_SIGHT_ANALYSIS_CALCULATED_FIELDS = KeywordField( "quickSightAnalysisCalculatedFields", "quickSightAnalysisCalculatedFields" @@ -805,6 +748,9 @@ def _quick_sight_analysis_from_nested_bytes( QuickSightAnalysis.METRICS = RelationField("metrics") 
QuickSightAnalysis.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QuickSightAnalysis.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QuickSightAnalysis.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QuickSightAnalysis.MEANINGS = RelationField("meanings") QuickSightAnalysis.MC_MONITORS = RelationField("mcMonitors") QuickSightAnalysis.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/quick_sight_analysis_visual.py b/pyatlan_v9/model/assets/quick_sight_analysis_visual.py index 3d573e84d..3f9ae51df 100644 --- a/pyatlan_v9/model/assets/quick_sight_analysis_visual.py +++ b/pyatlan_v9/model/assets/quick_sight_analysis_visual.py @@ -42,15 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import ( - RelatedQuickSightAnalysis, - RelatedQuickSightAnalysisVisual, -) +from .quick_sight_related import RelatedQuickSightAnalysis from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -87,6 +85,7 @@ class QuickSightAnalysisVisual(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: 
ClassVar[Any] = None @@ -105,6 +104,8 @@ class QuickSightAnalysisVisual(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QuickSightAnalysisVisual" + quick_sight_analysis_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the QuickSight analysis in which this visual exists.""" @@ -166,6 +167,11 @@ class QuickSightAnalysisVisual(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -232,78 +238,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightAnalysisVisual instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_analysis is UNSET: - errors.append("quick_sight_analysis is required for creation") - if self.quick_sight_analysis_qualified_name is UNSET: - errors.append( - "quick_sight_analysis_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"QuickSightAnalysisVisual validation failed: {errors}") - - def minimize(self) -> "QuickSightAnalysisVisual": - """ - Return a minimal copy of this QuickSightAnalysisVisual with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightAnalysisVisual with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightAnalysisVisual instance with only the minimum required fields. - """ - self.validate() - return QuickSightAnalysisVisual( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedQuickSightAnalysisVisual": - """ - Create a :class:`RelatedQuickSightAnalysisVisual` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightAnalysisVisual reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuickSightAnalysisVisual(guid=self.guid) - return RelatedQuickSightAnalysisVisual(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -489,6 +423,11 @@ class QuickSightAnalysisVisualRelationshipAttributes(AssetRelationshipAttributes ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -580,6 +519,7 @@ class QuickSightAnalysisVisualNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -664,9 +604,6 @@ def _quick_sight_analysis_visual_to_nested( is_incomplete=quick_sight_analysis_visual.is_incomplete, provenance_type=quick_sight_analysis_visual.provenance_type, home_id=quick_sight_analysis_visual.home_id, - depth=quick_sight_analysis_visual.depth, - immediate_upstream=quick_sight_analysis_visual.immediate_upstream, - immediate_downstream=quick_sight_analysis_visual.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -702,6 +639,7 @@ def _quick_sight_analysis_visual_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -710,9 +648,6 @@ def _quick_sight_analysis_visual_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_analysis_visual_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -788,6 +723,9 @@ def _quick_sight_analysis_visual_from_nested_bytes( QuickSightAnalysisVisual.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +QuickSightAnalysisVisual.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QuickSightAnalysisVisual.MEANINGS = RelationField("meanings") QuickSightAnalysisVisual.MC_MONITORS = RelationField("mcMonitors") QuickSightAnalysisVisual.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/quick_sight_dashboard.py b/pyatlan_v9/model/assets/quick_sight_dashboard.py index 3c1ed2602..7c31b1970 100644 --- a/pyatlan_v9/model/assets/quick_sight_dashboard.py +++ b/pyatlan_v9/model/assets/quick_sight_dashboard.py @@ -42,13 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .quick_sight_related import ( - RelatedQuickSightDashboard, RelatedQuickSightDashboardVisual, RelatedQuickSightFolder, ) @@ -69,8 +69,8 @@ class QuickSightDashboard(Asset): Instance of a QuickSight dashboard in Atlan. These are reports in QuickSight, created from analyses. 
""" - QUICK_SIGHT_DASHBOARD_PUBLISHED_VERSION_NUMBER: ClassVar[Any] = None - QUICK_SIGHT_DASHBOARD_LAST_PUBLISHED_TIME: ClassVar[Any] = None + QUICK_SIGHT_PUBLISHED_VERSION_NUMBER: ClassVar[Any] = None + QUICK_SIGHT_LAST_PUBLISHED_TIME: ClassVar[Any] = None QUICK_SIGHT_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_NAME: ClassVar[Any] = None @@ -89,6 +89,7 @@ class QuickSightDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -108,10 +109,12 @@ class QuickSightDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_dashboard_published_version_number: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QuickSightDashboard" + + quick_sight_published_version_number: Union[int, None, UnsetType] = UNSET """Version number of the published dashboard.""" - quick_sight_dashboard_last_published_time: Union[int, None, UnsetType] = UNSET + quick_sight_last_published_time: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this dashboard was last published, in milliseconds.""" quick_sight_id: Union[str, None, UnsetType] = UNSET @@ -172,6 +175,11 @@ class QuickSightDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -243,72 +251,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def 
validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_dashboard_folders is UNSET: - errors.append("quick_sight_dashboard_folders is required for creation") - if errors: - raise ValueError(f"QuickSightDashboard validation failed: {errors}") - - def minimize(self) -> "QuickSightDashboard": - """ - Return a minimal copy of this QuickSightDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightDashboard instance with only the minimum required fields. 
- """ - self.validate() - return QuickSightDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSightDashboard": - """ - Create a :class:`RelatedQuickSightDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightDashboard reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQuickSightDashboard(guid=self.guid) - return RelatedQuickSightDashboard(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -412,10 +354,10 @@ def from_json( class QuickSightDashboardAttributes(AssetAttributes): """QuickSightDashboard-specific attributes for nested API format.""" - quick_sight_dashboard_published_version_number: Union[int, None, UnsetType] = UNSET + quick_sight_published_version_number: Union[int, None, UnsetType] = UNSET """Version number of the published dashboard.""" - quick_sight_dashboard_last_published_time: Union[int, None, UnsetType] = UNSET + quick_sight_last_published_time: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this dashboard was last published, in milliseconds.""" quick_sight_id: Union[str, None, UnsetType] = UNSET @@ -480,6 +422,11 @@ class QuickSightDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -578,6 +525,7 @@ class QuickSightDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", 
"mc_incidents", @@ -604,12 +552,10 @@ def _populate_quick_sight_dashboard_attrs( ) -> None: """Populate QuickSightDashboard-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_dashboard_published_version_number = ( - obj.quick_sight_dashboard_published_version_number - ) - attrs.quick_sight_dashboard_last_published_time = ( - obj.quick_sight_dashboard_last_published_time + attrs.quick_sight_published_version_number = ( + obj.quick_sight_published_version_number ) + attrs.quick_sight_last_published_time = obj.quick_sight_last_published_time attrs.quick_sight_id = obj.quick_sight_id attrs.quick_sight_sheet_id = obj.quick_sight_sheet_id attrs.quick_sight_sheet_name = obj.quick_sight_sheet_name @@ -619,12 +565,10 @@ def _populate_quick_sight_dashboard_attrs( def _extract_quick_sight_dashboard_attrs(attrs: QuickSightDashboardAttributes) -> dict: """Extract all QuickSightDashboard attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["quick_sight_dashboard_published_version_number"] = ( - attrs.quick_sight_dashboard_published_version_number - ) - result["quick_sight_dashboard_last_published_time"] = ( - attrs.quick_sight_dashboard_last_published_time + result["quick_sight_published_version_number"] = ( + attrs.quick_sight_published_version_number ) + result["quick_sight_last_published_time"] = attrs.quick_sight_last_published_time result["quick_sight_id"] = attrs.quick_sight_id result["quick_sight_sheet_id"] = attrs.quick_sight_sheet_id result["quick_sight_sheet_name"] = attrs.quick_sight_sheet_name @@ -669,9 +613,6 @@ def _quick_sight_dashboard_to_nested( is_incomplete=quick_sight_dashboard.is_incomplete, provenance_type=quick_sight_dashboard.provenance_type, home_id=quick_sight_dashboard.home_id, - depth=quick_sight_dashboard.depth, - immediate_upstream=quick_sight_dashboard.immediate_upstream, - immediate_downstream=quick_sight_dashboard.immediate_downstream, attributes=attrs, 
relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -707,6 +648,7 @@ def _quick_sight_dashboard_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -715,9 +657,6 @@ def _quick_sight_dashboard_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -749,12 +688,11 @@ def _quick_sight_dashboard_from_nested_bytes( RelationField, ) -QuickSightDashboard.QUICK_SIGHT_DASHBOARD_PUBLISHED_VERSION_NUMBER = NumericField( - "quickSightDashboardPublishedVersionNumber", - "quickSightDashboardPublishedVersionNumber", +QuickSightDashboard.QUICK_SIGHT_PUBLISHED_VERSION_NUMBER = NumericField( + "quickSightPublishedVersionNumber", "quickSightPublishedVersionNumber" ) -QuickSightDashboard.QUICK_SIGHT_DASHBOARD_LAST_PUBLISHED_TIME = NumericField( - "quickSightDashboardLastPublishedTime", "quickSightDashboardLastPublishedTime" +QuickSightDashboard.QUICK_SIGHT_LAST_PUBLISHED_TIME = NumericField( + "quickSightLastPublishedTime", "quickSightLastPublishedTime" ) QuickSightDashboard.QUICK_SIGHT_ID = KeywordField("quickSightId", "quickSightId") QuickSightDashboard.QUICK_SIGHT_SHEET_ID = KeywordField( @@ -788,6 +726,9 @@ def _quick_sight_dashboard_from_nested_bytes( QuickSightDashboard.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +QuickSightDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QuickSightDashboard.MEANINGS = RelationField("meanings") QuickSightDashboard.MC_MONITORS 
= RelationField("mcMonitors") QuickSightDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/quick_sight_dashboard_visual.py b/pyatlan_v9/model/assets/quick_sight_dashboard_visual.py index 94cd0c7cb..9ac05a124 100644 --- a/pyatlan_v9/model/assets/quick_sight_dashboard_visual.py +++ b/pyatlan_v9/model/assets/quick_sight_dashboard_visual.py @@ -42,15 +42,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import ( - RelatedQuickSightDashboard, - RelatedQuickSightDashboardVisual, -) +from .quick_sight_related import RelatedQuickSightDashboard from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -87,6 +85,7 @@ class QuickSightDashboardVisual(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +104,8 @@ class QuickSightDashboardVisual(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "QuickSightDashboardVisual" + quick_sight_dashboard_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the dashboard in which this visual 
exists.""" @@ -166,6 +167,11 @@ class QuickSightDashboardVisual(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -232,78 +238,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightDashboardVisual instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_dashboard is UNSET: - errors.append("quick_sight_dashboard is required for creation") - if self.quick_sight_dashboard_qualified_name is UNSET: - errors.append( - "quick_sight_dashboard_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"QuickSightDashboardVisual validation failed: {errors}") - - def minimize(self) -> "QuickSightDashboardVisual": - """ - Return a minimal copy of this QuickSightDashboardVisual with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightDashboardVisual with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightDashboardVisual instance with only the minimum required fields. - """ - self.validate() - return QuickSightDashboardVisual( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedQuickSightDashboardVisual": - """ - Create a :class:`RelatedQuickSightDashboardVisual` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightDashboardVisual reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedQuickSightDashboardVisual(guid=self.guid) - return RelatedQuickSightDashboardVisual(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -489,6 +423,11 @@ class QuickSightDashboardVisualRelationshipAttributes(AssetRelationshipAttribute ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -580,6 +519,7 @@ class QuickSightDashboardVisualNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -666,9 +606,6 @@ def _quick_sight_dashboard_visual_to_nested( is_incomplete=quick_sight_dashboard_visual.is_incomplete, provenance_type=quick_sight_dashboard_visual.provenance_type, home_id=quick_sight_dashboard_visual.home_id, - depth=quick_sight_dashboard_visual.depth, - immediate_upstream=quick_sight_dashboard_visual.immediate_upstream, - immediate_downstream=quick_sight_dashboard_visual.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -704,6 +641,7 @@ def _quick_sight_dashboard_visual_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -712,9 +650,6 @@ def _quick_sight_dashboard_visual_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_dashboard_visual_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -790,6 +725,9 @@ def _quick_sight_dashboard_visual_from_nested_bytes( QuickSightDashboardVisual.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +QuickSightDashboardVisual.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QuickSightDashboardVisual.MEANINGS = RelationField("meanings") QuickSightDashboardVisual.MC_MONITORS = RelationField("mcMonitors") QuickSightDashboardVisual.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/quick_sight_dataset.py b/pyatlan_v9/model/assets/quick_sight_dataset.py index df12fd3f8..a672d633c 100644 --- a/pyatlan_v9/model/assets/quick_sight_dataset.py +++ b/pyatlan_v9/model/assets/quick_sight_dataset.py @@ -39,18 +39,16 @@ _extract_asset_attrs, _populate_asset_attrs, ) +from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import ( - RelatedQuickSightDataset, - RelatedQuickSightDatasetField, - RelatedQuickSightFolder, -) +from .quick_sight_related import RelatedQuickSightDatasetField, RelatedQuickSightFolder from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -68,16 +66,19 @@ class 
QuickSightDataset(Asset): Instance of a QuickSight dataset in Atlan. These are an internal data model built to be used by analysis. In a dataset, data can be pulled from different sources, joined, filtered, and columns translated to more business-friendly names when preparing the data for visualizing in the analysis layer. """ - QUICK_SIGHT_DATASET_IMPORT_MODE: ClassVar[Any] = None - QUICK_SIGHT_DATASET_COLUMN_COUNT: ClassVar[Any] = None + QUICK_SIGHT_IMPORT_MODE: ClassVar[Any] = None + QUICK_SIGHT_COLUMN_COUNT: ClassVar[Any] = None QUICK_SIGHT_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_NAME: ClassVar[Any] = None + CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None APPLICATION: ClassVar[Any] = None APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None @@ -85,6 +86,7 @@ class QuickSightDataset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,10 +106,12 @@ class QuickSightDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_dataset_import_mode: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QuickSightDataset" + + quick_sight_import_mode: Union[str, None, UnsetType] = UNSET """Import mode for this dataset, for example: SPICE or DIRECT_QUERY.""" - quick_sight_dataset_column_count: Union[int, None, UnsetType] = UNSET + 
quick_sight_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this dataset.""" quick_sight_id: Union[str, None, UnsetType] = UNSET @@ -119,6 +123,9 @@ class QuickSightDataset(Asset): quick_sight_sheet_name: Union[str, None, UnsetType] = UNSET """Name of the QuickSight sheet.""" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -134,6 +141,12 @@ class QuickSightDataset(Asset): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -159,6 +172,11 @@ class QuickSightDataset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -210,7 +228,7 @@ class QuickSightDataset(Asset): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -230,72 +248,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: 
ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_dataset_folders is UNSET: - errors.append("quick_sight_dataset_folders is required for creation") - if errors: - raise ValueError(f"QuickSightDataset validation failed: {errors}") - - def minimize(self) -> "QuickSightDataset": - """ - Return a minimal copy of this QuickSightDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightDataset instance with only the minimum required fields. 
- """ - self.validate() - return QuickSightDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSightDataset": - """ - Create a :class:`RelatedQuickSightDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightDataset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQuickSightDataset(guid=self.guid) - return RelatedQuickSightDataset(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -403,10 +355,10 @@ def from_json( class QuickSightDatasetAttributes(AssetAttributes): """QuickSightDataset-specific attributes for nested API format.""" - quick_sight_dataset_import_mode: Union[str, None, UnsetType] = UNSET + quick_sight_import_mode: Union[str, None, UnsetType] = UNSET """Import mode for this dataset, for example: SPICE or DIRECT_QUERY.""" - quick_sight_dataset_column_count: Union[int, None, UnsetType] = UNSET + quick_sight_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this dataset.""" quick_sight_id: Union[str, None, UnsetType] = UNSET @@ -418,6 +370,9 @@ class QuickSightDatasetAttributes(AssetAttributes): quick_sight_sheet_name: Union[str, None, UnsetType] = UNSET """Name of the QuickSight sheet.""" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + class QuickSightDatasetRelationshipAttributes(AssetRelationshipAttributes): """QuickSightDataset-specific relationship attributes for nested API format.""" @@ -437,6 +392,12 @@ class QuickSightDatasetRelationshipAttributes(AssetRelationshipAttributes): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + 
"""Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -462,6 +423,11 @@ class QuickSightDatasetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -513,7 +479,7 @@ class QuickSightDatasetRelationshipAttributes(AssetRelationshipAttributes): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -551,6 +517,8 @@ class QuickSightDatasetNested(AssetNested): "anomalo_checks", "application", "application_field", + "data_contract_latest", + "data_contract_latest_certified", "output_port_data_products", "input_port_data_products", "model_implemented_entities", @@ -558,6 +526,7 @@ class QuickSightDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -584,21 +553,23 @@ def _populate_quick_sight_dataset_attrs( ) -> None: """Populate QuickSightDataset-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_dataset_import_mode = obj.quick_sight_dataset_import_mode - attrs.quick_sight_dataset_column_count = 
obj.quick_sight_dataset_column_count + attrs.quick_sight_import_mode = obj.quick_sight_import_mode + attrs.quick_sight_column_count = obj.quick_sight_column_count attrs.quick_sight_id = obj.quick_sight_id attrs.quick_sight_sheet_id = obj.quick_sight_sheet_id attrs.quick_sight_sheet_name = obj.quick_sight_sheet_name + attrs.catalog_dataset_guid = obj.catalog_dataset_guid def _extract_quick_sight_dataset_attrs(attrs: QuickSightDatasetAttributes) -> dict: """Extract all QuickSightDataset attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["quick_sight_dataset_import_mode"] = attrs.quick_sight_dataset_import_mode - result["quick_sight_dataset_column_count"] = attrs.quick_sight_dataset_column_count + result["quick_sight_import_mode"] = attrs.quick_sight_import_mode + result["quick_sight_column_count"] = attrs.quick_sight_column_count result["quick_sight_id"] = attrs.quick_sight_id result["quick_sight_sheet_id"] = attrs.quick_sight_sheet_id result["quick_sight_sheet_name"] = attrs.quick_sight_sheet_name + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -639,9 +610,6 @@ def _quick_sight_dataset_to_nested( is_incomplete=quick_sight_dataset.is_incomplete, provenance_type=quick_sight_dataset.provenance_type, home_id=quick_sight_dataset.home_id, - depth=quick_sight_dataset.depth, - immediate_upstream=quick_sight_dataset.immediate_upstream, - immediate_downstream=quick_sight_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -677,6 +645,7 @@ def _quick_sight_dataset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -685,9 +654,6 @@ def _quick_sight_dataset_from_nested( 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -719,11 +685,11 @@ def _quick_sight_dataset_from_nested_bytes( RelationField, ) -QuickSightDataset.QUICK_SIGHT_DATASET_IMPORT_MODE = KeywordField( - "quickSightDatasetImportMode", "quickSightDatasetImportMode" +QuickSightDataset.QUICK_SIGHT_IMPORT_MODE = KeywordField( + "quickSightImportMode", "quickSightImportMode" ) -QuickSightDataset.QUICK_SIGHT_DATASET_COLUMN_COUNT = NumericField( - "quickSightDatasetColumnCount", "quickSightDatasetColumnCount" +QuickSightDataset.QUICK_SIGHT_COLUMN_COUNT = NumericField( + "quickSightColumnCount", "quickSightColumnCount" ) QuickSightDataset.QUICK_SIGHT_ID = KeywordField("quickSightId", "quickSightId") QuickSightDataset.QUICK_SIGHT_SHEET_ID = KeywordField( @@ -732,11 +698,18 @@ def _quick_sight_dataset_from_nested_bytes( QuickSightDataset.QUICK_SIGHT_SHEET_NAME = KeywordTextField( "quickSightSheetName", "quickSightSheetName", "quickSightSheetName.text" ) +QuickSightDataset.CATALOG_DATASET_GUID = KeywordField( + "catalogDatasetGuid", "catalogDatasetGuid" +) QuickSightDataset.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") QuickSightDataset.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") QuickSightDataset.ANOMALO_CHECKS = RelationField("anomaloChecks") QuickSightDataset.APPLICATION = RelationField("application") QuickSightDataset.APPLICATION_FIELD = RelationField("applicationField") +QuickSightDataset.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +QuickSightDataset.DATA_CONTRACT_LATEST_CERTIFIED = RelationField( + "dataContractLatestCertified" +) QuickSightDataset.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") QuickSightDataset.INPUT_PORT_DATA_PRODUCTS 
= RelationField("inputPortDataProducts") QuickSightDataset.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") @@ -746,6 +719,9 @@ def _quick_sight_dataset_from_nested_bytes( QuickSightDataset.METRICS = RelationField("metrics") QuickSightDataset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QuickSightDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QuickSightDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QuickSightDataset.MEANINGS = RelationField("meanings") QuickSightDataset.MC_MONITORS = RelationField("mcMonitors") QuickSightDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/quick_sight_dataset_field.py b/pyatlan_v9/model/assets/quick_sight_dataset_field.py index ea83668b5..a66c8a8a6 100644 --- a/pyatlan_v9/model/assets/quick_sight_dataset_field.py +++ b/pyatlan_v9/model/assets/quick_sight_dataset_field.py @@ -39,14 +39,16 @@ _extract_asset_attrs, _populate_asset_attrs, ) +from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .quick_sight_related import RelatedQuickSightDataset, RelatedQuickSightDatasetField +from .quick_sight_related import RelatedQuickSightDataset from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -64,16 +66,19 @@ class QuickSightDatasetField(Asset): 
Instance of a QuickSight dataset field in Atlan. """ - QUICK_SIGHT_DATASET_FIELD_TYPE: ClassVar[Any] = None + QUICK_SIGHT_TYPE: ClassVar[Any] = None QUICK_SIGHT_DATASET_QUALIFIED_NAME: ClassVar[Any] = None QUICK_SIGHT_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_NAME: ClassVar[Any] = None + CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None APPLICATION: ClassVar[Any] = None APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None @@ -81,6 +86,7 @@ class QuickSightDatasetField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -99,7 +105,9 @@ class QuickSightDatasetField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_dataset_field_type: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "QuickSightDatasetField" + + quick_sight_type: Union[str, None, UnsetType] = UNSET """Datatype of this field, for example: STRING, INTEGER, etc.""" quick_sight_dataset_qualified_name: Union[str, None, UnsetType] = UNSET @@ -114,6 +122,9 @@ class QuickSightDatasetField(Asset): quick_sight_sheet_name: Union[str, None, UnsetType] = UNSET """Name of the QuickSight sheet.""" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET 
"""Tasks to which this asset provides input.""" @@ -129,6 +140,12 @@ class QuickSightDatasetField(Asset): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -154,6 +171,11 @@ class QuickSightDatasetField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -198,7 +220,7 @@ class QuickSightDatasetField(Asset): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -220,78 +242,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightDatasetField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.quick_sight_dataset is UNSET: - errors.append("quick_sight_dataset is required for creation") - if self.quick_sight_dataset_qualified_name is UNSET: - errors.append( - "quick_sight_dataset_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"QuickSightDatasetField validation failed: {errors}") - - def minimize(self) -> "QuickSightDatasetField": - """ - Return a minimal copy of this QuickSightDatasetField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightDatasetField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightDatasetField instance with only the minimum required fields. - """ - self.validate() - return QuickSightDatasetField( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedQuickSightDatasetField": - """ - Create a :class:`RelatedQuickSightDatasetField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedQuickSightDatasetField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQuickSightDatasetField(guid=self.guid) - return RelatedQuickSightDatasetField(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -404,7 +354,7 @@ def from_json( class QuickSightDatasetFieldAttributes(AssetAttributes): """QuickSightDatasetField-specific attributes for nested API format.""" - quick_sight_dataset_field_type: Union[str, None, UnsetType] = UNSET + quick_sight_type: Union[str, None, UnsetType] = UNSET """Datatype of this field, for example: STRING, INTEGER, etc.""" quick_sight_dataset_qualified_name: Union[str, None, UnsetType] = UNSET @@ -419,6 +369,9 @@ class QuickSightDatasetFieldAttributes(AssetAttributes): quick_sight_sheet_name: Union[str, None, UnsetType] = UNSET """Name of the QuickSight sheet.""" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + class QuickSightDatasetFieldRelationshipAttributes(AssetRelationshipAttributes): """QuickSightDatasetField-specific relationship attributes for nested API format.""" @@ -438,6 +391,12 @@ class QuickSightDatasetFieldRelationshipAttributes(AssetRelationshipAttributes): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -463,6 +422,11 @@ class QuickSightDatasetFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + 
gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -507,7 +471,7 @@ class QuickSightDatasetFieldRelationshipAttributes(AssetRelationshipAttributes): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -545,6 +509,8 @@ class QuickSightDatasetFieldNested(AssetNested): "anomalo_checks", "application", "application_field", + "data_contract_latest", + "data_contract_latest_certified", "output_port_data_products", "input_port_data_products", "model_implemented_entities", @@ -552,6 +518,7 @@ class QuickSightDatasetFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -577,11 +544,12 @@ def _populate_quick_sight_dataset_field_attrs( ) -> None: """Populate QuickSightDatasetField-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_dataset_field_type = obj.quick_sight_dataset_field_type + attrs.quick_sight_type = obj.quick_sight_type attrs.quick_sight_dataset_qualified_name = obj.quick_sight_dataset_qualified_name attrs.quick_sight_id = obj.quick_sight_id attrs.quick_sight_sheet_id = obj.quick_sight_sheet_id attrs.quick_sight_sheet_name = obj.quick_sight_sheet_name + attrs.catalog_dataset_guid = obj.catalog_dataset_guid def _extract_quick_sight_dataset_field_attrs( @@ -589,13 +557,14 @@ def _extract_quick_sight_dataset_field_attrs( ) -> dict: """Extract all QuickSightDatasetField attributes from the attrs struct into a flat dict.""" result = 
_extract_asset_attrs(attrs) - result["quick_sight_dataset_field_type"] = attrs.quick_sight_dataset_field_type + result["quick_sight_type"] = attrs.quick_sight_type result["quick_sight_dataset_qualified_name"] = ( attrs.quick_sight_dataset_qualified_name ) result["quick_sight_id"] = attrs.quick_sight_id result["quick_sight_sheet_id"] = attrs.quick_sight_sheet_id result["quick_sight_sheet_name"] = attrs.quick_sight_sheet_name + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -636,9 +605,6 @@ def _quick_sight_dataset_field_to_nested( is_incomplete=quick_sight_dataset_field.is_incomplete, provenance_type=quick_sight_dataset_field.provenance_type, home_id=quick_sight_dataset_field.home_id, - depth=quick_sight_dataset_field.depth, - immediate_upstream=quick_sight_dataset_field.immediate_upstream, - immediate_downstream=quick_sight_dataset_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -674,6 +640,7 @@ def _quick_sight_dataset_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -682,9 +649,6 @@ def _quick_sight_dataset_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_dataset_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -715,8 +679,8 @@ def _quick_sight_dataset_field_from_nested_bytes( RelationField, ) -QuickSightDatasetField.QUICK_SIGHT_DATASET_FIELD_TYPE = KeywordField( - "quickSightDatasetFieldType", "quickSightDatasetFieldType" +QuickSightDatasetField.QUICK_SIGHT_TYPE = KeywordField( + 
"quickSightType", "quickSightType" ) QuickSightDatasetField.QUICK_SIGHT_DATASET_QUALIFIED_NAME = KeywordTextField( "quickSightDatasetQualifiedName", @@ -730,6 +694,9 @@ def _quick_sight_dataset_field_from_nested_bytes( QuickSightDatasetField.QUICK_SIGHT_SHEET_NAME = KeywordTextField( "quickSightSheetName", "quickSightSheetName", "quickSightSheetName.text" ) +QuickSightDatasetField.CATALOG_DATASET_GUID = KeywordField( + "catalogDatasetGuid", "catalogDatasetGuid" +) QuickSightDatasetField.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") QuickSightDatasetField.OUTPUT_FROM_AIRFLOW_TASKS = RelationField( "outputFromAirflowTasks" @@ -737,6 +704,10 @@ def _quick_sight_dataset_field_from_nested_bytes( QuickSightDatasetField.ANOMALO_CHECKS = RelationField("anomaloChecks") QuickSightDatasetField.APPLICATION = RelationField("application") QuickSightDatasetField.APPLICATION_FIELD = RelationField("applicationField") +QuickSightDatasetField.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +QuickSightDatasetField.DATA_CONTRACT_LATEST_CERTIFIED = RelationField( + "dataContractLatestCertified" +) QuickSightDatasetField.OUTPUT_PORT_DATA_PRODUCTS = RelationField( "outputPortDataProducts" ) @@ -752,6 +723,9 @@ def _quick_sight_dataset_field_from_nested_bytes( QuickSightDatasetField.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +QuickSightDatasetField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QuickSightDatasetField.MEANINGS = RelationField("meanings") QuickSightDatasetField.MC_MONITORS = RelationField("mcMonitors") QuickSightDatasetField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/quick_sight_folder.py b/pyatlan_v9/model/assets/quick_sight_folder.py index ed8bcd6a9..03ef69ec9 100644 --- a/pyatlan_v9/model/assets/quick_sight_folder.py +++ b/pyatlan_v9/model/assets/quick_sight_folder.py @@ -38,8 +38,10 @@ _extract_asset_attrs, 
_populate_asset_attrs, ) +from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +51,6 @@ RelatedQuickSightAnalysis, RelatedQuickSightDashboard, RelatedQuickSightDataset, - RelatedQuickSightFolder, ) from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme @@ -68,16 +69,19 @@ class QuickSightFolder(Asset): Instance of a QuickSight folder in Atlan. """ - QUICK_SIGHT_FOLDER_TYPE: ClassVar[Any] = None + QUICK_SIGHT_TYPE: ClassVar[Any] = None QUICK_SIGHT_FOLDER_HIERARCHY: ClassVar[Any] = None QUICK_SIGHT_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_ID: ClassVar[Any] = None QUICK_SIGHT_SHEET_NAME: ClassVar[Any] = None + CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None ANOMALO_CHECKS: ClassVar[Any] = None APPLICATION: ClassVar[Any] = None APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None @@ -85,6 +89,7 @@ class QuickSightFolder(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,8 +110,10 @@ class QuickSightFolder(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - quick_sight_folder_type: Union[str, None, UnsetType] = UNSET - """Type of this folder, for example: SHARED.""" + type_name: Union[str, UnsetType] = "QuickSightFolder" + + quick_sight_type: Union[str, None, UnsetType] = UNSET + """Type of this folder, for example: SHARED or RESTRICTED.""" quick_sight_folder_hierarchy: Union[List[Dict[str, str]], None, UnsetType] = UNSET """Detailed path of this folder.""" @@ -120,6 +127,9 @@ class QuickSightFolder(Asset): quick_sight_sheet_name: Union[str, None, UnsetType] = UNSET """Name of the QuickSight sheet.""" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET """Tasks to which this asset provides input.""" @@ -135,6 +145,12 @@ class QuickSightFolder(Asset): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -160,6 +176,11 @@ class QuickSightFolder(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -214,7 +235,7 @@ class QuickSightFolder(Asset): schema_registry_subjects: Union[ 
List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -228,66 +249,6 @@ class QuickSightFolder(Asset): def __post_init__(self) -> None: self.type_name = "QuickSightFolder" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this QuickSightFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"QuickSightFolder validation failed: {errors}") - - def minimize(self) -> "QuickSightFolder": - """ - Return a minimal copy of this QuickSightFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new QuickSightFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new QuickSightFolder instance with only the minimum required fields. 
- """ - self.validate() - return QuickSightFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedQuickSightFolder": - """ - Create a :class:`RelatedQuickSightFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedQuickSightFolder reference to this asset. - """ - if self.guid is not UNSET: - return RelatedQuickSightFolder(guid=self.guid) - return RelatedQuickSightFolder(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -385,8 +346,8 @@ def from_json( class QuickSightFolderAttributes(AssetAttributes): """QuickSightFolder-specific attributes for nested API format.""" - quick_sight_folder_type: Union[str, None, UnsetType] = UNSET - """Type of this folder, for example: SHARED.""" + quick_sight_type: Union[str, None, UnsetType] = UNSET + """Type of this folder, for example: SHARED or RESTRICTED.""" quick_sight_folder_hierarchy: Union[List[Dict[str, str]], None, UnsetType] = UNSET """Detailed path of this folder.""" @@ -400,6 +361,9 @@ class QuickSightFolderAttributes(AssetAttributes): quick_sight_sheet_name: Union[str, None, UnsetType] = UNSET """Name of the QuickSight sheet.""" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + class QuickSightFolderRelationshipAttributes(AssetRelationshipAttributes): """QuickSightFolder-specific relationship attributes for nested API format.""" @@ -419,6 +383,12 @@ class QuickSightFolderRelationshipAttributes(AssetRelationshipAttributes): application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET """ApplicationField owning the Asset.""" + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: 
Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET """Data products for which this asset is an output port.""" @@ -444,6 +414,11 @@ class QuickSightFolderRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -498,7 +473,7 @@ class QuickSightFolderRelationshipAttributes(AssetRelationshipAttributes): schema_registry_subjects: Union[ List[RelatedSchemaRegistrySubject], None, UnsetType ] = UNSET - """""" + """Schema registry subjects associated with this asset.""" soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET """""" @@ -536,6 +511,8 @@ class QuickSightFolderNested(AssetNested): "anomalo_checks", "application", "application_field", + "data_contract_latest", + "data_contract_latest_certified", "output_port_data_products", "input_port_data_products", "model_implemented_entities", @@ -543,6 +520,7 @@ class QuickSightFolderNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -570,21 +548,23 @@ def _populate_quick_sight_folder_attrs( ) -> None: """Populate QuickSightFolder-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.quick_sight_folder_type = obj.quick_sight_folder_type + attrs.quick_sight_type = obj.quick_sight_type attrs.quick_sight_folder_hierarchy = obj.quick_sight_folder_hierarchy attrs.quick_sight_id = obj.quick_sight_id attrs.quick_sight_sheet_id = 
obj.quick_sight_sheet_id attrs.quick_sight_sheet_name = obj.quick_sight_sheet_name + attrs.catalog_dataset_guid = obj.catalog_dataset_guid def _extract_quick_sight_folder_attrs(attrs: QuickSightFolderAttributes) -> dict: """Extract all QuickSightFolder attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["quick_sight_folder_type"] = attrs.quick_sight_folder_type + result["quick_sight_type"] = attrs.quick_sight_type result["quick_sight_folder_hierarchy"] = attrs.quick_sight_folder_hierarchy result["quick_sight_id"] = attrs.quick_sight_id result["quick_sight_sheet_id"] = attrs.quick_sight_sheet_id result["quick_sight_sheet_name"] = attrs.quick_sight_sheet_name + result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -625,9 +605,6 @@ def _quick_sight_folder_to_nested( is_incomplete=quick_sight_folder.is_incomplete, provenance_type=quick_sight_folder.provenance_type, home_id=quick_sight_folder.home_id, - depth=quick_sight_folder.depth, - immediate_upstream=quick_sight_folder.immediate_upstream, - immediate_downstream=quick_sight_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -661,6 +638,7 @@ def _quick_sight_folder_from_nested(nested: QuickSightFolderNested) -> QuickSigh updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -669,9 +647,6 @@ def _quick_sight_folder_from_nested(nested: QuickSightFolderNested) -> QuickSigh is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_quick_sight_folder_attrs(attrs), # Merged relationship 
attributes **merged_rels, @@ -702,9 +677,7 @@ def _quick_sight_folder_from_nested_bytes( RelationField, ) -QuickSightFolder.QUICK_SIGHT_FOLDER_TYPE = KeywordField( - "quickSightFolderType", "quickSightFolderType" -) +QuickSightFolder.QUICK_SIGHT_TYPE = KeywordField("quickSightType", "quickSightType") QuickSightFolder.QUICK_SIGHT_FOLDER_HIERARCHY = KeywordField( "quickSightFolderHierarchy", "quickSightFolderHierarchy" ) @@ -715,11 +688,18 @@ def _quick_sight_folder_from_nested_bytes( QuickSightFolder.QUICK_SIGHT_SHEET_NAME = KeywordTextField( "quickSightSheetName", "quickSightSheetName", "quickSightSheetName.text" ) +QuickSightFolder.CATALOG_DATASET_GUID = KeywordField( + "catalogDatasetGuid", "catalogDatasetGuid" +) QuickSightFolder.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") QuickSightFolder.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") QuickSightFolder.ANOMALO_CHECKS = RelationField("anomaloChecks") QuickSightFolder.APPLICATION = RelationField("application") QuickSightFolder.APPLICATION_FIELD = RelationField("applicationField") +QuickSightFolder.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +QuickSightFolder.DATA_CONTRACT_LATEST_CERTIFIED = RelationField( + "dataContractLatestCertified" +) QuickSightFolder.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") QuickSightFolder.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") QuickSightFolder.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") @@ -729,6 +709,9 @@ def _quick_sight_folder_from_nested_bytes( QuickSightFolder.METRICS = RelationField("metrics") QuickSightFolder.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") QuickSightFolder.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +QuickSightFolder.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) QuickSightFolder.MEANINGS = RelationField("meanings") 
QuickSightFolder.MC_MONITORS = RelationField("mcMonitors") QuickSightFolder.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/quick_sight_related.py b/pyatlan_v9/model/assets/quick_sight_related.py index edf865e62..65ee958e1 100644 --- a/pyatlan_v9/model/assets/quick_sight_related.py +++ b/pyatlan_v9/model/assets/quick_sight_related.py @@ -82,10 +82,10 @@ class RelatedQuickSightDataset(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightDataset" so it serializes correctly - quick_sight_dataset_import_mode: Union[str, None, UnsetType] = UNSET + quick_sight_import_mode: Union[str, None, UnsetType] = UNSET """Import mode for this dataset, for example: SPICE or DIRECT_QUERY.""" - quick_sight_dataset_column_count: Union[int, None, UnsetType] = UNSET + quick_sight_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this dataset.""" def __post_init__(self) -> None: @@ -103,7 +103,7 @@ class RelatedQuickSightDatasetField(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightDatasetField" so it serializes correctly - quick_sight_dataset_field_type: Union[str, None, UnsetType] = UNSET + quick_sight_type: Union[str, None, UnsetType] = UNSET """Datatype of this field, for example: STRING, INTEGER, etc.""" quick_sight_dataset_qualified_name: Union[str, None, UnsetType] = UNSET @@ -124,8 +124,8 @@ class RelatedQuickSightFolder(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightFolder" so it serializes correctly - quick_sight_folder_type: Union[str, None, UnsetType] = UNSET - """Type of this folder, for example: SHARED.""" + quick_sight_type: Union[str, None, UnsetType] = UNSET + """Type of this folder, for example: SHARED or RESTRICTED.""" quick_sight_folder_hierarchy: Union[List[Dict[str, str]], None, UnsetType] = UNSET """Detailed path of this 
folder.""" @@ -145,7 +145,7 @@ class RelatedQuickSightAnalysis(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightAnalysis" so it serializes correctly - quick_sight_analysis_status: Union[str, None, UnsetType] = UNSET + quick_sight_status: Union[str, None, UnsetType] = UNSET """Status of this analysis, for example: CREATION_IN_PROGRESS, UPDATE_SUCCESSFUL, etc.""" quick_sight_analysis_calculated_fields: Union[List[str], None, UnsetType] = UNSET @@ -192,10 +192,10 @@ class RelatedQuickSightDashboard(RelatedQuickSight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "QuickSightDashboard" so it serializes correctly - quick_sight_dashboard_published_version_number: Union[int, None, UnsetType] = UNSET + quick_sight_published_version_number: Union[int, None, UnsetType] = UNSET """Version number of the published dashboard.""" - quick_sight_dashboard_last_published_time: Union[int, None, UnsetType] = UNSET + quick_sight_last_published_time: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this dashboard was last published, in milliseconds.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/readme.py b/pyatlan_v9/model/assets/readme.py index db792da34..0902d0df7 100644 --- a/pyatlan_v9/model/assets/readme.py +++ b/pyatlan_v9/model/assets/readme.py @@ -44,6 +44,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -85,6 +86,7 @@ class Readme(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class Readme(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Readme" + link: Union[str, None, UnsetType] = UNSET """URL to the resource.""" @@ -165,6 +169,11 @@ class Readme(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -232,72 +241,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Readme instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.asset is UNSET: - errors.append("asset is required for creation") - if errors: - raise ValueError(f"Readme validation failed: {errors}") - - def minimize(self) -> "Readme": - """ - Return a minimal copy of this Readme with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Readme with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Readme instance with only the minimum required fields. - """ - self.validate() - return Readme(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedReadme": - """ - Create a :class:`RelatedReadme` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedReadme reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedReadme(guid=self.guid) - return RelatedReadme(qualified_name=self.qualified_name) - @property def description(self) -> Union[str, None, UnsetType]: """Decode URL-encoded description content for parity with legacy models.""" @@ -486,6 +429,11 @@ class ReadmeRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -578,6 +526,7 @@ class ReadmeNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -653,9 +602,6 @@ def _readme_to_nested(readme: Readme) -> ReadmeNested: is_incomplete=readme.is_incomplete, provenance_type=readme.provenance_type, home_id=readme.home_id, - depth=readme.depth, - immediate_upstream=readme.immediate_upstream, - immediate_downstream=readme.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -685,6 +631,7 @@ def _readme_from_nested(nested: ReadmeNested) -> Readme: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -693,9 +640,6 @@ def _readme_from_nested(nested: ReadmeNested) -> Readme: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_readme_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -741,6 +685,9 @@ def _readme_from_nested_bytes(data: bytes, serde: Serde) -> Readme: Readme.METRICS = RelationField("metrics") Readme.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Readme.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Readme.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Readme.MEANINGS = RelationField("meanings") Readme.MC_MONITORS = RelationField("mcMonitors") Readme.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/readme_template.py b/pyatlan_v9/model/assets/readme_template.py index 008b5db9f..88a3dc1d2 100644 --- a/pyatlan_v9/model/assets/readme_template.py +++ b/pyatlan_v9/model/assets/readme_template.py @@ -40,18 +40,14 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable -from .resource_related import ( - RelatedFile, - RelatedLink, - RelatedReadme, - RelatedReadmeTemplate, -) +from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -88,6 +84,7 @@ class ReadmeTemplate(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +102,8 @@ class ReadmeTemplate(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ReadmeTemplate" + icon: Union[str, None, UnsetType] = UNSET """Icon to use for the README template.""" @@ -172,6 +171,11 @@ class ReadmeTemplate(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -227,66 +231,6 @@ class ReadmeTemplate(Asset): def __post_init__(self) -> None: self.type_name = "ReadmeTemplate" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ReadmeTemplate instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ReadmeTemplate validation failed: {errors}") - - def minimize(self) -> "ReadmeTemplate": - """ - Return a minimal copy of this ReadmeTemplate with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ReadmeTemplate with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ReadmeTemplate instance with only the minimum required fields. - """ - self.validate() - return ReadmeTemplate(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedReadmeTemplate": - """ - Create a :class:`RelatedReadmeTemplate` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedReadmeTemplate reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedReadmeTemplate(guid=self.guid) - return RelatedReadmeTemplate(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -413,6 +357,11 @@ class ReadmeTemplateRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -501,6 +450,7 @@ class ReadmeTemplateNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -582,9 +532,6 @@ def _readme_template_to_nested(readme_template: ReadmeTemplate) -> ReadmeTemplat is_incomplete=readme_template.is_incomplete, provenance_type=readme_template.provenance_type, home_id=readme_template.home_id, - depth=readme_template.depth, - immediate_upstream=readme_template.immediate_upstream, - immediate_downstream=readme_template.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -618,6 +565,7 @@ def _readme_template_from_nested(nested: ReadmeTemplateNested) -> ReadmeTemplate updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -626,9 +574,6 @@ def _readme_template_from_nested(nested: ReadmeTemplateNested) -> ReadmeTemplate 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_readme_template_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -684,6 +629,9 @@ def _readme_template_from_nested_bytes(data: bytes, serde: Serde) -> ReadmeTempl ReadmeTemplate.METRICS = RelationField("metrics") ReadmeTemplate.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ReadmeTemplate.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ReadmeTemplate.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ReadmeTemplate.MEANINGS = RelationField("meanings") ReadmeTemplate.MC_MONITORS = RelationField("mcMonitors") ReadmeTemplate.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/redash.py b/pyatlan_v9/model/assets/redash.py index fdf19ef79..1fa29f780 100644 --- a/pyatlan_v9/model/assets/redash.py +++ b/pyatlan_v9/model/assets/redash.py @@ -40,12 +40,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .redash_related import RelatedRedash from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -79,6 +79,7 @@ class Redash(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: 
ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -96,6 +97,8 @@ class Redash(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Redash" + redash_is_published: Union[bool, None, UnsetType] = UNSET """Whether this asset is published in Redash (true) or not (false).""" @@ -148,6 +151,11 @@ class Redash(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -203,66 +211,6 @@ class Redash(Asset): def __post_init__(self) -> None: self.type_name = "Redash" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Redash instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Redash validation failed: {errors}") - - def minimize(self) -> "Redash": - """ - Return a minimal copy of this Redash with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Redash with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Redash instance with only the minimum required fields. - """ - self.validate() - return Redash(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedRedash": - """ - Create a :class:`RelatedRedash` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedRedash reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedRedash(guid=self.guid) - return RelatedRedash(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -374,6 +322,11 @@ class RedashRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -460,6 +413,7 @@ class RedashNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -527,9 +481,6 @@ def _redash_to_nested(redash: Redash) -> RedashNested: is_incomplete=redash.is_incomplete, provenance_type=redash.provenance_type, home_id=redash.home_id, - depth=redash.depth, - immediate_upstream=redash.immediate_upstream, - immediate_downstream=redash.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -559,6 +510,7 @@ def _redash_from_nested(nested: RedashNested) -> Redash: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -567,9 +519,6 @@ def _redash_from_nested(nested: RedashNested) -> Redash: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_redash_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -612,6 +561,9 @@ def _redash_from_nested_bytes(data: bytes, serde: Serde) -> Redash: Redash.METRICS = RelationField("metrics") Redash.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Redash.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Redash.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Redash.MEANINGS = RelationField("meanings") Redash.MC_MONITORS = RelationField("mcMonitors") Redash.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/redash_dashboard.py b/pyatlan_v9/model/assets/redash_dashboard.py index 2c409c00b..31d18c97c 100644 --- a/pyatlan_v9/model/assets/redash_dashboard.py +++ b/pyatlan_v9/model/assets/redash_dashboard.py @@ -40,12 +40,12 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .redash_related import RelatedRedashDashboard from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -80,6 +80,7 @@ class RedashDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None 
MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +98,8 @@ class RedashDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "RedashDashboard" + redash_dashboard_widget_count: Union[int, None, UnsetType] = UNSET """Number of widgets in this dashboard.""" @@ -152,6 +155,11 @@ class RedashDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -207,66 +215,6 @@ class RedashDashboard(Asset): def __post_init__(self) -> None: self.type_name = "RedashDashboard" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this RedashDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"RedashDashboard validation failed: {errors}") - - def minimize(self) -> "RedashDashboard": - """ - Return a minimal copy of this RedashDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new RedashDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new RedashDashboard instance with only the minimum required fields. - """ - self.validate() - return RedashDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedRedashDashboard": - """ - Create a :class:`RelatedRedashDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedRedashDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedRedashDashboard(guid=self.guid) - return RelatedRedashDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -383,6 +331,11 @@ class RedashDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -471,6 +424,7 @@ class RedashDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -546,9 +500,6 @@ def _redash_dashboard_to_nested( is_incomplete=redash_dashboard.is_incomplete, provenance_type=redash_dashboard.provenance_type, home_id=redash_dashboard.home_id, - depth=redash_dashboard.depth, - immediate_upstream=redash_dashboard.immediate_upstream, - immediate_downstream=redash_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -582,6 +533,7 @@ def _redash_dashboard_from_nested(nested: RedashDashboardNested) -> RedashDashbo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -590,9 +542,6 @@ def _redash_dashboard_from_nested(nested: RedashDashboardNested) -> RedashDashbo is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_redash_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -649,6 +598,9 @@ def _redash_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> RedashDash RedashDashboard.METRICS = RelationField("metrics") RedashDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") RedashDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +RedashDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) RedashDashboard.MEANINGS = RelationField("meanings") RedashDashboard.MC_MONITORS = RelationField("mcMonitors") RedashDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/redash_query.py b/pyatlan_v9/model/assets/redash_query.py index a427d7d62..52cf02fb8 100644 --- a/pyatlan_v9/model/assets/redash_query.py +++ b/pyatlan_v9/model/assets/redash_query.py @@ -41,12 +41,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .redash_related import RelatedRedashQuery, RelatedRedashVisualization +from .redash_related import RelatedRedashVisualization from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ 
-86,6 +87,7 @@ class RedashQuery(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class RedashQuery(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "RedashQuery" + redash_query_sql: Union[str, None, UnsetType] = msgspec.field( default=UNSET, name="redashQuerySQL" ) @@ -176,6 +180,11 @@ class RedashQuery(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -236,66 +245,6 @@ class RedashQuery(Asset): def __post_init__(self) -> None: self.type_name = "RedashQuery" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this RedashQuery instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"RedashQuery validation failed: {errors}") - - def minimize(self) -> "RedashQuery": - """ - Return a minimal copy of this RedashQuery with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new RedashQuery with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new RedashQuery instance with only the minimum required fields. - """ - self.validate() - return RedashQuery(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedRedashQuery": - """ - Create a :class:`RelatedRedashQuery` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedRedashQuery reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedRedashQuery(guid=self.guid) - return RelatedRedashQuery(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -427,6 +376,11 @@ class RedashQueryRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -518,6 +472,7 @@ class RedashQueryNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -602,9 +557,6 @@ def _redash_query_to_nested(redash_query: RedashQuery) -> RedashQueryNested: is_incomplete=redash_query.is_incomplete, provenance_type=redash_query.provenance_type, home_id=redash_query.home_id, - depth=redash_query.depth, - immediate_upstream=redash_query.immediate_upstream, - immediate_downstream=redash_query.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -636,6 +588,7 @@ def _redash_query_from_nested(nested: RedashQueryNested) -> RedashQuery: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -644,9 +597,6 @@ def _redash_query_from_nested(nested: RedashQueryNested) -> RedashQuery: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_redash_query_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -713,6 +663,9 @@ def _redash_query_from_nested_bytes(data: bytes, serde: Serde) -> RedashQuery: RedashQuery.METRICS = RelationField("metrics") RedashQuery.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") RedashQuery.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +RedashQuery.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) RedashQuery.MEANINGS = RelationField("meanings") RedashQuery.MC_MONITORS = RelationField("mcMonitors") RedashQuery.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/redash_visualization.py b/pyatlan_v9/model/assets/redash_visualization.py index b346345f2..22e4dba5d 100644 --- a/pyatlan_v9/model/assets/redash_visualization.py +++ b/pyatlan_v9/model/assets/redash_visualization.py @@ -41,12 +41,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess -from .redash_related import RelatedRedashQuery, RelatedRedashVisualization +from .redash_related import RelatedRedashQuery from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject @@ -83,6 +84,7 @@ 
class RedashVisualization(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -101,6 +103,8 @@ class RedashVisualization(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "RedashVisualization" + redash_visualization_type: Union[str, None, UnsetType] = UNSET """Type of this visualization.""" @@ -162,6 +166,11 @@ class RedashVisualization(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -226,76 +235,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this RedashVisualization instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.redash_query is UNSET: - errors.append("redash_query is required for creation") - if self.redash_query_name is UNSET: - errors.append("redash_query_name is required for creation") - if self.redash_query_qualified_name is UNSET: - errors.append("redash_query_qualified_name is required for creation") - if errors: - raise ValueError(f"RedashVisualization validation failed: {errors}") - - def minimize(self) -> "RedashVisualization": - """ - Return a minimal copy of this RedashVisualization with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new RedashVisualization with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new RedashVisualization instance with only the minimum required fields. - """ - self.validate() - return RedashVisualization(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedRedashVisualization": - """ - Create a :class:`RelatedRedashVisualization` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedRedashVisualization reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedRedashVisualization(guid=self.guid) - return RelatedRedashVisualization(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -418,6 +357,11 @@ class RedashVisualizationRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -509,6 +453,7 @@ class RedashVisualizationNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -589,9 +534,6 @@ def _redash_visualization_to_nested( is_incomplete=redash_visualization.is_incomplete, provenance_type=redash_visualization.provenance_type, home_id=redash_visualization.home_id, - depth=redash_visualization.depth, - immediate_upstream=redash_visualization.immediate_upstream, - immediate_downstream=redash_visualization.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -627,6 +569,7 @@ def _redash_visualization_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -635,9 +578,6 @@ def _redash_visualization_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_redash_visualization_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -708,6 +648,9 @@ def _redash_visualization_from_nested_bytes( RedashVisualization.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +RedashVisualization.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) RedashVisualization.MEANINGS = RelationField("meanings") RedashVisualization.MC_MONITORS = RelationField("mcMonitors") RedashVisualization.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/referenceable.py b/pyatlan_v9/model/assets/referenceable.py index 9949a9ed6..2cecd04fb 100644 --- a/pyatlan_v9/model/assets/referenceable.py +++ b/pyatlan_v9/model/assets/referenceable.py @@ -19,14 +19,7 @@ import msgspec from msgspec import UNSET, UnsetType -from pyatlan.model.fields.atlan_fields import ( - InternalKeywordField, - InternalKeywordTextField, - InternalNumericField, - KeywordField, - KeywordTextField, - TextField, -) +from pyatlan.model.fields.atlan_fields import KeywordField, KeywordTextField from pyatlan_v9.model.conversion_utils import ( categorize_relationships, merge_relationships, @@ -78,65 +71,19 @@ def __post_init__(self) -> None: if self.type_name is UNSET: self.type_name = "Referenceable" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Referenceable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Referenceable validation failed: {errors}") - - def minimize(self) -> "Referenceable": - """ - Return a minimal copy of this Referenceable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Referenceable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Referenceable instance with only the minimum required fields. - """ - self.validate() - return Referenceable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedReferenceable": - """ - Create a :class:`RelatedReferenceable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedReferenceable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedReferenceable(guid=self.guid) - return RelatedReferenceable(qualified_name=self.qualified_name) + # DEFERRED: Referenceable.TYPE_NAME = InternalKeywordTextField("typeName", "__typeName.keyword", "__typeName", "__typeName") + # DEFERRED: Referenceable.GUID = InternalKeywordField("guid", "__guid", "__guid") + # DEFERRED: Referenceable.CREATED_BY = InternalKeywordField("createdBy", "__createdBy", "__createdBy") + # DEFERRED: Referenceable.UPDATED_BY = InternalKeywordField("updatedBy", "__modifiedBy", "__modifiedBy") + # DEFERRED: Referenceable.STATUS = InternalKeywordField("status", "__state", "__state") + # DEFERRED: Referenceable.ATLAN_TAGS = InternalKeywordTextField("classificationNames", "__traitNames", "__classificationsText", "__classificationNames") + # DEFERRED: Referenceable.PROPAGATED_ATLAN_TAGS = InternalKeywordTextField("classificationNames", "__propagatedTraitNames", "__classificationsText", "__propagatedClassificationNames") + # DEFERRED: Referenceable.ASSIGNED_TERMS = InternalKeywordTextField("meanings", "__meanings", "__meaningsText", "__meanings") + # DEFERRED: Referenceable.SUPER_TYPE_NAMES = InternalKeywordTextField("typeName", "__superTypeNames.keyword", "__superTypeNames", "__superTypeNames") + # DEFERRED: Referenceable.CREATE_TIME = InternalNumericField("createTime", "__timestamp", "__timestamp") + # DEFERRED: Referenceable.UPDATE_TIME = InternalNumericField("updateTime", "__modificationTimestamp", "__modificationTimestamp") + # DEFERRED: Referenceable.QUALIFIED_NAME = KeywordTextField("qualifiedName", "qualifiedName", "qualifiedName.text") + # DEFERRED: Referenceable.CUSTOM_ATTRIBUTES = TextField("customAttributes", "customAttributes") # Entity-level field descriptor placeholders (assigned at module bottom) TYPE_NAME: ClassVar[Any] = None @@ -306,9 +253,6 @@ class ReferenceableNested( is_incomplete: Union[Any, UnsetType] = UNSET provenance_type: Union[Any, UnsetType] = UNSET home_id: Union[Any, 
UnsetType] = UNSET - depth: Union[Any, UnsetType] = UNSET - immediate_upstream: Union[Any, UnsetType] = UNSET - immediate_downstream: Union[Any, UnsetType] = UNSET attributes: Union[ReferenceableAttributes, UnsetType] = UNSET relationship_attributes: Union[ReferenceableRelationshipAttributes, UnsetType] = ( @@ -384,9 +328,6 @@ def _referenceable_to_nested(referenceable: Referenceable) -> ReferenceableNeste is_incomplete=referenceable.is_incomplete, provenance_type=referenceable.provenance_type, home_id=referenceable.home_id, - depth=referenceable.depth, - immediate_upstream=referenceable.immediate_upstream, - immediate_downstream=referenceable.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -420,6 +361,7 @@ def _referenceable_from_nested(nested: ReferenceableNested) -> Referenceable: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -428,9 +370,6 @@ def _referenceable_from_nested(nested: ReferenceableNested) -> Referenceable: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_referenceable_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -461,43 +400,3 @@ def _referenceable_from_nested_bytes(data: bytes, serde: Serde) -> Referenceable Referenceable.MEANINGS = RelationField("meanings") Referenceable.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") Referenceable.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") - -Referenceable.TYPE_NAME = InternalKeywordTextField( - "typeName", "__typeName.keyword", "__typeName", "__typeName" -) -Referenceable.GUID 
= InternalKeywordField("guid", "__guid", "__guid") -Referenceable.CREATED_BY = InternalKeywordField( - "createdBy", "__createdBy", "__createdBy" -) -Referenceable.UPDATED_BY = InternalKeywordField( - "updatedBy", "__modifiedBy", "__modifiedBy" -) -Referenceable.STATUS = InternalKeywordField("status", "__state", "__state") -Referenceable.ATLAN_TAGS = InternalKeywordTextField( - "classificationNames", - "__traitNames", - "__classificationsText", - "__classificationNames", -) -Referenceable.PROPAGATED_ATLAN_TAGS = InternalKeywordTextField( - "classificationNames", - "__propagatedTraitNames", - "__classificationsText", - "__propagatedClassificationNames", -) -Referenceable.ASSIGNED_TERMS = InternalKeywordTextField( - "meanings", "__meanings", "__meaningsText", "__meanings" -) -Referenceable.SUPER_TYPE_NAMES = InternalKeywordTextField( - "typeName", "__superTypeNames.keyword", "__superTypeNames", "__superTypeNames" -) -Referenceable.CREATE_TIME = InternalNumericField( - "createTime", "__timestamp", "__timestamp" -) -Referenceable.UPDATE_TIME = InternalNumericField( - "updateTime", "__modificationTimestamp", "__modificationTimestamp" -) -Referenceable.QUALIFIED_NAME = KeywordTextField( - "qualifiedName", "qualifiedName", "qualifiedName.text" -) -Referenceable.CUSTOM_ATTRIBUTES = TextField("customAttributes", "customAttributes") diff --git a/pyatlan_v9/model/assets/referenceable_related.py b/pyatlan_v9/model/assets/referenceable_related.py index fda32fa13..1944f6c7e 100644 --- a/pyatlan_v9/model/assets/referenceable_related.py +++ b/pyatlan_v9/model/assets/referenceable_related.py @@ -19,7 +19,6 @@ __all__ = [ "RelatedReferenceable", - "RelatedPersona", ] @@ -50,19 +49,3 @@ def __post_init__(self) -> None: """ if self.qualified_name is not UNSET and self.unique_attributes is UNSET: self.unique_attributes = {"qualifiedName": self.qualified_name} - - -class RelatedPersona(RelatedReferenceable): - """ - Related entity reference for Persona assets. 
- - Persona is a bootstrapped type that exists in all Atlan tenants but is not - defined in the typedef hierarchy. - """ - - # type_name inherited from parent with default=UNSET - # __post_init__ sets it to "Persona" so it serializes correctly - - def __post_init__(self) -> None: - RelatedReferenceable.__post_init__(self) - self.type_name = "Persona" diff --git a/pyatlan_v9/model/assets/resource.py b/pyatlan_v9/model/assets/resource.py index 54bad6cab..5119805e0 100644 --- a/pyatlan_v9/model/assets/resource.py +++ b/pyatlan_v9/model/assets/resource.py @@ -40,13 +40,14 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .partial_related import RelatedPartialField, RelatedPartialObject from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable -from .resource_related import RelatedFile, RelatedLink, RelatedReadme, RelatedResource +from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -81,6 +82,7 @@ class Resource(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -98,6 +100,8 @@ class Resource(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Resource" + 
link: Union[str, None, UnsetType] = UNSET """URL to the resource.""" @@ -159,6 +163,11 @@ class Resource(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -214,66 +223,6 @@ class Resource(Asset): def __post_init__(self) -> None: self.type_name = "Resource" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Resource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Resource validation failed: {errors}") - - def minimize(self) -> "Resource": - """ - Return a minimal copy of this Resource with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Resource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Resource instance with only the minimum required fields. - """ - self.validate() - return Resource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedResource": - """ - Create a :class:`RelatedResource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedResource reference to this asset. - """ - if self.guid is not UNSET: - return RelatedResource(guid=self.guid) - return RelatedResource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -394,6 +343,11 @@ class ResourceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -480,6 +434,7 @@ class ResourceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -553,9 +508,6 @@ def _resource_to_nested(resource: Resource) -> ResourceNested: is_incomplete=resource.is_incomplete, provenance_type=resource.provenance_type, home_id=resource.home_id, - depth=resource.depth, - immediate_upstream=resource.immediate_upstream, - 
immediate_downstream=resource.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -587,6 +539,7 @@ def _resource_from_nested(nested: ResourceNested) -> Resource: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -595,9 +548,6 @@ def _resource_from_nested(nested: ResourceNested) -> Resource: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_resource_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -643,6 +593,9 @@ def _resource_from_nested_bytes(data: bytes, serde: Serde) -> Resource: Resource.METRICS = RelationField("metrics") Resource.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Resource.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Resource.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Resource.MEANINGS = RelationField("meanings") Resource.MC_MONITORS = RelationField("mcMonitors") Resource.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/s3.py b/pyatlan_v9/model/assets/s3.py index d8b743ed5..1e473aa7b 100644 --- a/pyatlan_v9/model/assets/s3.py +++ b/pyatlan_v9/model/assets/s3.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from 
.monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,6 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .s3_related import RelatedS3 from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -93,6 +93,7 @@ class S3(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,6 +111,8 @@ class S3(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "S3" + s3_etag: Union[str, None, UnsetType] = msgspec.field(default=UNSET, name="s3ETag") """Entity tag for the asset. An entity tag is a hash of the object and represents changes to the contents of an object only, not its metadata.""" @@ -201,6 +204,11 @@ class S3(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,66 +264,6 @@ class S3(Asset): def __post_init__(self) -> None: self.type_name = "S3" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this S3 instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"S3 validation failed: {errors}") - - def minimize(self) -> "S3": - """ - Return a minimal copy of this S3 with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new S3 with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new S3 instance with only the minimum required fields. - """ - self.validate() - return S3(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedS3": - """ - Create a :class:`RelatedS3` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedS3 reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedS3(guid=self.guid) - return RelatedS3(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -466,6 +414,11 @@ class S3RelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -548,6 +501,7 @@ class S3Nested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -641,9 +595,6 @@ def _s3_to_nested(s3: S3) -> S3Nested: is_incomplete=s3.is_incomplete, provenance_type=s3.provenance_type, home_id=s3.home_id, - depth=s3.depth, - immediate_upstream=s3.immediate_upstream, - immediate_downstream=s3.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -673,6 +624,7 @@ def _s3_from_nested(nested: S3Nested) -> S3: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -681,9 +633,6 @@ def _s3_from_nested(nested: S3Nested) -> S3: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_s3_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -745,6 +694,9 @@ def _s3_from_nested_bytes(data: bytes, serde: Serde) -> S3: S3.METRICS = RelationField("metrics") S3.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") S3.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +S3.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) S3.MEANINGS = RelationField("meanings") S3.MC_MONITORS = RelationField("mcMonitors") S3.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/s3_bucket.py b/pyatlan_v9/model/assets/s3_bucket.py index cba5887fd..45fdaea56 100644 --- a/pyatlan_v9/model/assets/s3_bucket.py +++ b/pyatlan_v9/model/assets/s3_bucket.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .s3_related import RelatedS3Bucket, RelatedS3Object, RelatedS3Prefix +from .s3_related import RelatedS3Object, RelatedS3Prefix from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -96,6 +97,7 @@ class S3Bucket(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None 
MC_INCIDENTS: ClassVar[Any] = None @@ -115,6 +117,8 @@ class S3Bucket(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "S3Bucket" + s3_object_count: Union[int, None, UnsetType] = UNSET """Number of objects within the bucket.""" @@ -212,6 +216,11 @@ class S3Bucket(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -273,66 +282,6 @@ class S3Bucket(Asset): def __post_init__(self) -> None: self.type_name = "S3Bucket" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this S3Bucket instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"S3Bucket validation failed: {errors}") - - def minimize(self) -> "S3Bucket": - """ - Return a minimal copy of this S3Bucket with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new S3Bucket with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new S3Bucket instance with only the minimum required fields. - """ - self.validate() - return S3Bucket(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedS3Bucket": - """ - Create a :class:`RelatedS3Bucket` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedS3Bucket reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedS3Bucket(guid=self.guid) - return RelatedS3Bucket(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -543,6 +492,11 @@ class S3BucketRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -635,6 +589,7 @@ class S3BucketNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -734,9 +689,6 @@ def _s3_bucket_to_nested(s3_bucket: S3Bucket) -> S3BucketNested: is_incomplete=s3_bucket.is_incomplete, provenance_type=s3_bucket.provenance_type, home_id=s3_bucket.home_id, - depth=s3_bucket.depth, - immediate_upstream=s3_bucket.immediate_upstream, - immediate_downstream=s3_bucket.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -768,6 +720,7 @@ def _s3_bucket_from_nested(nested: S3BucketNested) -> S3Bucket: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -776,9 +729,6 @@ def _s3_bucket_from_nested(nested: S3BucketNested) -> S3Bucket: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_s3_bucket_attrs(attrs), # Merged relationship attributes 
**merged_rels, @@ -846,6 +796,9 @@ def _s3_bucket_from_nested_bytes(data: bytes, serde: Serde) -> S3Bucket: S3Bucket.METRICS = RelationField("metrics") S3Bucket.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") S3Bucket.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +S3Bucket.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) S3Bucket.MEANINGS = RelationField("meanings") S3Bucket.MC_MONITORS = RelationField("mcMonitors") S3Bucket.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/s3_object.py b/pyatlan_v9/model/assets/s3_object.py index 59bdf8675..a9dff4e00 100644 --- a/pyatlan_v9/model/assets/s3_object.py +++ b/pyatlan_v9/model/assets/s3_object.py @@ -44,6 +44,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,7 +52,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .s3_related import RelatedS3Bucket, RelatedS3Object, RelatedS3Prefix +from .s3_related import RelatedS3Bucket, RelatedS3Prefix from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -108,6 +109,7 @@ class S3Object(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None 
MC_INCIDENTS: ClassVar[Any] = None @@ -127,6 +129,8 @@ class S3Object(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "S3Object" + s3_object_last_modified_time: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this object was last updated, in milliseconds, or when it was created if it has never been modified.""" @@ -254,6 +258,11 @@ class S3Object(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -321,76 +330,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this S3Object instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.bucket is UNSET: - errors.append("bucket is required for creation") - if self.s3_bucket_name is UNSET: - errors.append("s3_bucket_name is required for creation") - if self.s3_bucket_qualified_name is UNSET: - errors.append("s3_bucket_qualified_name is required for creation") - if errors: - raise ValueError(f"S3Object validation failed: {errors}") - - def minimize(self) -> "S3Object": - """ - Return a minimal copy of this S3Object with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new S3Object with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new S3Object instance with only the minimum required fields. - """ - self.validate() - return S3Object(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedS3Object": - """ - Create a :class:`RelatedS3Object` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedS3Object reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedS3Object(guid=self.guid) - return RelatedS3Object(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -727,6 +666,11 @@ class S3ObjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -819,6 +763,7 @@ class S3ObjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -940,9 +885,6 @@ def _s3_object_to_nested(s3_object: S3Object) -> S3ObjectNested: is_incomplete=s3_object.is_incomplete, provenance_type=s3_object.provenance_type, home_id=s3_object.home_id, - depth=s3_object.depth, - immediate_upstream=s3_object.immediate_upstream, - immediate_downstream=s3_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -974,6 +916,7 @@ def _s3_object_from_nested(nested: S3ObjectNested) -> S3Object: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -982,9 +925,6 @@ def _s3_object_from_nested(nested: S3ObjectNested) -> S3Object: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_s3_object_attrs(attrs), # Merged relationship attributes 
**merged_rels, @@ -1078,6 +1018,9 @@ def _s3_object_from_nested_bytes(data: bytes, serde: Serde) -> S3Object: S3Object.METRICS = RelationField("metrics") S3Object.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") S3Object.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +S3Object.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) S3Object.MEANINGS = RelationField("meanings") S3Object.MC_MONITORS = RelationField("mcMonitors") S3Object.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/s3_prefix.py b/pyatlan_v9/model/assets/s3_prefix.py index ce1cf3cd1..ff9faed91 100644 --- a/pyatlan_v9/model/assets/s3_prefix.py +++ b/pyatlan_v9/model/assets/s3_prefix.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -98,6 +99,7 @@ class S3Prefix(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -119,6 +121,8 @@ class S3Prefix(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "S3Prefix" + s3_bucket_name: Union[str, None, UnsetType] = UNSET """Simple name of the bucket in which this prefix exists.""" @@ -222,6 +226,11 @@ class S3Prefix(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + 
List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -295,76 +304,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this S3Prefix instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.s3_bucket is UNSET: - errors.append("s3_bucket is required for creation") - if self.s3_bucket_name is UNSET: - errors.append("s3_bucket_name is required for creation") - if self.s3_bucket_qualified_name is UNSET: - errors.append("s3_bucket_qualified_name is required for creation") - if errors: - raise ValueError(f"S3Prefix validation failed: {errors}") - - def minimize(self) -> "S3Prefix": - """ - Return a minimal copy of this S3Prefix with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new S3Prefix with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new S3Prefix instance with only the minimum required fields. - """ - self.validate() - return S3Prefix(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedS3Prefix": - """ - Create a :class:`RelatedS3Prefix` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedS3Prefix reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedS3Prefix(guid=self.guid) - return RelatedS3Prefix(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -527,6 +466,11 @@ class S3PrefixRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -625,6 +569,7 @@ class S3PrefixNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -730,9 +675,6 @@ def _s3_prefix_to_nested(s3_prefix: S3Prefix) -> S3PrefixNested: is_incomplete=s3_prefix.is_incomplete, provenance_type=s3_prefix.provenance_type, home_id=s3_prefix.home_id, - depth=s3_prefix.depth, - immediate_upstream=s3_prefix.immediate_upstream, - immediate_downstream=s3_prefix.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -764,6 +706,7 @@ def _s3_prefix_from_nested(nested: S3PrefixNested) -> S3Prefix: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -772,9 +715,6 @@ def _s3_prefix_from_nested(nested: S3PrefixNested) -> S3Prefix: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_s3_prefix_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -843,6 +783,9 @@ def _s3_prefix_from_nested_bytes(data: bytes, serde: Serde) -> S3Prefix: S3Prefix.METRICS = RelationField("metrics") S3Prefix.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") S3Prefix.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +S3Prefix.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) S3Prefix.MEANINGS = RelationField("meanings") S3Prefix.MC_MONITORS = RelationField("mcMonitors") S3Prefix.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/saa_s.py b/pyatlan_v9/model/assets/saa_s.py index a1f61fe19..0e81e4d29 100644 --- a/pyatlan_v9/model/assets/saa_s.py +++ b/pyatlan_v9/model/assets/saa_s.py @@ -37,10 +37,10 @@ _extract_asset_attrs, _populate_asset_attrs, ) -from .catalog_related import RelatedSaaS from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -78,6 +78,7 @@ class SaaS(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class SaaS(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SaaS" + 
catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class SaaS(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class SaaS(Asset): def __post_init__(self) -> None: self.type_name = "SaaS" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SaaS instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SaaS validation failed: {errors}") - - def minimize(self) -> "SaaS": - """ - Return a minimal copy of this SaaS with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SaaS with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SaaS instance with only the minimum required fields. - """ - self.validate() - return SaaS(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSaaS": - """ - Create a :class:`RelatedSaaS` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSaaS reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSaaS(guid=self.guid) - return RelatedSaaS(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class SaaSRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -449,6 +402,7 @@ class SaaSNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -514,9 +468,6 @@ def _saa_s_to_nested(saa_s: SaaS) -> SaaSNested: is_incomplete=saa_s.is_incomplete, provenance_type=saa_s.provenance_type, home_id=saa_s.home_id, - depth=saa_s.depth, - immediate_upstream=saa_s.immediate_upstream, - immediate_downstream=saa_s.immediate_downstream, attributes=attrs, 
relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -546,6 +497,7 @@ def _saa_s_from_nested(nested: SaaSNested) -> SaaS: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -554,9 +506,6 @@ def _saa_s_from_nested(nested: SaaSNested) -> SaaS: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_saa_s_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -594,6 +543,9 @@ def _saa_s_from_nested_bytes(data: bytes, serde: Serde) -> SaaS: SaaS.METRICS = RelationField("metrics") SaaS.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SaaS.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SaaS.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SaaS.MEANINGS = RelationField("meanings") SaaS.MC_MONITORS = RelationField("mcMonitors") SaaS.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker.py b/pyatlan_v9/model/assets/sage_maker.py index 165b99e76..cbdb4d125 100644 --- a/pyatlan_v9/model/assets/sage_maker.py +++ b/pyatlan_v9/model/assets/sage_maker.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,6 @@ from 
.process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMaker from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -97,6 +97,7 @@ class SageMaker(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -114,6 +115,8 @@ class SageMaker(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMaker" + sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET """Primary S3 URI associated with this SageMaker asset.""" @@ -231,6 +234,11 @@ class SageMaker(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -286,66 +294,6 @@ class SageMaker(Asset): def __post_init__(self) -> None: self.type_name = "SageMaker" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMaker instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMaker validation failed: {errors}") - - def minimize(self) -> "SageMaker": - """ - Return a minimal copy of this SageMaker with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMaker with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMaker instance with only the minimum required fields. - """ - self.validate() - return SageMaker(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMaker": - """ - Create a :class:`RelatedSageMaker` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMaker reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMaker(guid=self.guid) - return RelatedSageMaker(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -522,6 +470,11 @@ class SageMakerRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -608,6 +561,7 @@ class SageMakerNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -719,9 +673,6 @@ def _sage_maker_to_nested(sage_maker: SageMaker) -> SageMakerNested: is_incomplete=sage_maker.is_incomplete, provenance_type=sage_maker.provenance_type, home_id=sage_maker.home_id, - depth=sage_maker.depth, - immediate_upstream=sage_maker.immediate_upstream, - immediate_downstream=sage_maker.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -753,6 +704,7 @@ def _sage_maker_from_nested(nested: SageMakerNested) -> SageMaker: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -761,9 +713,6 @@ def _sage_maker_from_nested(nested: SageMakerNested) -> SageMaker: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -844,6 +793,9 @@ def _sage_maker_from_nested_bytes(data: bytes, serde: Serde) -> SageMaker: SageMaker.METRICS = RelationField("metrics") SageMaker.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SageMaker.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SageMaker.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SageMaker.MEANINGS = RelationField("meanings") SageMaker.MC_MONITORS = RelationField("mcMonitors") SageMaker.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_feature.py b/pyatlan_v9/model/assets/sage_maker_feature.py index 1b654a249..02123f3f0 100644 --- a/pyatlan_v9/model/assets/sage_maker_feature.py +++ b/pyatlan_v9/model/assets/sage_maker_feature.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMakerFeature, RelatedSageMakerFeatureGroup +from .sage_maker_related import RelatedSageMakerFeatureGroup from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -65,10 +66,10 @@ class 
SageMakerFeature(Asset): Instance of a SageMaker Feature in Atlan. Represents an individual feature within a Feature Group, including its data type and metadata. """ - SAGE_MAKER_FEATURE_GROUP_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_QUALIFIED_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_DATA_TYPE: ClassVar[Any] = None - SAGE_MAKER_FEATURE_IS_RECORD_IDENTIFIER: ClassVar[Any] = None + SAGE_MAKER_GROUP_NAME: ClassVar[Any] = None + SAGE_MAKER_GROUP_QUALIFIED_NAME: ClassVar[Any] = None + SAGE_MAKER_DATA_TYPE: ClassVar[Any] = None + SAGE_MAKER_IS_RECORD_IDENTIFIER: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -102,6 +103,7 @@ class SageMakerFeature(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -120,16 +122,18 @@ class SageMakerFeature(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_feature_group_name: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SageMakerFeature" + + sage_maker_group_name: Union[str, None, UnsetType] = UNSET """Name of the Feature Group that contains this feature.""" - sage_maker_feature_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the Feature Group that contains this feature.""" - sage_maker_feature_data_type: Union[str, None, UnsetType] = UNSET + sage_maker_data_type: Union[str, None, UnsetType] = UNSET """Data type of the feature (e.g., String, Integral, Fractional).""" - sage_maker_feature_is_record_identifier: Union[bool, None, UnsetType] = UNSET + 
sage_maker_is_record_identifier: Union[bool, None, UnsetType] = UNSET """Whether this feature serves as the record identifier for the Feature Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -249,6 +253,11 @@ class SageMakerFeature(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -315,72 +324,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerFeature instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sage_maker_feature_group is UNSET: - errors.append("sage_maker_feature_group is required for creation") - if errors: - raise ValueError(f"SageMakerFeature validation failed: {errors}") - - def minimize(self) -> "SageMakerFeature": - """ - Return a minimal copy of this SageMakerFeature with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerFeature with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerFeature instance with only the minimum required fields. - """ - self.validate() - return SageMakerFeature(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMakerFeature": - """ - Create a :class:`RelatedSageMakerFeature` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerFeature reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerFeature(guid=self.guid) - return RelatedSageMakerFeature(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -438,16 +381,16 @@ def from_json( class SageMakerFeatureAttributes(AssetAttributes): """SageMakerFeature-specific attributes for nested API format.""" - sage_maker_feature_group_name: Union[str, None, UnsetType] = UNSET + sage_maker_group_name: Union[str, None, UnsetType] = UNSET """Name of the Feature Group that contains this feature.""" - sage_maker_feature_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the Feature Group that contains this feature.""" - sage_maker_feature_data_type: Union[str, None, UnsetType] = UNSET + sage_maker_data_type: Union[str, None, UnsetType] = UNSET """Data type of the feature (e.g., String, Integral, Fractional).""" - sage_maker_feature_is_record_identifier: Union[bool, None, UnsetType] = UNSET + sage_maker_is_record_identifier: Union[bool, None, UnsetType] = UNSET """Whether this feature serves as the record identifier for the Feature Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -571,6 +514,11 @@ class SageMakerFeatureRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -664,6 +612,7 @@ class SageMakerFeatureNested(AssetNested): "metrics", "dq_base_dataset_rules", 
"dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -689,14 +638,10 @@ def _populate_sage_maker_feature_attrs( ) -> None: """Populate SageMakerFeature-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_feature_group_name = obj.sage_maker_feature_group_name - attrs.sage_maker_feature_group_qualified_name = ( - obj.sage_maker_feature_group_qualified_name - ) - attrs.sage_maker_feature_data_type = obj.sage_maker_feature_data_type - attrs.sage_maker_feature_is_record_identifier = ( - obj.sage_maker_feature_is_record_identifier - ) + attrs.sage_maker_group_name = obj.sage_maker_group_name + attrs.sage_maker_group_qualified_name = obj.sage_maker_group_qualified_name + attrs.sage_maker_data_type = obj.sage_maker_data_type + attrs.sage_maker_is_record_identifier = obj.sage_maker_is_record_identifier attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -725,14 +670,10 @@ def _populate_sage_maker_feature_attrs( def _extract_sage_maker_feature_attrs(attrs: SageMakerFeatureAttributes) -> dict: """Extract all SageMakerFeature attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sage_maker_feature_group_name"] = attrs.sage_maker_feature_group_name - result["sage_maker_feature_group_qualified_name"] = ( - attrs.sage_maker_feature_group_qualified_name - ) - result["sage_maker_feature_data_type"] = attrs.sage_maker_feature_data_type - result["sage_maker_feature_is_record_identifier"] = ( - attrs.sage_maker_feature_is_record_identifier - ) + result["sage_maker_group_name"] = attrs.sage_maker_group_name + result["sage_maker_group_qualified_name"] = attrs.sage_maker_group_qualified_name + result["sage_maker_data_type"] = attrs.sage_maker_data_type + result["sage_maker_is_record_identifier"] = 
attrs.sage_maker_is_record_identifier result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -798,9 +739,6 @@ def _sage_maker_feature_to_nested( is_incomplete=sage_maker_feature.is_incomplete, provenance_type=sage_maker_feature.provenance_type, home_id=sage_maker_feature.home_id, - depth=sage_maker_feature.depth, - immediate_upstream=sage_maker_feature.immediate_upstream, - immediate_downstream=sage_maker_feature.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -834,6 +772,7 @@ def _sage_maker_feature_from_nested(nested: SageMakerFeatureNested) -> SageMaker updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -842,9 +781,6 @@ def _sage_maker_feature_from_nested(nested: SageMakerFeatureNested) -> SageMaker is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_feature_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -876,17 +812,17 @@ def _sage_maker_feature_from_nested_bytes( RelationField, ) -SageMakerFeature.SAGE_MAKER_FEATURE_GROUP_NAME = KeywordField( - "sageMakerFeatureGroupName", "sageMakerFeatureGroupName" +SageMakerFeature.SAGE_MAKER_GROUP_NAME = KeywordField( + "sageMakerGroupName", "sageMakerGroupName" ) -SageMakerFeature.SAGE_MAKER_FEATURE_GROUP_QUALIFIED_NAME = KeywordField( - "sageMakerFeatureGroupQualifiedName", "sageMakerFeatureGroupQualifiedName" +SageMakerFeature.SAGE_MAKER_GROUP_QUALIFIED_NAME = KeywordField( + 
"sageMakerGroupQualifiedName", "sageMakerGroupQualifiedName" ) -SageMakerFeature.SAGE_MAKER_FEATURE_DATA_TYPE = KeywordField( - "sageMakerFeatureDataType", "sageMakerFeatureDataType" +SageMakerFeature.SAGE_MAKER_DATA_TYPE = KeywordField( + "sageMakerDataType", "sageMakerDataType" ) -SageMakerFeature.SAGE_MAKER_FEATURE_IS_RECORD_IDENTIFIER = BooleanField( - "sageMakerFeatureIsRecordIdentifier", "sageMakerFeatureIsRecordIdentifier" +SageMakerFeature.SAGE_MAKER_IS_RECORD_IDENTIFIER = BooleanField( + "sageMakerIsRecordIdentifier", "sageMakerIsRecordIdentifier" ) SageMakerFeature.SAGE_MAKER_S3_URI = KeywordField("sageMakerS3Uri", "sageMakerS3Uri") SageMakerFeature.ETHICAL_AI_PRIVACY_CONFIG = KeywordField( @@ -946,6 +882,9 @@ def _sage_maker_feature_from_nested_bytes( SageMakerFeature.METRICS = RelationField("metrics") SageMakerFeature.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SageMakerFeature.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SageMakerFeature.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SageMakerFeature.MEANINGS = RelationField("meanings") SageMakerFeature.MC_MONITORS = RelationField("mcMonitors") SageMakerFeature.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_feature_group.py b/pyatlan_v9/model/assets/sage_maker_feature_group.py index 01520b197..4b4e057f6 100644 --- a/pyatlan_v9/model/assets/sage_maker_feature_group.py +++ b/pyatlan_v9/model/assets/sage_maker_feature_group.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, 
RelatedMCMonitor @@ -48,7 +49,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMakerFeature, RelatedSageMakerFeatureGroup +from .sage_maker_related import RelatedSageMakerFeature from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -64,11 +65,11 @@ class SageMakerFeatureGroup(Asset): Instance of a SageMaker Feature Store Feature Group in Atlan. Represents a collection of related features that can be used for machine learning training and inference. """ - SAGE_MAKER_FEATURE_GROUP_STATUS: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_RECORD_ID_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_GLUE_DATABASE_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_GLUE_TABLE_NAME: ClassVar[Any] = None - SAGE_MAKER_FEATURE_GROUP_FEATURE_COUNT: ClassVar[Any] = None + SAGE_MAKER_STATUS: ClassVar[Any] = None + SAGE_MAKER_RECORD_ID_NAME: ClassVar[Any] = None + SAGE_MAKER_GLUE_DATABASE_NAME: ClassVar[Any] = None + SAGE_MAKER_GLUE_TABLE_NAME: ClassVar[Any] = None + SAGE_MAKER_FEATURE_COUNT: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -102,6 +103,7 @@ class SageMakerFeatureGroup(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -120,19 +122,21 @@ class SageMakerFeatureGroup(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_feature_group_status: Union[str, None, UnsetType] = UNSET + 
type_name: Union[str, UnsetType] = "SageMakerFeatureGroup" + + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Feature Group (e.g., Created, Creating, Failed).""" - sage_maker_feature_group_record_id_name: Union[str, None, UnsetType] = UNSET + sage_maker_record_id_name: Union[str, None, UnsetType] = UNSET """Name of the feature that serves as the record identifier.""" - sage_maker_feature_group_glue_database_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_database_name: Union[str, None, UnsetType] = UNSET """AWS Glue database name associated with this Feature Group.""" - sage_maker_feature_group_glue_table_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_table_name: Union[str, None, UnsetType] = UNSET """AWS Glue table name associated with this Feature Group.""" - sage_maker_feature_group_feature_count: Union[int, None, UnsetType] = UNSET + sage_maker_feature_count: Union[int, None, UnsetType] = UNSET """Number of features in this Feature Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -252,6 +256,11 @@ class SageMakerFeatureGroup(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -310,66 +319,6 @@ class SageMakerFeatureGroup(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerFeatureGroup" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerFeatureGroup instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMakerFeatureGroup validation failed: {errors}") - - def minimize(self) -> "SageMakerFeatureGroup": - """ - Return a minimal copy of this SageMakerFeatureGroup with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerFeatureGroup with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerFeatureGroup instance with only the minimum required fields. - """ - self.validate() - return SageMakerFeatureGroup(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMakerFeatureGroup": - """ - Create a :class:`RelatedSageMakerFeatureGroup` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerFeatureGroup reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerFeatureGroup(guid=self.guid) - return RelatedSageMakerFeatureGroup(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -427,19 +376,19 @@ def from_json( class SageMakerFeatureGroupAttributes(AssetAttributes): """SageMakerFeatureGroup-specific attributes for nested API format.""" - sage_maker_feature_group_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Feature Group (e.g., Created, Creating, Failed).""" - sage_maker_feature_group_record_id_name: Union[str, None, UnsetType] = UNSET + sage_maker_record_id_name: Union[str, None, UnsetType] = UNSET """Name of the feature that serves as the record identifier.""" - sage_maker_feature_group_glue_database_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_database_name: Union[str, None, UnsetType] = UNSET """AWS Glue database name associated with this Feature Group.""" - sage_maker_feature_group_glue_table_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_table_name: Union[str, None, UnsetType] = UNSET """AWS Glue table name associated with this Feature Group.""" - sage_maker_feature_group_feature_count: Union[int, None, UnsetType] = UNSET + sage_maker_feature_count: Union[int, None, UnsetType] = UNSET """Number of features in this Feature Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -563,6 +512,11 @@ class SageMakerFeatureGroupRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: 
Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -654,6 +608,7 @@ class SageMakerFeatureGroupNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -679,19 +634,11 @@ def _populate_sage_maker_feature_group_attrs( ) -> None: """Populate SageMakerFeatureGroup-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_feature_group_status = obj.sage_maker_feature_group_status - attrs.sage_maker_feature_group_record_id_name = ( - obj.sage_maker_feature_group_record_id_name - ) - attrs.sage_maker_feature_group_glue_database_name = ( - obj.sage_maker_feature_group_glue_database_name - ) - attrs.sage_maker_feature_group_glue_table_name = ( - obj.sage_maker_feature_group_glue_table_name - ) - attrs.sage_maker_feature_group_feature_count = ( - obj.sage_maker_feature_group_feature_count - ) + attrs.sage_maker_status = obj.sage_maker_status + attrs.sage_maker_record_id_name = obj.sage_maker_record_id_name + attrs.sage_maker_glue_database_name = obj.sage_maker_glue_database_name + attrs.sage_maker_glue_table_name = obj.sage_maker_glue_table_name + attrs.sage_maker_feature_count = obj.sage_maker_feature_count attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -722,19 +669,11 @@ def _extract_sage_maker_feature_group_attrs( ) -> dict: """Extract all SageMakerFeatureGroup attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sage_maker_feature_group_status"] = attrs.sage_maker_feature_group_status - result["sage_maker_feature_group_record_id_name"] = ( - attrs.sage_maker_feature_group_record_id_name - ) - result["sage_maker_feature_group_glue_database_name"] = ( - 
attrs.sage_maker_feature_group_glue_database_name - ) - result["sage_maker_feature_group_glue_table_name"] = ( - attrs.sage_maker_feature_group_glue_table_name - ) - result["sage_maker_feature_group_feature_count"] = ( - attrs.sage_maker_feature_group_feature_count - ) + result["sage_maker_status"] = attrs.sage_maker_status + result["sage_maker_record_id_name"] = attrs.sage_maker_record_id_name + result["sage_maker_glue_database_name"] = attrs.sage_maker_glue_database_name + result["sage_maker_glue_table_name"] = attrs.sage_maker_glue_table_name + result["sage_maker_feature_count"] = attrs.sage_maker_feature_count result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -800,9 +739,6 @@ def _sage_maker_feature_group_to_nested( is_incomplete=sage_maker_feature_group.is_incomplete, provenance_type=sage_maker_feature_group.provenance_type, home_id=sage_maker_feature_group.home_id, - depth=sage_maker_feature_group.depth, - immediate_upstream=sage_maker_feature_group.immediate_upstream, - immediate_downstream=sage_maker_feature_group.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -838,6 +774,7 @@ def _sage_maker_feature_group_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -846,9 +783,6 @@ def _sage_maker_feature_group_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_feature_group_attrs(attrs), # Merged relationship 
attributes **merged_rels, @@ -880,20 +814,20 @@ def _sage_maker_feature_group_from_nested_bytes( RelationField, ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_STATUS = KeywordField( - "sageMakerFeatureGroupStatus", "sageMakerFeatureGroupStatus" +SageMakerFeatureGroup.SAGE_MAKER_STATUS = KeywordField( + "sageMakerStatus", "sageMakerStatus" ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_RECORD_ID_NAME = KeywordField( - "sageMakerFeatureGroupRecordIdName", "sageMakerFeatureGroupRecordIdName" +SageMakerFeatureGroup.SAGE_MAKER_RECORD_ID_NAME = KeywordField( + "sageMakerRecordIdName", "sageMakerRecordIdName" ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_GLUE_DATABASE_NAME = KeywordField( - "sageMakerFeatureGroupGlueDatabaseName", "sageMakerFeatureGroupGlueDatabaseName" +SageMakerFeatureGroup.SAGE_MAKER_GLUE_DATABASE_NAME = KeywordField( + "sageMakerGlueDatabaseName", "sageMakerGlueDatabaseName" ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_GLUE_TABLE_NAME = KeywordField( - "sageMakerFeatureGroupGlueTableName", "sageMakerFeatureGroupGlueTableName" +SageMakerFeatureGroup.SAGE_MAKER_GLUE_TABLE_NAME = KeywordField( + "sageMakerGlueTableName", "sageMakerGlueTableName" ) -SageMakerFeatureGroup.SAGE_MAKER_FEATURE_GROUP_FEATURE_COUNT = NumericField( - "sageMakerFeatureGroupFeatureCount", "sageMakerFeatureGroupFeatureCount" +SageMakerFeatureGroup.SAGE_MAKER_FEATURE_COUNT = NumericField( + "sageMakerFeatureCount", "sageMakerFeatureCount" ) SageMakerFeatureGroup.SAGE_MAKER_S3_URI = KeywordField( "sageMakerS3Uri", "sageMakerS3Uri" @@ -963,6 +897,9 @@ def _sage_maker_feature_group_from_nested_bytes( SageMakerFeatureGroup.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SageMakerFeatureGroup.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SageMakerFeatureGroup.MEANINGS = RelationField("meanings") SageMakerFeatureGroup.MC_MONITORS = RelationField("mcMonitors") 
SageMakerFeatureGroup.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_model.py b/pyatlan_v9/model/assets/sage_maker_model.py index 667720fc1..831b7297a 100644 --- a/pyatlan_v9/model/assets/sage_maker_model.py +++ b/pyatlan_v9/model/assets/sage_maker_model.py @@ -43,6 +43,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,7 +52,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .sage_maker_related import ( - RelatedSageMakerModel, RelatedSageMakerModelDeployment, RelatedSageMakerModelGroup, ) @@ -70,12 +70,12 @@ class SageMakerModel(Asset): Instance of a SageMaker ML Model in Atlan. Represents trained machine learning models that can be deployed for inference. 
""" - SAGE_MAKER_MODEL_CONTAINER_IMAGE: ClassVar[Any] = None - SAGE_MAKER_MODEL_EXECUTION_ROLE_ARN: ClassVar[Any] = None - SAGE_MAKER_MODEL_MODEL_GROUP_NAME: ClassVar[Any] = None - SAGE_MAKER_MODEL_MODEL_GROUP_QUALIFIED_NAME: ClassVar[Any] = None - SAGE_MAKER_MODEL_VERSION: ClassVar[Any] = None - SAGE_MAKER_MODEL_STATUS: ClassVar[Any] = None + SAGE_MAKER_CONTAINER_IMAGE: ClassVar[Any] = None + SAGE_MAKER_EXECUTION_ROLE_ARN: ClassVar[Any] = None + SAGE_MAKER_MODEL_GROUP_NAME: ClassVar[Any] = None + SAGE_MAKER_MODEL_GROUP_QUALIFIED_NAME: ClassVar[Any] = None + SAGE_MAKER_VERSION: ClassVar[Any] = None + SAGE_MAKER_STATUS: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -113,6 +113,7 @@ class SageMakerModel(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -132,22 +133,24 @@ class SageMakerModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_model_container_image: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SageMakerModel" + + sage_maker_container_image: Union[str, None, UnsetType] = UNSET """Docker container image used for the model.""" - sage_maker_model_execution_role_arn: Union[str, None, UnsetType] = UNSET + sage_maker_execution_role_arn: Union[str, None, UnsetType] = UNSET """ARN of the IAM role used by the model for accessing AWS resources.""" - sage_maker_model_model_group_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model Group.""" - sage_maker_model_model_group_qualified_name: Union[str, None, UnsetType] = UNSET + 
sage_maker_model_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model Group.""" - sage_maker_model_version: Union[str, None, UnsetType] = UNSET + sage_maker_version: Union[str, None, UnsetType] = UNSET """Version of the SageMaker Model Package.""" - sage_maker_model_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Status of the SageMaker Model Package (ACTIVE or INACTIVE).""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -279,6 +282,11 @@ class SageMakerModel(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -348,78 +356,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sage_maker_model_group is UNSET: - errors.append("sage_maker_model_group is required for creation") - if self.sage_maker_model_group_name is UNSET: - errors.append("sage_maker_model_group_name is required for creation") - if self.sage_maker_model_group_qualified_name is UNSET: - errors.append( - "sage_maker_model_group_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"SageMakerModel validation failed: {errors}") - - def minimize(self) -> "SageMakerModel": - """ - Return a minimal copy of this SageMakerModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerModel instance with only the minimum required fields. - """ - self.validate() - return SageMakerModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMakerModel": - """ - Create a :class:`RelatedSageMakerModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerModel(guid=self.guid) - return RelatedSageMakerModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -475,22 +411,22 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SageMakerMo class SageMakerModelAttributes(AssetAttributes): """SageMakerModel-specific attributes for nested API format.""" - sage_maker_model_container_image: Union[str, None, UnsetType] = UNSET + sage_maker_container_image: Union[str, None, UnsetType] = UNSET """Docker container image used for the model.""" - sage_maker_model_execution_role_arn: Union[str, None, UnsetType] = UNSET + sage_maker_execution_role_arn: Union[str, None, UnsetType] = UNSET """ARN of the IAM role used by the model for accessing AWS resources.""" - sage_maker_model_model_group_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model Group.""" - sage_maker_model_model_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model Group.""" - sage_maker_model_version: Union[str, None, UnsetType] = UNSET + sage_maker_version: Union[str, None, UnsetType] = UNSET """Version of the SageMaker Model Package.""" - sage_maker_model_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Status of the SageMaker Model Package (ACTIVE or INACTIVE).""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -626,6 +562,11 @@ class SageMakerModelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + 
List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -723,6 +664,7 @@ class SageMakerModelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -749,14 +691,14 @@ def _populate_sage_maker_model_attrs( ) -> None: """Populate SageMakerModel-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_model_container_image = obj.sage_maker_model_container_image - attrs.sage_maker_model_execution_role_arn = obj.sage_maker_model_execution_role_arn - attrs.sage_maker_model_model_group_name = obj.sage_maker_model_model_group_name - attrs.sage_maker_model_model_group_qualified_name = ( - obj.sage_maker_model_model_group_qualified_name - ) - attrs.sage_maker_model_version = obj.sage_maker_model_version - attrs.sage_maker_model_status = obj.sage_maker_model_status + attrs.sage_maker_container_image = obj.sage_maker_container_image + attrs.sage_maker_execution_role_arn = obj.sage_maker_execution_role_arn + attrs.sage_maker_model_group_name = obj.sage_maker_model_group_name + attrs.sage_maker_model_group_qualified_name = ( + obj.sage_maker_model_group_qualified_name + ) + attrs.sage_maker_version = obj.sage_maker_version + attrs.sage_maker_status = obj.sage_maker_status attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -788,18 +730,14 @@ def _populate_sage_maker_model_attrs( def _extract_sage_maker_model_attrs(attrs: SageMakerModelAttributes) -> dict: """Extract all SageMakerModel attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) 
- result["sage_maker_model_container_image"] = attrs.sage_maker_model_container_image - result["sage_maker_model_execution_role_arn"] = ( - attrs.sage_maker_model_execution_role_arn - ) - result["sage_maker_model_model_group_name"] = ( - attrs.sage_maker_model_model_group_name - ) - result["sage_maker_model_model_group_qualified_name"] = ( - attrs.sage_maker_model_model_group_qualified_name - ) - result["sage_maker_model_version"] = attrs.sage_maker_model_version - result["sage_maker_model_status"] = attrs.sage_maker_model_status + result["sage_maker_container_image"] = attrs.sage_maker_container_image + result["sage_maker_execution_role_arn"] = attrs.sage_maker_execution_role_arn + result["sage_maker_model_group_name"] = attrs.sage_maker_model_group_name + result["sage_maker_model_group_qualified_name"] = ( + attrs.sage_maker_model_group_qualified_name + ) + result["sage_maker_version"] = attrs.sage_maker_version + result["sage_maker_status"] = attrs.sage_maker_status result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -868,9 +806,6 @@ def _sage_maker_model_to_nested( is_incomplete=sage_maker_model.is_incomplete, provenance_type=sage_maker_model.provenance_type, home_id=sage_maker_model.home_id, - depth=sage_maker_model.depth, - immediate_upstream=sage_maker_model.immediate_upstream, - immediate_downstream=sage_maker_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -904,6 +839,7 @@ def _sage_maker_model_from_nested(nested: SageMakerModelNested) -> SageMakerMode updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -912,9 +848,6 @@ 
def _sage_maker_model_from_nested(nested: SageMakerModelNested) -> SageMakerMode is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -943,24 +876,20 @@ def _sage_maker_model_from_nested_bytes(data: bytes, serde: Serde) -> SageMakerM RelationField, ) -SageMakerModel.SAGE_MAKER_MODEL_CONTAINER_IMAGE = KeywordField( - "sageMakerModelContainerImage", "sageMakerModelContainerImage" +SageMakerModel.SAGE_MAKER_CONTAINER_IMAGE = KeywordField( + "sageMakerContainerImage", "sageMakerContainerImage" ) -SageMakerModel.SAGE_MAKER_MODEL_EXECUTION_ROLE_ARN = KeywordField( - "sageMakerModelExecutionRoleArn", "sageMakerModelExecutionRoleArn" +SageMakerModel.SAGE_MAKER_EXECUTION_ROLE_ARN = KeywordField( + "sageMakerExecutionRoleArn", "sageMakerExecutionRoleArn" ) -SageMakerModel.SAGE_MAKER_MODEL_MODEL_GROUP_NAME = KeywordField( - "sageMakerModelModelGroupName", "sageMakerModelModelGroupName" +SageMakerModel.SAGE_MAKER_MODEL_GROUP_NAME = KeywordField( + "sageMakerModelGroupName", "sageMakerModelGroupName" ) -SageMakerModel.SAGE_MAKER_MODEL_MODEL_GROUP_QUALIFIED_NAME = KeywordField( - "sageMakerModelModelGroupQualifiedName", "sageMakerModelModelGroupQualifiedName" -) -SageMakerModel.SAGE_MAKER_MODEL_VERSION = KeywordField( - "sageMakerModelVersion", "sageMakerModelVersion" -) -SageMakerModel.SAGE_MAKER_MODEL_STATUS = KeywordField( - "sageMakerModelStatus", "sageMakerModelStatus" +SageMakerModel.SAGE_MAKER_MODEL_GROUP_QUALIFIED_NAME = KeywordField( + "sageMakerModelGroupQualifiedName", "sageMakerModelGroupQualifiedName" ) +SageMakerModel.SAGE_MAKER_VERSION = KeywordField("sageMakerVersion", "sageMakerVersion") +SageMakerModel.SAGE_MAKER_STATUS = KeywordField("sageMakerStatus", "sageMakerStatus") SageMakerModel.SAGE_MAKER_S3_URI 
= KeywordField("sageMakerS3Uri", "sageMakerS3Uri") SageMakerModel.ETHICAL_AI_PRIVACY_CONFIG = KeywordField( "ethicalAIPrivacyConfig", "ethicalAIPrivacyConfig" @@ -1029,6 +958,9 @@ def _sage_maker_model_from_nested_bytes(data: bytes, serde: Serde) -> SageMakerM SageMakerModel.METRICS = RelationField("metrics") SageMakerModel.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SageMakerModel.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SageMakerModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SageMakerModel.MEANINGS = RelationField("meanings") SageMakerModel.MC_MONITORS = RelationField("mcMonitors") SageMakerModel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_model_deployment.py b/pyatlan_v9/model/assets/sage_maker_model_deployment.py index a616637ea..fbe1c638d 100644 --- a/pyatlan_v9/model/assets/sage_maker_model_deployment.py +++ b/pyatlan_v9/model/assets/sage_maker_model_deployment.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMakerModel, RelatedSageMakerModelDeployment +from .sage_maker_related import RelatedSageMakerModel from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -65,10 +66,10 @@ class 
SageMakerModelDeployment(Asset): Instance of a SageMaker Endpoint in Atlan. Represents deployed models that can serve real-time inference requests. """ - SAGE_MAKER_MODEL_DEPLOYMENT_STATUS: ClassVar[Any] = None - SAGE_MAKER_MODEL_DEPLOYMENT_ENDPOINT_CONFIG_NAME: ClassVar[Any] = None - SAGE_MAKER_MODEL_DEPLOYMENT_MODEL_NAME: ClassVar[Any] = None - SAGE_MAKER_MODEL_DEPLOYMENT_MODEL_QUALIFIED_NAME: ClassVar[Any] = None + SAGE_MAKER_STATUS: ClassVar[Any] = None + SAGE_MAKER_ENDPOINT_CONFIG_NAME: ClassVar[Any] = None + SAGE_MAKER_MODEL_NAME: ClassVar[Any] = None + SAGE_MAKER_MODEL_QUALIFIED_NAME: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -102,6 +103,7 @@ class SageMakerModelDeployment(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -120,20 +122,18 @@ class SageMakerModelDeployment(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_model_deployment_status: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SageMakerModelDeployment" + + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the endpoint (e.g., InService, OutOfService, Creating, Failed).""" - sage_maker_model_deployment_endpoint_config_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_endpoint_config_name: Union[str, None, UnsetType] = UNSET """Name of the endpoint configuration used by this deployment.""" - sage_maker_model_deployment_model_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model.""" - sage_maker_model_deployment_model_qualified_name: 
Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -253,6 +253,11 @@ class SageMakerModelDeployment(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -319,80 +324,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerModelDeployment instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sage_maker_model is UNSET: - errors.append("sage_maker_model is required for creation") - if self.sage_maker_model_name is UNSET: - errors.append("sage_maker_model_name is required for creation") - if self.sage_maker_model_qualified_name is UNSET: - errors.append( - "sage_maker_model_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"SageMakerModelDeployment validation failed: {errors}") - - def minimize(self) -> "SageMakerModelDeployment": - """ - Return a minimal copy of this SageMakerModelDeployment with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerModelDeployment with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerModelDeployment instance with only the minimum required fields. - """ - self.validate() - return SageMakerModelDeployment( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerModelDeployment": - """ - Create a :class:`RelatedSageMakerModelDeployment` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedSageMakerModelDeployment reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSageMakerModelDeployment(guid=self.guid) - return RelatedSageMakerModelDeployment(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -450,20 +381,16 @@ def from_json( class SageMakerModelDeploymentAttributes(AssetAttributes): """SageMakerModelDeployment-specific attributes for nested API format.""" - sage_maker_model_deployment_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the endpoint (e.g., InService, OutOfService, Creating, Failed).""" - sage_maker_model_deployment_endpoint_config_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_endpoint_config_name: Union[str, None, UnsetType] = UNSET """Name of the endpoint configuration used by this deployment.""" - sage_maker_model_deployment_model_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model.""" - sage_maker_model_deployment_model_qualified_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -587,6 +514,11 @@ class SageMakerModelDeploymentRelationshipAttributes(AssetRelationshipAttributes ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ 
-678,6 +610,7 @@ class SageMakerModelDeploymentNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -703,16 +636,10 @@ def _populate_sage_maker_model_deployment_attrs( ) -> None: """Populate SageMakerModelDeployment-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_model_deployment_status = obj.sage_maker_model_deployment_status - attrs.sage_maker_model_deployment_endpoint_config_name = ( - obj.sage_maker_model_deployment_endpoint_config_name - ) - attrs.sage_maker_model_deployment_model_name = ( - obj.sage_maker_model_deployment_model_name - ) - attrs.sage_maker_model_deployment_model_qualified_name = ( - obj.sage_maker_model_deployment_model_qualified_name - ) + attrs.sage_maker_status = obj.sage_maker_status + attrs.sage_maker_endpoint_config_name = obj.sage_maker_endpoint_config_name + attrs.sage_maker_model_name = obj.sage_maker_model_name + attrs.sage_maker_model_qualified_name = obj.sage_maker_model_qualified_name attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -743,18 +670,10 @@ def _extract_sage_maker_model_deployment_attrs( ) -> dict: """Extract all SageMakerModelDeployment attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sage_maker_model_deployment_status"] = ( - attrs.sage_maker_model_deployment_status - ) - result["sage_maker_model_deployment_endpoint_config_name"] = ( - attrs.sage_maker_model_deployment_endpoint_config_name - ) - result["sage_maker_model_deployment_model_name"] = ( - attrs.sage_maker_model_deployment_model_name - ) - result["sage_maker_model_deployment_model_qualified_name"] = ( - attrs.sage_maker_model_deployment_model_qualified_name - ) + result["sage_maker_status"] = 
attrs.sage_maker_status + result["sage_maker_endpoint_config_name"] = attrs.sage_maker_endpoint_config_name + result["sage_maker_model_name"] = attrs.sage_maker_model_name + result["sage_maker_model_qualified_name"] = attrs.sage_maker_model_qualified_name result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -820,9 +739,6 @@ def _sage_maker_model_deployment_to_nested( is_incomplete=sage_maker_model_deployment.is_incomplete, provenance_type=sage_maker_model_deployment.provenance_type, home_id=sage_maker_model_deployment.home_id, - depth=sage_maker_model_deployment.depth, - immediate_upstream=sage_maker_model_deployment.immediate_upstream, - immediate_downstream=sage_maker_model_deployment.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -858,6 +774,7 @@ def _sage_maker_model_deployment_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -866,9 +783,6 @@ def _sage_maker_model_deployment_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_model_deployment_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -901,23 +815,17 @@ def _sage_maker_model_deployment_from_nested_bytes( RelationField, ) -SageMakerModelDeployment.SAGE_MAKER_MODEL_DEPLOYMENT_STATUS = KeywordField( - "sageMakerModelDeploymentStatus", "sageMakerModelDeploymentStatus" +SageMakerModelDeployment.SAGE_MAKER_STATUS = KeywordField( + "sageMakerStatus", 
"sageMakerStatus" ) -SageMakerModelDeployment.SAGE_MAKER_MODEL_DEPLOYMENT_ENDPOINT_CONFIG_NAME = ( - KeywordField( - "sageMakerModelDeploymentEndpointConfigName", - "sageMakerModelDeploymentEndpointConfigName", - ) +SageMakerModelDeployment.SAGE_MAKER_ENDPOINT_CONFIG_NAME = KeywordField( + "sageMakerEndpointConfigName", "sageMakerEndpointConfigName" ) -SageMakerModelDeployment.SAGE_MAKER_MODEL_DEPLOYMENT_MODEL_NAME = KeywordField( - "sageMakerModelDeploymentModelName", "sageMakerModelDeploymentModelName" +SageMakerModelDeployment.SAGE_MAKER_MODEL_NAME = KeywordField( + "sageMakerModelName", "sageMakerModelName" ) -SageMakerModelDeployment.SAGE_MAKER_MODEL_DEPLOYMENT_MODEL_QUALIFIED_NAME = ( - KeywordField( - "sageMakerModelDeploymentModelQualifiedName", - "sageMakerModelDeploymentModelQualifiedName", - ) +SageMakerModelDeployment.SAGE_MAKER_MODEL_QUALIFIED_NAME = KeywordField( + "sageMakerModelQualifiedName", "sageMakerModelQualifiedName" ) SageMakerModelDeployment.SAGE_MAKER_S3_URI = KeywordField( "sageMakerS3Uri", "sageMakerS3Uri" @@ -991,6 +899,9 @@ def _sage_maker_model_deployment_from_nested_bytes( SageMakerModelDeployment.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SageMakerModelDeployment.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SageMakerModelDeployment.MEANINGS = RelationField("meanings") SageMakerModelDeployment.MC_MONITORS = RelationField("mcMonitors") SageMakerModelDeployment.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_model_group.py b/pyatlan_v9/model/assets/sage_maker_model_group.py index c92f82702..f243bfa40 100644 --- a/pyatlan_v9/model/assets/sage_maker_model_group.py +++ b/pyatlan_v9/model/assets/sage_maker_model_group.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, 
RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_related import RelatedSageMakerModel, RelatedSageMakerModelGroup +from .sage_maker_related import RelatedSageMakerModel from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -65,7 +66,7 @@ class SageMakerModelGroup(Asset): Instance of a SageMaker Model Package Group in Atlan. Represents a collection of versioned models that can be organized and managed together. """ - SAGE_MAKER_MODEL_GROUP_STATUS: ClassVar[Any] = None + SAGE_MAKER_STATUS: ClassVar[Any] = None SAGE_MAKER_S3_URI: ClassVar[Any] = None ETHICAL_AI_PRIVACY_CONFIG: ClassVar[Any] = None ETHICAL_AI_FAIRNESS_CONFIG: ClassVar[Any] = None @@ -104,6 +105,7 @@ class SageMakerModelGroup(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -122,7 +124,9 @@ class SageMakerModelGroup(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sage_maker_model_group_status: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SageMakerModelGroup" + + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Model Package Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -259,6 +263,11 @@ class 
SageMakerModelGroup(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -317,66 +326,6 @@ class SageMakerModelGroup(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerModelGroup" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerModelGroup instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMakerModelGroup validation failed: {errors}") - - def minimize(self) -> "SageMakerModelGroup": - """ - Return a minimal copy of this SageMakerModelGroup with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerModelGroup with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerModelGroup instance with only the minimum required fields. - """ - self.validate() - return SageMakerModelGroup(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSageMakerModelGroup": - """ - Create a :class:`RelatedSageMakerModelGroup` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerModelGroup reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSageMakerModelGroup(guid=self.guid) - return RelatedSageMakerModelGroup(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -434,7 +383,7 @@ def from_json( class SageMakerModelGroupAttributes(AssetAttributes): """SageMakerModelGroup-specific attributes for nested API format.""" - sage_maker_model_group_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Model Package Group.""" sage_maker_s3_uri: Union[str, None, UnsetType] = UNSET @@ -575,6 +524,11 @@ class SageMakerModelGroupRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this 
asset.""" @@ -668,6 +622,7 @@ class SageMakerModelGroupNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -693,7 +648,7 @@ def _populate_sage_maker_model_group_attrs( ) -> None: """Populate SageMakerModelGroup-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sage_maker_model_group_status = obj.sage_maker_model_group_status + attrs.sage_maker_status = obj.sage_maker_status attrs.sage_maker_s3_uri = obj.sage_maker_s3_uri attrs.ethical_ai_privacy_config = obj.ethical_ai_privacy_config attrs.ethical_ai_fairness_config = obj.ethical_ai_fairness_config @@ -725,7 +680,7 @@ def _populate_sage_maker_model_group_attrs( def _extract_sage_maker_model_group_attrs(attrs: SageMakerModelGroupAttributes) -> dict: """Extract all SageMakerModelGroup attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sage_maker_model_group_status"] = attrs.sage_maker_model_group_status + result["sage_maker_status"] = attrs.sage_maker_status result["sage_maker_s3_uri"] = attrs.sage_maker_s3_uri result["ethical_ai_privacy_config"] = attrs.ethical_ai_privacy_config result["ethical_ai_fairness_config"] = attrs.ethical_ai_fairness_config @@ -794,9 +749,6 @@ def _sage_maker_model_group_to_nested( is_incomplete=sage_maker_model_group.is_incomplete, provenance_type=sage_maker_model_group.provenance_type, home_id=sage_maker_model_group.home_id, - depth=sage_maker_model_group.depth, - immediate_upstream=sage_maker_model_group.immediate_upstream, - immediate_downstream=sage_maker_model_group.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -832,6 +784,7 @@ def _sage_maker_model_group_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + 
meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -840,9 +793,6 @@ def _sage_maker_model_group_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_model_group_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -873,8 +823,8 @@ def _sage_maker_model_group_from_nested_bytes( RelationField, ) -SageMakerModelGroup.SAGE_MAKER_MODEL_GROUP_STATUS = KeywordField( - "sageMakerModelGroupStatus", "sageMakerModelGroupStatus" +SageMakerModelGroup.SAGE_MAKER_STATUS = KeywordField( + "sageMakerStatus", "sageMakerStatus" ) SageMakerModelGroup.SAGE_MAKER_S3_URI = KeywordField("sageMakerS3Uri", "sageMakerS3Uri") SageMakerModelGroup.ETHICAL_AI_PRIVACY_CONFIG = KeywordField( @@ -945,6 +895,9 @@ def _sage_maker_model_group_from_nested_bytes( SageMakerModelGroup.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SageMakerModelGroup.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SageMakerModelGroup.MEANINGS = RelationField("meanings") SageMakerModelGroup.MC_MONITORS = RelationField("mcMonitors") SageMakerModelGroup.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_related.py b/pyatlan_v9/model/assets/sage_maker_related.py index 07cd28170..2589e80dd 100644 --- a/pyatlan_v9/model/assets/sage_maker_related.py +++ b/pyatlan_v9/model/assets/sage_maker_related.py @@ -56,19 +56,19 @@ class RelatedSageMakerFeatureGroup(RelatedSageMaker): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerFeatureGroup" so it serializes correctly - sage_maker_feature_group_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, 
None, UnsetType] = UNSET """Current status of the Feature Group (e.g., Created, Creating, Failed).""" - sage_maker_feature_group_record_id_name: Union[str, None, UnsetType] = UNSET + sage_maker_record_id_name: Union[str, None, UnsetType] = UNSET """Name of the feature that serves as the record identifier.""" - sage_maker_feature_group_glue_database_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_database_name: Union[str, None, UnsetType] = UNSET """AWS Glue database name associated with this Feature Group.""" - sage_maker_feature_group_glue_table_name: Union[str, None, UnsetType] = UNSET + sage_maker_glue_table_name: Union[str, None, UnsetType] = UNSET """AWS Glue table name associated with this Feature Group.""" - sage_maker_feature_group_feature_count: Union[int, None, UnsetType] = UNSET + sage_maker_feature_count: Union[int, None, UnsetType] = UNSET """Number of features in this Feature Group.""" def __post_init__(self) -> None: @@ -86,16 +86,16 @@ class RelatedSageMakerFeature(RelatedSageMaker): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerFeature" so it serializes correctly - sage_maker_feature_group_name: Union[str, None, UnsetType] = UNSET + sage_maker_group_name: Union[str, None, UnsetType] = UNSET """Name of the Feature Group that contains this feature.""" - sage_maker_feature_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the Feature Group that contains this feature.""" - sage_maker_feature_data_type: Union[str, None, UnsetType] = UNSET + sage_maker_data_type: Union[str, None, UnsetType] = UNSET """Data type of the feature (e.g., String, Integral, Fractional).""" - sage_maker_feature_is_record_identifier: Union[bool, None, UnsetType] = UNSET + sage_maker_is_record_identifier: Union[bool, None, UnsetType] = UNSET """Whether this feature serves as the record identifier for the Feature Group.""" def 
__post_init__(self) -> None: @@ -113,22 +113,22 @@ class RelatedSageMakerModel(RelatedSageMaker): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerModel" so it serializes correctly - sage_maker_model_container_image: Union[str, None, UnsetType] = UNSET + sage_maker_container_image: Union[str, None, UnsetType] = UNSET """Docker container image used for the model.""" - sage_maker_model_execution_role_arn: Union[str, None, UnsetType] = UNSET + sage_maker_execution_role_arn: Union[str, None, UnsetType] = UNSET """ARN of the IAM role used by the model for accessing AWS resources.""" - sage_maker_model_model_group_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model Group.""" - sage_maker_model_model_group_qualified_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_group_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model Group.""" - sage_maker_model_version: Union[str, None, UnsetType] = UNSET + sage_maker_version: Union[str, None, UnsetType] = UNSET """Version of the SageMaker Model Package.""" - sage_maker_model_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Status of the SageMaker Model Package (ACTIVE or INACTIVE).""" def __post_init__(self) -> None: @@ -146,7 +146,7 @@ class RelatedSageMakerModelGroup(RelatedSageMaker): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerModelGroup" so it serializes correctly - sage_maker_model_group_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the Model Package Group.""" def __post_init__(self) -> None: @@ -164,20 +164,16 @@ class RelatedSageMakerModelDeployment(RelatedSageMaker): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SageMakerModelDeployment" so it 
serializes correctly - sage_maker_model_deployment_status: Union[str, None, UnsetType] = UNSET + sage_maker_status: Union[str, None, UnsetType] = UNSET """Current status of the endpoint (e.g., InService, OutOfService, Creating, Failed).""" - sage_maker_model_deployment_endpoint_config_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_endpoint_config_name: Union[str, None, UnsetType] = UNSET """Name of the endpoint configuration used by this deployment.""" - sage_maker_model_deployment_model_name: Union[str, None, UnsetType] = UNSET + sage_maker_model_name: Union[str, None, UnsetType] = UNSET """Name of the parent Model.""" - sage_maker_model_deployment_model_qualified_name: Union[str, None, UnsetType] = ( - UNSET - ) + sage_maker_model_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the parent Model.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio.py b/pyatlan_v9/model/assets/sage_maker_unified_studio.py index e084e749a..a5f96a6ea 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,7 +48,6 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_unified_studio_related import RelatedSageMakerUnifiedStudio from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from 
.spark_related import RelatedSparkJob @@ -84,6 +84,7 @@ class SageMakerUnifiedStudio(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -101,6 +102,8 @@ class SageMakerUnifiedStudio(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudio" + smus_domain_name: Union[str, None, UnsetType] = UNSET """Name of the SageMaker Unified Studio domain.""" @@ -168,6 +171,11 @@ class SageMakerUnifiedStudio(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -223,68 +231,6 @@ class SageMakerUnifiedStudio(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerUnifiedStudio" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudio instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMakerUnifiedStudio validation failed: {errors}") - - def minimize(self) -> "SageMakerUnifiedStudio": - """ - Return a minimal copy of this SageMakerUnifiedStudio with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudio with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudio instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudio( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudio": - """ - Create a :class:`RelatedSageMakerUnifiedStudio` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudio reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudio(guid=self.guid) - return RelatedSageMakerUnifiedStudio(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -413,6 +359,11 @@ class SageMakerUnifiedStudioRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -501,6 +452,7 @@ class SageMakerUnifiedStudioNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -586,9 +538,6 @@ def _sage_maker_unified_studio_to_nested( is_incomplete=sage_maker_unified_studio.is_incomplete, provenance_type=sage_maker_unified_studio.provenance_type, home_id=sage_maker_unified_studio.home_id, - depth=sage_maker_unified_studio.depth, - immediate_upstream=sage_maker_unified_studio.immediate_upstream, - immediate_downstream=sage_maker_unified_studio.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -624,6 +573,7 @@ def _sage_maker_unified_studio_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -632,9 +582,6 @@ def _sage_maker_unified_studio_from_nested( 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -704,6 +651,9 @@ def _sage_maker_unified_studio_from_nested_bytes( SageMakerUnifiedStudio.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SageMakerUnifiedStudio.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SageMakerUnifiedStudio.MEANINGS = RelationField("meanings") SageMakerUnifiedStudio.MC_MONITORS = RelationField("mcMonitors") SageMakerUnifiedStudio.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_asset.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_asset.py index 9a64ff941..b7a01e822 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio_asset.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio_asset.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,10 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_unified_studio_related import ( - RelatedSageMakerUnifiedStudioAsset, - RelatedSageMakerUnifiedStudioAssetSchema, -) +from .sage_maker_unified_studio_related import RelatedSageMakerUnifiedStudioAssetSchema from .schema_registry_related 
import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -92,6 +90,7 @@ class SageMakerUnifiedStudioAsset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,6 +109,8 @@ class SageMakerUnifiedStudioAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioAsset" + smus_asset_summary: Union[str, None, UnsetType] = UNSET """Summary text for the asset in SageMaker Unified Studio.""" @@ -192,6 +193,11 @@ class SageMakerUnifiedStudioAsset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -252,68 +258,6 @@ class SageMakerUnifiedStudioAsset(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerUnifiedStudioAsset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SageMakerUnifiedStudioAsset validation failed: {errors}") - - def minimize(self) -> "SageMakerUnifiedStudioAsset": - """ - Return a minimal copy of this SageMakerUnifiedStudioAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioAsset instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioAsset( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioAsset": - """ - Create a :class:`RelatedSageMakerUnifiedStudioAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioAsset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioAsset(guid=self.guid) - return RelatedSageMakerUnifiedStudioAsset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -457,6 +401,11 @@ class SageMakerUnifiedStudioAssetRelationshipAttributes(AssetRelationshipAttribu ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -550,6 +499,7 @@ class SageMakerUnifiedStudioAssetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -648,9 +598,6 @@ def _sage_maker_unified_studio_asset_to_nested( is_incomplete=sage_maker_unified_studio_asset.is_incomplete, provenance_type=sage_maker_unified_studio_asset.provenance_type, home_id=sage_maker_unified_studio_asset.home_id, - depth=sage_maker_unified_studio_asset.depth, - immediate_upstream=sage_maker_unified_studio_asset.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -686,6 +633,7 @@ def _sage_maker_unified_studio_asset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -694,9 +642,6 @@ def 
_sage_maker_unified_studio_asset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_asset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -791,6 +736,9 @@ def _sage_maker_unified_studio_asset_from_nested_bytes( SageMakerUnifiedStudioAsset.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SageMakerUnifiedStudioAsset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SageMakerUnifiedStudioAsset.MEANINGS = RelationField("meanings") SageMakerUnifiedStudioAsset.MC_MONITORS = RelationField("mcMonitors") SageMakerUnifiedStudioAsset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_asset_schema.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_asset_schema.py index d2fcc544a..135bccb2c 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio_asset_schema.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio_asset_schema.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,10 +49,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sage_maker_unified_studio_related import ( - RelatedSageMakerUnifiedStudioAsset, - RelatedSageMakerUnifiedStudioAssetSchema, -) +from 
.sage_maker_unified_studio_related import RelatedSageMakerUnifiedStudioAsset from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -91,6 +89,7 @@ class SageMakerUnifiedStudioAssetSchema(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -109,6 +108,8 @@ class SageMakerUnifiedStudioAssetSchema(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioAssetSchema" + smus_data_type: Union[str, None, UnsetType] = UNSET """Data type of the schema/column.""" @@ -185,6 +186,11 @@ class SageMakerUnifiedStudioAssetSchema(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -249,78 +255,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioAssetSchema instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.smus_asset is UNSET: - errors.append("smus_asset is required for creation") - if errors: - raise ValueError( - f"SageMakerUnifiedStudioAssetSchema validation failed: {errors}" - ) - - def minimize(self) -> "SageMakerUnifiedStudioAssetSchema": - """ - Return a minimal copy of this SageMakerUnifiedStudioAssetSchema with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioAssetSchema with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioAssetSchema instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioAssetSchema( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioAssetSchema": - """ - Create a :class:`RelatedSageMakerUnifiedStudioAssetSchema` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioAssetSchema reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioAssetSchema(guid=self.guid) - return RelatedSageMakerUnifiedStudioAssetSchema( - qualified_name=self.qualified_name - ) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -462,6 +396,11 @@ class SageMakerUnifiedStudioAssetSchemaRelationshipAttributes( ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -553,6 +492,7 @@ class SageMakerUnifiedStudioAssetSchemaNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -648,9 +588,6 @@ def _sage_maker_unified_studio_asset_schema_to_nested( is_incomplete=sage_maker_unified_studio_asset_schema.is_incomplete, provenance_type=sage_maker_unified_studio_asset_schema.provenance_type, home_id=sage_maker_unified_studio_asset_schema.home_id, - depth=sage_maker_unified_studio_asset_schema.depth, - immediate_upstream=sage_maker_unified_studio_asset_schema.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_asset_schema.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -686,6 +623,7 @@ def _sage_maker_unified_studio_asset_schema_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -694,9 +632,6 @@ def _sage_maker_unified_studio_asset_schema_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_asset_schema_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -792,6 +727,9 @@ def _sage_maker_unified_studio_asset_schema_from_nested_bytes( SageMakerUnifiedStudioAssetSchema.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SageMakerUnifiedStudioAssetSchema.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = ( + RelationField("gcpDataplexAspectTypeMetadataEntities") +) SageMakerUnifiedStudioAssetSchema.MEANINGS = RelationField("meanings") SageMakerUnifiedStudioAssetSchema.MC_MONITORS = RelationField("mcMonitors") SageMakerUnifiedStudioAssetSchema.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_project.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_project.py index 61ede08ab..35e47ffb6 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio_project.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio_project.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .sage_maker_unified_studio_related import ( - RelatedSageMakerUnifiedStudioProject, 
RelatedSageMakerUnifiedStudioPublishedAsset, RelatedSageMakerUnifiedStudioSubscribedAsset, ) @@ -92,6 +92,7 @@ class SageMakerUnifiedStudioProject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +112,8 @@ class SageMakerUnifiedStudioProject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioProject" + smus_project_status: Union[str, None, UnsetType] = UNSET """Status of the SageMaker Unified Studio project.""" @@ -190,6 +193,11 @@ class SageMakerUnifiedStudioProject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -255,70 +263,6 @@ class SageMakerUnifiedStudioProject(Asset): def __post_init__(self) -> None: self.type_name = "SageMakerUnifiedStudioProject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError( - f"SageMakerUnifiedStudioProject validation failed: {errors}" - ) - - def minimize(self) -> "SageMakerUnifiedStudioProject": - """ - Return a minimal copy of this SageMakerUnifiedStudioProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioProject instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioProject( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioProject": - """ - Create a :class:`RelatedSageMakerUnifiedStudioProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioProject(guid=self.guid) - return RelatedSageMakerUnifiedStudioProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -459,6 +403,11 @@ class SageMakerUnifiedStudioProjectRelationshipAttributes(AssetRelationshipAttri ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -557,6 +506,7 @@ class SageMakerUnifiedStudioProjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -654,9 +604,6 @@ def _sage_maker_unified_studio_project_to_nested( is_incomplete=sage_maker_unified_studio_project.is_incomplete, provenance_type=sage_maker_unified_studio_project.provenance_type, home_id=sage_maker_unified_studio_project.home_id, - depth=sage_maker_unified_studio_project.depth, - immediate_upstream=sage_maker_unified_studio_project.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -692,6 +639,7 @@ def _sage_maker_unified_studio_project_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ 
-700,9 +648,6 @@ def _sage_maker_unified_studio_project_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_project_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -796,6 +741,9 @@ def _sage_maker_unified_studio_project_from_nested_bytes( SageMakerUnifiedStudioProject.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SageMakerUnifiedStudioProject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = ( + RelationField("gcpDataplexAspectTypeMetadataEntities") +) SageMakerUnifiedStudioProject.MEANINGS = RelationField("meanings") SageMakerUnifiedStudioProject.MC_MONITORS = RelationField("mcMonitors") SageMakerUnifiedStudioProject.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_published_asset.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_published_asset.py index 897884115..c1a0b288d 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio_published_asset.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio_published_asset.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,7 +52,6 @@ from .sage_maker_unified_studio_related import ( RelatedSageMakerUnifiedStudioAssetSchema, RelatedSageMakerUnifiedStudioProject, - RelatedSageMakerUnifiedStudioPublishedAsset, RelatedSageMakerUnifiedStudioSubscribedAsset, ) from .schema_registry_related import 
RelatedSchemaRegistrySubject @@ -96,6 +96,7 @@ class SageMakerUnifiedStudioPublishedAsset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -116,6 +117,8 @@ class SageMakerUnifiedStudioPublishedAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioPublishedAsset" + smus_published_asset_subscriptions_count: Union[int, None, UnsetType] = UNSET """Number of subscriptions for the published asset.""" @@ -201,6 +204,11 @@ class SageMakerUnifiedStudioPublishedAsset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -275,78 +283,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioPublishedAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.smus_project is UNSET: - errors.append("smus_project is required for creation") - if errors: - raise ValueError( - f"SageMakerUnifiedStudioPublishedAsset validation failed: {errors}" - ) - - def minimize(self) -> "SageMakerUnifiedStudioPublishedAsset": - """ - Return a minimal copy of this SageMakerUnifiedStudioPublishedAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioPublishedAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioPublishedAsset instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioPublishedAsset( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioPublishedAsset": - """ - Create a :class:`RelatedSageMakerUnifiedStudioPublishedAsset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioPublishedAsset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioPublishedAsset(guid=self.guid) - return RelatedSageMakerUnifiedStudioPublishedAsset( - qualified_name=self.qualified_name - ) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -497,6 +433,11 @@ class SageMakerUnifiedStudioPublishedAssetRelationshipAttributes( ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -598,6 +539,7 @@ class SageMakerUnifiedStudioPublishedAssetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -705,9 +647,6 @@ def _sage_maker_unified_studio_published_asset_to_nested( is_incomplete=sage_maker_unified_studio_published_asset.is_incomplete, provenance_type=sage_maker_unified_studio_published_asset.provenance_type, home_id=sage_maker_unified_studio_published_asset.home_id, - depth=sage_maker_unified_studio_published_asset.depth, - immediate_upstream=sage_maker_unified_studio_published_asset.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_published_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -743,6 +682,7 @@ def _sage_maker_unified_studio_published_asset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, 
business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -751,9 +691,6 @@ def _sage_maker_unified_studio_published_asset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_published_asset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -866,6 +803,9 @@ def _sage_maker_unified_studio_published_asset_from_nested_bytes( SageMakerUnifiedStudioPublishedAsset.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SageMakerUnifiedStudioPublishedAsset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = ( + RelationField("gcpDataplexAspectTypeMetadataEntities") +) SageMakerUnifiedStudioPublishedAsset.MEANINGS = RelationField("meanings") SageMakerUnifiedStudioPublishedAsset.MC_MONITORS = RelationField("mcMonitors") SageMakerUnifiedStudioPublishedAsset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sage_maker_unified_studio_subscribed_asset.py b/pyatlan_v9/model/assets/sage_maker_unified_studio_subscribed_asset.py index f9eba1473..2c833ee90 100644 --- a/pyatlan_v9/model/assets/sage_maker_unified_studio_subscribed_asset.py +++ b/pyatlan_v9/model/assets/sage_maker_unified_studio_subscribed_asset.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,7 +53,6 @@ RelatedSageMakerUnifiedStudioAssetSchema, RelatedSageMakerUnifiedStudioProject, 
RelatedSageMakerUnifiedStudioPublishedAsset, - RelatedSageMakerUnifiedStudioSubscribedAsset, ) from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck @@ -103,6 +103,7 @@ class SageMakerUnifiedStudioSubscribedAsset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -123,6 +124,8 @@ class SageMakerUnifiedStudioSubscribedAsset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SageMakerUnifiedStudioSubscribedAsset" + smus_subscribed_asset_project_name: Union[str, None, UnsetType] = UNSET """Name of the SageMaker Unified Studio project from which this asset is subscribed.""" @@ -229,6 +232,11 @@ class SageMakerUnifiedStudioSubscribedAsset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -303,78 +311,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SageMakerUnifiedStudioSubscribedAsset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.smus_project is UNSET: - errors.append("smus_project is required for creation") - if errors: - raise ValueError( - f"SageMakerUnifiedStudioSubscribedAsset validation failed: {errors}" - ) - - def minimize(self) -> "SageMakerUnifiedStudioSubscribedAsset": - """ - Return a minimal copy of this SageMakerUnifiedStudioSubscribedAsset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SageMakerUnifiedStudioSubscribedAsset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SageMakerUnifiedStudioSubscribedAsset instance with only the minimum required fields. - """ - self.validate() - return SageMakerUnifiedStudioSubscribedAsset( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSageMakerUnifiedStudioSubscribedAsset": - """ - Create a :class:`RelatedSageMakerUnifiedStudioSubscribedAsset` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSageMakerUnifiedStudioSubscribedAsset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSageMakerUnifiedStudioSubscribedAsset(guid=self.guid) - return RelatedSageMakerUnifiedStudioSubscribedAsset( - qualified_name=self.qualified_name - ) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -546,6 +482,11 @@ class SageMakerUnifiedStudioSubscribedAssetRelationshipAttributes( ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -649,6 +590,7 @@ class SageMakerUnifiedStudioSubscribedAssetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -790,9 +732,6 @@ def _sage_maker_unified_studio_subscribed_asset_to_nested( is_incomplete=sage_maker_unified_studio_subscribed_asset.is_incomplete, provenance_type=sage_maker_unified_studio_subscribed_asset.provenance_type, home_id=sage_maker_unified_studio_subscribed_asset.home_id, - depth=sage_maker_unified_studio_subscribed_asset.depth, - immediate_upstream=sage_maker_unified_studio_subscribed_asset.immediate_upstream, - immediate_downstream=sage_maker_unified_studio_subscribed_asset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -828,6 +767,7 @@ 
def _sage_maker_unified_studio_subscribed_asset_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -836,9 +776,6 @@ def _sage_maker_unified_studio_subscribed_asset_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sage_maker_unified_studio_subscribed_asset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -974,6 +911,9 @@ def _sage_maker_unified_studio_subscribed_asset_from_nested_bytes( SageMakerUnifiedStudioSubscribedAsset.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SageMakerUnifiedStudioSubscribedAsset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = ( + RelationField("gcpDataplexAspectTypeMetadataEntities") +) SageMakerUnifiedStudioSubscribedAsset.MEANINGS = RelationField("meanings") SageMakerUnifiedStudioSubscribedAsset.MC_MONITORS = RelationField("mcMonitors") SageMakerUnifiedStudioSubscribedAsset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/salesforce.py b/pyatlan_v9/model/assets/salesforce.py index 52c8a6d29..6fa435db4 100644 --- a/pyatlan_v9/model/assets/salesforce.py +++ b/pyatlan_v9/model/assets/salesforce.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,7 +48,6 @@ 
from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .salesforce_related import RelatedSalesforce from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -80,6 +80,7 @@ class Salesforce(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +98,8 @@ class Salesforce(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Salesforce" + organization_qualified_name: Union[str, None, UnsetType] = UNSET """Fully-qualified name of the organization in Salesforce.""" @@ -152,6 +155,11 @@ class Salesforce(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -207,66 +215,6 @@ class Salesforce(Asset): def __post_init__(self) -> None: self.type_name = "Salesforce" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Salesforce instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Salesforce validation failed: {errors}") - - def minimize(self) -> "Salesforce": - """ - Return a minimal copy of this Salesforce with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Salesforce with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Salesforce instance with only the minimum required fields. - """ - self.validate() - return Salesforce(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforce": - """ - Create a :class:`RelatedSalesforce` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforce reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforce(guid=self.guid) - return RelatedSalesforce(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -381,6 +329,11 @@ class SalesforceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -467,6 +420,7 @@ class SalesforceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -536,9 +490,6 @@ def _salesforce_to_nested(salesforce: Salesforce) -> SalesforceNested: is_incomplete=salesforce.is_incomplete, provenance_type=salesforce.provenance_type, home_id=salesforce.home_id, - depth=salesforce.depth, - immediate_upstream=salesforce.immediate_upstream, - immediate_downstream=salesforce.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -570,6 +521,7 @@ def _salesforce_from_nested(nested: SalesforceNested) -> Salesforce: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -578,9 +530,6 @@ def _salesforce_from_nested(nested: SalesforceNested) -> Salesforce: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -624,6 +573,9 @@ def _salesforce_from_nested_bytes(data: bytes, serde: Serde) -> Salesforce: Salesforce.METRICS = RelationField("metrics") Salesforce.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Salesforce.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Salesforce.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Salesforce.MEANINGS = RelationField("meanings") Salesforce.MC_MONITORS = RelationField("mcMonitors") Salesforce.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/salesforce_dashboard.py b/pyatlan_v9/model/assets/salesforce_dashboard.py index ab44b0363..01f52da15 100644 --- a/pyatlan_v9/model/assets/salesforce_dashboard.py +++ b/pyatlan_v9/model/assets/salesforce_dashboard.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,11 +49,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .salesforce_related import ( - RelatedSalesforceDashboard, - RelatedSalesforceOrganization, - RelatedSalesforceReport, -) +from .salesforce_related import RelatedSalesforceOrganization, RelatedSalesforceReport from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import 
RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -88,6 +85,7 @@ class SalesforceDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -107,6 +105,8 @@ class SalesforceDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceDashboard" + source_id: Union[str, None, UnsetType] = UNSET """Identifier of the dashboard in Salesforce.""" @@ -171,6 +171,11 @@ class SalesforceDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -238,74 +243,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.organization is UNSET: - errors.append("organization is required for creation") - if self.organization_qualified_name is UNSET: - errors.append("organization_qualified_name is required for creation") - if errors: - raise ValueError(f"SalesforceDashboard validation failed: {errors}") - - def minimize(self) -> "SalesforceDashboard": - """ - Return a minimal copy of this SalesforceDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceDashboard instance with only the minimum required fields. - """ - self.validate() - return SalesforceDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforceDashboard": - """ - Create a :class:`RelatedSalesforceDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforceDashboard(guid=self.guid) - return RelatedSalesforceDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -431,6 +368,11 @@ class SalesforceDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -525,6 +467,7 @@ class SalesforceDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -608,9 +551,6 @@ def _salesforce_dashboard_to_nested( is_incomplete=salesforce_dashboard.is_incomplete, provenance_type=salesforce_dashboard.provenance_type, home_id=salesforce_dashboard.home_id, - depth=salesforce_dashboard.depth, - immediate_upstream=salesforce_dashboard.immediate_upstream, - immediate_downstream=salesforce_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -646,6 +586,7 @@ def _salesforce_dashboard_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -654,9 +595,6 @@ def _salesforce_dashboard_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -719,6 +657,9 @@ def _salesforce_dashboard_from_nested_bytes( SalesforceDashboard.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SalesforceDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SalesforceDashboard.MEANINGS = RelationField("meanings") SalesforceDashboard.MC_MONITORS = RelationField("mcMonitors") SalesforceDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/salesforce_field.py b/pyatlan_v9/model/assets/salesforce_field.py index 451427951..d775a2bf3 100644 --- a/pyatlan_v9/model/assets/salesforce_field.py +++ b/pyatlan_v9/model/assets/salesforce_field.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .salesforce_related import RelatedSalesforceField, RelatedSalesforceObject +from .salesforce_related import RelatedSalesforceObject from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -97,6 +98,7 @@ class SalesforceField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None 
DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -116,6 +118,8 @@ class SalesforceField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceField" + data_type: Union[str, None, UnsetType] = UNSET """Data type of values in this field.""" @@ -219,6 +223,11 @@ class SalesforceField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -288,76 +297,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.object is UNSET: - errors.append("object is required for creation") - if self.object_qualified_name is UNSET: - errors.append("object_qualified_name is required for creation") - if self.organization_qualified_name is UNSET: - errors.append("organization_qualified_name is required for creation") - if errors: - raise ValueError(f"SalesforceField validation failed: {errors}") - - def minimize(self) -> "SalesforceField": - """ - Return a minimal copy of this SalesforceField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceField instance with only the minimum required fields. - """ - self.validate() - return SalesforceField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforceField": - """ - Create a :class:`RelatedSalesforceField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforceField(guid=self.guid) - return RelatedSalesforceField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -522,6 +461,11 @@ class SalesforceFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -616,6 +560,7 @@ class SalesforceFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -725,9 +670,6 @@ def _salesforce_field_to_nested( is_incomplete=salesforce_field.is_incomplete, provenance_type=salesforce_field.provenance_type, home_id=salesforce_field.home_id, - depth=salesforce_field.depth, - immediate_upstream=salesforce_field.immediate_upstream, - immediate_downstream=salesforce_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -761,6 +703,7 @@ def _salesforce_field_from_nested(nested: SalesforceFieldNested) -> SalesforceFi updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -769,9 +712,6 @@ def _salesforce_field_from_nested(nested: SalesforceFieldNested) -> SalesforceFi is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -849,6 +789,9 @@ def _salesforce_field_from_nested_bytes(data: bytes, serde: Serde) -> Salesforce SalesforceField.METRICS = RelationField("metrics") SalesforceField.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SalesforceField.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SalesforceField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SalesforceField.MEANINGS = RelationField("meanings") SalesforceField.MC_MONITORS = RelationField("mcMonitors") SalesforceField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/salesforce_object.py b/pyatlan_v9/model/assets/salesforce_object.py index 3afb0f9d7..048d16147 100644 --- a/pyatlan_v9/model/assets/salesforce_object.py +++ b/pyatlan_v9/model/assets/salesforce_object.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,11 +49,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .salesforce_related import ( - RelatedSalesforceField, - RelatedSalesforceObject, - RelatedSalesforceOrganization, -) +from .salesforce_related import RelatedSalesforceField, RelatedSalesforceOrganization from 
.schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -89,6 +86,7 @@ class SalesforceObject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -109,6 +107,8 @@ class SalesforceObject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceObject" + is_custom: Union[bool, None, UnsetType] = UNSET """Whether this object is a custom object (true) or not (false).""" @@ -176,6 +176,11 @@ class SalesforceObject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -246,74 +251,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceObject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.organization is UNSET: - errors.append("organization is required for creation") - if self.organization_qualified_name is UNSET: - errors.append("organization_qualified_name is required for creation") - if errors: - raise ValueError(f"SalesforceObject validation failed: {errors}") - - def minimize(self) -> "SalesforceObject": - """ - Return a minimal copy of this SalesforceObject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceObject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceObject instance with only the minimum required fields. - """ - self.validate() - return SalesforceObject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforceObject": - """ - Create a :class:`RelatedSalesforceObject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceObject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforceObject(guid=self.guid) - return RelatedSalesforceObject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -442,6 +379,11 @@ class SalesforceObjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -539,6 +481,7 @@ class SalesforceObjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -625,9 +568,6 @@ def _salesforce_object_to_nested( is_incomplete=salesforce_object.is_incomplete, provenance_type=salesforce_object.provenance_type, home_id=salesforce_object.home_id, - depth=salesforce_object.depth, - immediate_upstream=salesforce_object.immediate_upstream, - immediate_downstream=salesforce_object.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -661,6 +601,7 @@ def _salesforce_object_from_nested(nested: SalesforceObjectNested) -> Salesforce updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -669,9 +610,6 @@ def _salesforce_object_from_nested(nested: SalesforceObjectNested) -> Salesforce 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_object_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -730,6 +668,9 @@ def _salesforce_object_from_nested_bytes(data: bytes, serde: Serde) -> Salesforc SalesforceObject.METRICS = RelationField("metrics") SalesforceObject.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SalesforceObject.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SalesforceObject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SalesforceObject.MEANINGS = RelationField("meanings") SalesforceObject.MC_MONITORS = RelationField("mcMonitors") SalesforceObject.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/salesforce_organization.py b/pyatlan_v9/model/assets/salesforce_organization.py index ef6f09f68..0cb647394 100644 --- a/pyatlan_v9/model/assets/salesforce_organization.py +++ b/pyatlan_v9/model/assets/salesforce_organization.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ from .salesforce_related import ( RelatedSalesforceDashboard, RelatedSalesforceObject, - RelatedSalesforceOrganization, RelatedSalesforceReport, ) from .schema_registry_related import RelatedSchemaRegistrySubject @@ -86,6 +86,7 @@ class SalesforceOrganization(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None 
DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +107,8 @@ class SalesforceOrganization(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceOrganization" + source_id: Union[str, None, UnsetType] = UNSET """Identifier of the organization in Salesforce.""" @@ -164,6 +167,11 @@ class SalesforceOrganization(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -228,68 +236,6 @@ class SalesforceOrganization(Asset): def __post_init__(self) -> None: self.type_name = "SalesforceOrganization" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceOrganization instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SalesforceOrganization validation failed: {errors}") - - def minimize(self) -> "SalesforceOrganization": - """ - Return a minimal copy of this SalesforceOrganization with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceOrganization with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceOrganization instance with only the minimum required fields. - """ - self.validate() - return SalesforceOrganization( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSalesforceOrganization": - """ - Create a :class:`RelatedSalesforceOrganization` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceOrganization reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforceOrganization(guid=self.guid) - return RelatedSalesforceOrganization(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -409,6 +355,11 @@ class SalesforceOrganizationRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -506,6 +457,7 @@ class SalesforceOrganizationNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -588,9 +540,6 @@ def _salesforce_organization_to_nested( is_incomplete=salesforce_organization.is_incomplete, provenance_type=salesforce_organization.provenance_type, home_id=salesforce_organization.home_id, - depth=salesforce_organization.depth, - immediate_upstream=salesforce_organization.immediate_upstream, - immediate_downstream=salesforce_organization.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -626,6 +575,7 @@ def _salesforce_organization_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -634,9 +584,6 @@ def _salesforce_organization_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_organization_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -697,6 +644,9 @@ def _salesforce_organization_from_nested_bytes( SalesforceOrganization.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SalesforceOrganization.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SalesforceOrganization.MEANINGS = RelationField("meanings") SalesforceOrganization.MC_MONITORS = RelationField("mcMonitors") SalesforceOrganization.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/salesforce_report.py b/pyatlan_v9/model/assets/salesforce_report.py index 52fb426ca..0cd0ca77d 100644 --- a/pyatlan_v9/model/assets/salesforce_report.py +++ b/pyatlan_v9/model/assets/salesforce_report.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,7 +52,6 @@ from .salesforce_related import ( RelatedSalesforceDashboard, RelatedSalesforceOrganization, - RelatedSalesforceReport, ) from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck @@ -88,6 +88,7 @@ class SalesforceReport(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: 
ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -107,6 +108,8 @@ class SalesforceReport(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SalesforceReport" + source_id: Union[str, None, UnsetType] = UNSET """Identifier of the report in Salesforce.""" @@ -171,6 +174,11 @@ class SalesforceReport(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -238,74 +246,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SalesforceReport instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.organization is UNSET: - errors.append("organization is required for creation") - if self.organization_qualified_name is UNSET: - errors.append("organization_qualified_name is required for creation") - if errors: - raise ValueError(f"SalesforceReport validation failed: {errors}") - - def minimize(self) -> "SalesforceReport": - """ - Return a minimal copy of this SalesforceReport with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SalesforceReport with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SalesforceReport instance with only the minimum required fields. - """ - self.validate() - return SalesforceReport(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSalesforceReport": - """ - Create a :class:`RelatedSalesforceReport` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSalesforceReport reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSalesforceReport(guid=self.guid) - return RelatedSalesforceReport(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -431,6 +371,11 @@ class SalesforceReportRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -525,6 +470,7 @@ class SalesforceReportNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -608,9 +554,6 @@ def _salesforce_report_to_nested( is_incomplete=salesforce_report.is_incomplete, provenance_type=salesforce_report.provenance_type, home_id=salesforce_report.home_id, - depth=salesforce_report.depth, - immediate_upstream=salesforce_report.immediate_upstream, - immediate_downstream=salesforce_report.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -644,6 +587,7 @@ def _salesforce_report_from_nested(nested: SalesforceReportNested) -> Salesforce updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -652,9 +596,6 @@ def _salesforce_report_from_nested(nested: SalesforceReportNested) -> Salesforce 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_salesforce_report_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -707,6 +648,9 @@ def _salesforce_report_from_nested_bytes(data: bytes, serde: Serde) -> Salesforc SalesforceReport.METRICS = RelationField("metrics") SalesforceReport.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SalesforceReport.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SalesforceReport.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SalesforceReport.MEANINGS = RelationField("meanings") SalesforceReport.MC_MONITORS = RelationField("mcMonitors") SalesforceReport.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap.py b/pyatlan_v9/model/assets/sap.py index ef70bb332..71df966be 100644 --- a/pyatlan_v9/model/assets/sap.py +++ b/pyatlan_v9/model/assets/sap.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,7 +48,6 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import RelatedSAP from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -85,6 +85,7 @@ class SAP(Asset): METRICS: ClassVar[Any] = 
None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +103,8 @@ class SAP(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SAP" + sap_technical_name: Union[str, None, UnsetType] = UNSET """Technical identifier for SAP data objects, used for integration and internal reference.""" @@ -172,6 +175,11 @@ class SAP(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -227,66 +235,6 @@ class SAP(Asset): def __post_init__(self) -> None: self.type_name = "SAP" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SAP instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SAP validation failed: {errors}") - - def minimize(self) -> "SAP": - """ - Return a minimal copy of this SAP with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SAP with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SAP instance with only the minimum required fields. - """ - self.validate() - return SAP(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSAP": - """ - Create a :class:`RelatedSAP` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSAP reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSAP(guid=self.guid) - return RelatedSAP(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -416,6 +364,11 @@ class SAPRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -498,6 +451,7 @@ class SAPNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -577,9 +531,6 @@ def _sap_to_nested(sap: SAP) -> SAPNested: is_incomplete=sap.is_incomplete, provenance_type=sap.provenance_type, home_id=sap.home_id, - depth=sap.depth, - immediate_upstream=sap.immediate_upstream, - immediate_downstream=sap.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -609,6 +560,7 @@ def _sap_from_nested(nested: SAPNested) -> SAP: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -617,9 +569,6 @@ def _sap_from_nested(nested: SAPNested) -> SAP: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_sap_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -668,6 +617,9 @@ def _sap_from_nested_bytes(data: bytes, serde: Serde) -> SAP: SAP.METRICS = RelationField("metrics") SAP.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SAP.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SAP.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SAP.MEANINGS = RelationField("meanings") SAP.MC_MONITORS = RelationField("mcMonitors") SAP.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap_erp_abap_program.py b/pyatlan_v9/model/assets/sap_erp_abap_program.py index 1b0c7885d..2f378efb9 100644 --- a/pyatlan_v9/model/assets/sap_erp_abap_program.py +++ b/pyatlan_v9/model/assets/sap_erp_abap_program.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .sap_related import ( - RelatedSapErpAbapProgram, RelatedSapErpComponent, RelatedSapErpFunctionModule, RelatedSapErpTransactionCode, @@ -91,6 +91,7 @@ class SapErpAbapProgram(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +112,8 @@ class SapErpAbapProgram(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SapErpAbapProgram" + sap_erp_abap_program_type: Union[str, None, UnsetType] = UNSET """Specifies the type of ABAP program in SAP ERP (e.g., Report, Module Pool, Function Group).""" @@ -184,6 +187,11 @@ class SapErpAbapProgram(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -252,66 +260,6 @@ class SapErpAbapProgram(Asset): def __post_init__(self) -> None: self.type_name = "SapErpAbapProgram" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpAbapProgram instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpAbapProgram validation failed: {errors}") - - def minimize(self) -> "SapErpAbapProgram": - """ - Return a minimal copy of this SapErpAbapProgram with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpAbapProgram with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpAbapProgram instance with only the minimum required fields. - """ - self.validate() - return SapErpAbapProgram(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpAbapProgram": - """ - Create a :class:`RelatedSapErpAbapProgram` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpAbapProgram reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpAbapProgram(guid=self.guid) - return RelatedSapErpAbapProgram(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -446,6 +394,11 @@ class SapErpAbapProgramRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -547,6 +500,7 @@ class SapErpAbapProgramNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -637,9 +591,6 @@ def _sap_erp_abap_program_to_nested( is_incomplete=sap_erp_abap_program.is_incomplete, provenance_type=sap_erp_abap_program.provenance_type, home_id=sap_erp_abap_program.home_id, - depth=sap_erp_abap_program.depth, - immediate_upstream=sap_erp_abap_program.immediate_upstream, - immediate_downstream=sap_erp_abap_program.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -675,6 +626,7 @@ def _sap_erp_abap_program_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -683,9 +635,6 @@ def _sap_erp_abap_program_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_abap_program_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -751,6 +700,9 @@ def _sap_erp_abap_program_from_nested_bytes( SapErpAbapProgram.METRICS = RelationField("metrics") SapErpAbapProgram.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SapErpAbapProgram.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SapErpAbapProgram.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SapErpAbapProgram.MEANINGS = RelationField("meanings") SapErpAbapProgram.MC_MONITORS = RelationField("mcMonitors") SapErpAbapProgram.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap_erp_cds_view.py b/pyatlan_v9/model/assets/sap_erp_cds_view.py index 5f0e20031..fa5081b75 100644 --- a/pyatlan_v9/model/assets/sap_erp_cds_view.py +++ b/pyatlan_v9/model/assets/sap_erp_cds_view.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,11 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import ( - RelatedSapErpCdsView, - RelatedSapErpColumn, - RelatedSapErpComponent, -) +from .sap_related import RelatedSapErpColumn, RelatedSapErpComponent from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from 
.spark_related import RelatedSparkJob @@ -67,10 +64,9 @@ class SapErpCdsView(Asset): Instance of a SAP CDS View in Atlan. """ - SAP_ERP_CDS_VIEW_TECHNICAL_NAME: ClassVar[Any] = None - SAP_ERP_CDS_VIEW_SOURCE_NAME: ClassVar[Any] = None - SAP_ERP_CDS_VIEW_SOURCE_TYPE: ClassVar[Any] = None SAP_TECHNICAL_NAME: ClassVar[Any] = None + SAP_SOURCE_NAME: ClassVar[Any] = None + SAP_SOURCE_TYPE: ClassVar[Any] = None SAP_LOGICAL_NAME: ClassVar[Any] = None SAP_PACKAGE_NAME: ClassVar[Any] = None SAP_COMPONENT_NAME: ClassVar[Any] = None @@ -92,6 +88,7 @@ class SapErpCdsView(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,18 +108,17 @@ class SapErpCdsView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sap_erp_cds_view_technical_name: Union[str, None, UnsetType] = UNSET - """The technical database view name of the SAP ERP CDS View.""" + type_name: Union[str, UnsetType] = "SapErpCdsView" + + sap_technical_name: Union[str, None, UnsetType] = UNSET + """Technical identifier for SAP data objects, used for integration and internal reference.""" - sap_erp_cds_view_source_name: Union[str, None, UnsetType] = UNSET + sap_source_name: Union[str, None, UnsetType] = UNSET """The source name of the SAP ERP CDS View Definition.""" - sap_erp_cds_view_source_type: Union[str, None, UnsetType] = UNSET + sap_source_type: Union[str, None, UnsetType] = UNSET """The source type of the SAP ERP CDS View Definition.""" - sap_technical_name: Union[str, None, UnsetType] = UNSET - """Technical identifier for SAP data objects, used for integration and internal reference.""" - sap_logical_name: Union[str, None, UnsetType] = UNSET """Logical, business-friendly identifier for SAP data objects, aligned with 
business terminology and concepts.""" @@ -190,6 +186,11 @@ class SapErpCdsView(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -251,66 +252,6 @@ class SapErpCdsView(Asset): def __post_init__(self) -> None: self.type_name = "SapErpCdsView" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpCdsView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpCdsView validation failed: {errors}") - - def minimize(self) -> "SapErpCdsView": - """ - Return a minimal copy of this SapErpCdsView with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpCdsView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpCdsView instance with only the minimum required fields. - """ - self.validate() - return SapErpCdsView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpCdsView": - """ - Create a :class:`RelatedSapErpCdsView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpCdsView reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSapErpCdsView(guid=self.guid) - return RelatedSapErpCdsView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -366,18 +307,15 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SapErpCdsVi class SapErpCdsViewAttributes(AssetAttributes): """SapErpCdsView-specific attributes for nested API format.""" - sap_erp_cds_view_technical_name: Union[str, None, UnsetType] = UNSET - """The technical database view name of the SAP ERP CDS View.""" + sap_technical_name: Union[str, None, UnsetType] = UNSET + """Technical identifier for SAP data objects, used for integration and internal reference.""" - sap_erp_cds_view_source_name: Union[str, None, UnsetType] = UNSET + sap_source_name: Union[str, None, UnsetType] = UNSET """The source name of the SAP ERP CDS View Definition.""" - sap_erp_cds_view_source_type: Union[str, None, UnsetType] = UNSET + sap_source_type: Union[str, None, UnsetType] = UNSET """The source type of the SAP ERP CDS View Definition.""" - sap_technical_name: Union[str, 
None, UnsetType] = UNSET - """Technical identifier for SAP data objects, used for integration and internal reference.""" - sap_logical_name: Union[str, None, UnsetType] = UNSET """Logical, business-friendly identifier for SAP data objects, aligned with business terminology and concepts.""" @@ -449,6 +387,11 @@ class SapErpCdsViewRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -543,6 +486,7 @@ class SapErpCdsViewNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -569,10 +513,9 @@ def _populate_sap_erp_cds_view_attrs( ) -> None: """Populate SapErpCdsView-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sap_erp_cds_view_technical_name = obj.sap_erp_cds_view_technical_name - attrs.sap_erp_cds_view_source_name = obj.sap_erp_cds_view_source_name - attrs.sap_erp_cds_view_source_type = obj.sap_erp_cds_view_source_type attrs.sap_technical_name = obj.sap_technical_name + attrs.sap_source_name = obj.sap_source_name + attrs.sap_source_type = obj.sap_source_type attrs.sap_logical_name = obj.sap_logical_name attrs.sap_package_name = obj.sap_package_name attrs.sap_component_name = obj.sap_component_name @@ -585,10 +528,9 @@ def _populate_sap_erp_cds_view_attrs( def _extract_sap_erp_cds_view_attrs(attrs: SapErpCdsViewAttributes) -> dict: """Extract all SapErpCdsView attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sap_erp_cds_view_technical_name"] = attrs.sap_erp_cds_view_technical_name - 
result["sap_erp_cds_view_source_name"] = attrs.sap_erp_cds_view_source_name - result["sap_erp_cds_view_source_type"] = attrs.sap_erp_cds_view_source_type result["sap_technical_name"] = attrs.sap_technical_name + result["sap_source_name"] = attrs.sap_source_name + result["sap_source_type"] = attrs.sap_source_type result["sap_logical_name"] = attrs.sap_logical_name result["sap_package_name"] = attrs.sap_package_name result["sap_component_name"] = attrs.sap_component_name @@ -634,9 +576,6 @@ def _sap_erp_cds_view_to_nested(sap_erp_cds_view: SapErpCdsView) -> SapErpCdsVie is_incomplete=sap_erp_cds_view.is_incomplete, provenance_type=sap_erp_cds_view.provenance_type, home_id=sap_erp_cds_view.home_id, - depth=sap_erp_cds_view.depth, - immediate_upstream=sap_erp_cds_view.immediate_upstream, - immediate_downstream=sap_erp_cds_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -670,6 +609,7 @@ def _sap_erp_cds_view_from_nested(nested: SapErpCdsViewNested) -> SapErpCdsView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -678,9 +618,6 @@ def _sap_erp_cds_view_from_nested(nested: SapErpCdsViewNested) -> SapErpCdsView: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_cds_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -709,16 +646,9 @@ def _sap_erp_cds_view_from_nested_bytes(data: bytes, serde: Serde) -> SapErpCdsV RelationField, ) -SapErpCdsView.SAP_ERP_CDS_VIEW_TECHNICAL_NAME = KeywordField( - "sapErpCdsViewTechnicalName", "sapErpCdsViewTechnicalName" -) 
-SapErpCdsView.SAP_ERP_CDS_VIEW_SOURCE_NAME = KeywordField( - "sapErpCdsViewSourceName", "sapErpCdsViewSourceName" -) -SapErpCdsView.SAP_ERP_CDS_VIEW_SOURCE_TYPE = KeywordField( - "sapErpCdsViewSourceType", "sapErpCdsViewSourceType" -) SapErpCdsView.SAP_TECHNICAL_NAME = KeywordField("sapTechnicalName", "sapTechnicalName") +SapErpCdsView.SAP_SOURCE_NAME = KeywordField("sapSourceName", "sapSourceName") +SapErpCdsView.SAP_SOURCE_TYPE = KeywordField("sapSourceType", "sapSourceType") SapErpCdsView.SAP_LOGICAL_NAME = KeywordField("sapLogicalName", "sapLogicalName") SapErpCdsView.SAP_PACKAGE_NAME = KeywordField("sapPackageName", "sapPackageName") SapErpCdsView.SAP_COMPONENT_NAME = KeywordField("sapComponentName", "sapComponentName") @@ -744,6 +674,9 @@ def _sap_erp_cds_view_from_nested_bytes(data: bytes, serde: Serde) -> SapErpCdsV SapErpCdsView.METRICS = RelationField("metrics") SapErpCdsView.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SapErpCdsView.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SapErpCdsView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SapErpCdsView.MEANINGS = RelationField("meanings") SapErpCdsView.MC_MONITORS = RelationField("mcMonitors") SapErpCdsView.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap_erp_column.py b/pyatlan_v9/model/assets/sap_erp_column.py index 8417cfe82..2e68196d8 100644 --- a/pyatlan_v9/model/assets/sap_erp_column.py +++ b/pyatlan_v9/model/assets/sap_erp_column.py @@ -48,6 +48,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -55,12 +56,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from 
.resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import ( - RelatedSapErpCdsView, - RelatedSapErpColumn, - RelatedSapErpTable, - RelatedSapErpView, -) +from .sap_related import RelatedSapErpCdsView, RelatedSapErpTable, RelatedSapErpView from .schema_registry_related import RelatedSchemaRegistrySubject from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck @@ -81,21 +77,21 @@ class SapErpColumn(Asset): Instance of a SAP Column in Atlan. """ - SAP_ERP_COLUMN_DATA_ELEMENT: ClassVar[Any] = None - SAP_ERP_COLUMN_LOGICAL_DATA_TYPE: ClassVar[Any] = None - SAP_ERP_COLUMN_LENGTH: ClassVar[Any] = None - SAP_ERP_COLUMN_DECIMALS: ClassVar[Any] = None - SAP_ERP_COLUMN_IS_PRIMARY: ClassVar[Any] = None - SAP_ERP_COLUMN_IS_FOREIGN: ClassVar[Any] = None - SAP_ERP_COLUMN_IS_MANDATORY: ClassVar[Any] = None + SAP_DATA_ELEMENT: ClassVar[Any] = None + SAP_LOGICAL_DATA_TYPE: ClassVar[Any] = None + SAP_LENGTH: ClassVar[Any] = None + SAP_DECIMALS: ClassVar[Any] = None + SAP_IS_PRIMARY: ClassVar[Any] = None + SAP_IS_FOREIGN: ClassVar[Any] = None + SAP_IS_MANDATORY: ClassVar[Any] = None SAP_ERP_TABLE_NAME: ClassVar[Any] = None SAP_ERP_TABLE_QUALIFIED_NAME: ClassVar[Any] = None SAP_ERP_VIEW_NAME: ClassVar[Any] = None SAP_ERP_VIEW_QUALIFIED_NAME: ClassVar[Any] = None SAP_ERP_CDS_VIEW_NAME: ClassVar[Any] = None SAP_ERP_CDS_VIEW_QUALIFIED_NAME: ClassVar[Any] = None - SAP_ERP_COLUMN_CHECK_TABLE_NAME: ClassVar[Any] = None - SAP_ERP_COLUMN_CHECK_TABLE_QUALIFIED_NAME: ClassVar[Any] = None + SAP_CHECK_TABLE_NAME: ClassVar[Any] = None + SAP_CHECK_TABLE_QUALIFIED_NAME: ClassVar[Any] = None SAP_TECHNICAL_NAME: ClassVar[Any] = None SAP_LOGICAL_NAME: ClassVar[Any] = None SAP_PACKAGE_NAME: ClassVar[Any] = None @@ -148,6 +144,7 @@ class SapErpColumn(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: 
ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -172,25 +169,27 @@ class SapErpColumn(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - sap_erp_column_data_element: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SapErpColumn" + + sap_data_element: Union[str, None, UnsetType] = UNSET """Represents the SAP ERP data element, providing semantic information about the column.""" - sap_erp_column_logical_data_type: Union[str, None, UnsetType] = UNSET + sap_logical_data_type: Union[str, None, UnsetType] = UNSET """Specifies the logical data type of values in this SAP ERP column""" - sap_erp_column_length: Union[str, None, UnsetType] = UNSET + sap_length: Union[str, None, UnsetType] = UNSET """Indicates the maximum length of the values that the SAP ERP column can store.""" - sap_erp_column_decimals: Union[str, None, UnsetType] = UNSET + sap_decimals: Union[str, None, UnsetType] = UNSET """Defines the number of decimal places allowed for numeric values in the SAP ERP column.""" - sap_erp_column_is_primary: Union[bool, None, UnsetType] = UNSET + sap_is_primary: Union[bool, None, UnsetType] = UNSET """When true, this column is the primary key for the SAP ERP table or view.""" - sap_erp_column_is_foreign: Union[bool, None, UnsetType] = UNSET + sap_is_foreign: Union[bool, None, UnsetType] = UNSET """When true, this column is the foreign key for the SAP ERP table or view.""" - sap_erp_column_is_mandatory: Union[bool, None, UnsetType] = UNSET + sap_is_mandatory: Union[bool, None, UnsetType] = UNSET """When true, the values in this column can be null.""" sap_erp_table_name: Union[str, None, UnsetType] = UNSET @@ -211,10 +210,10 @@ class SapErpColumn(Asset): sap_erp_cds_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP CDS view in which this column asset exists.""" - 
sap_erp_column_check_table_name: Union[str, None, UnsetType] = UNSET + sap_check_table_name: Union[str, None, UnsetType] = UNSET """Defines the SAP ERP table name used as a foreign key reference to validate permissible values for this column.""" - sap_erp_column_check_table_qualified_name: Union[str, None, UnsetType] = UNSET + sap_check_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP Table used as a foreign key reference to validate permissible values for this column.""" sap_technical_name: Union[str, None, UnsetType] = UNSET @@ -381,6 +380,11 @@ class SapErpColumn(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -471,76 +475,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sap_erp_table is UNSET: - errors.append("sap_erp_table is required for creation") - if self.sap_erp_table_name is UNSET: - errors.append("sap_erp_table_name is required for creation") - if self.sap_erp_table_qualified_name is UNSET: - errors.append("sap_erp_table_qualified_name is required for creation") - if errors: - raise ValueError(f"SapErpColumn validation failed: {errors}") - - def minimize(self) -> "SapErpColumn": - """ - Return a minimal copy of this SapErpColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpColumn instance with only the minimum required fields. - """ - self.validate() - return SapErpColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpColumn": - """ - Create a :class:`RelatedSapErpColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpColumn reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpColumn(guid=self.guid) - return RelatedSapErpColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -596,25 +530,25 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SapErpColum class SapErpColumnAttributes(AssetAttributes): """SapErpColumn-specific attributes for nested API format.""" - sap_erp_column_data_element: Union[str, None, UnsetType] = UNSET + sap_data_element: Union[str, None, UnsetType] = UNSET """Represents the SAP ERP data element, providing semantic information about the column.""" - sap_erp_column_logical_data_type: Union[str, None, UnsetType] = UNSET + sap_logical_data_type: Union[str, None, UnsetType] = UNSET """Specifies the logical data type of values in this SAP ERP column""" - sap_erp_column_length: Union[str, None, UnsetType] = UNSET + sap_length: Union[str, None, UnsetType] = UNSET """Indicates the maximum length of the values that the SAP ERP column can store.""" - sap_erp_column_decimals: Union[str, None, UnsetType] = UNSET + sap_decimals: Union[str, None, UnsetType] = UNSET """Defines the number of decimal places allowed for numeric values in the SAP ERP column.""" - sap_erp_column_is_primary: Union[bool, None, UnsetType] = UNSET + sap_is_primary: Union[bool, None, UnsetType] = UNSET """When true, this column is the primary key for the SAP ERP table or view.""" - sap_erp_column_is_foreign: Union[bool, None, UnsetType] = UNSET + sap_is_foreign: Union[bool, None, UnsetType] = UNSET """When true, this column is the foreign key for the SAP ERP table or view.""" - sap_erp_column_is_mandatory: Union[bool, None, UnsetType] = UNSET + sap_is_mandatory: Union[bool, None, UnsetType] = UNSET """When true, the values in this column can be null.""" sap_erp_table_name: 
Union[str, None, UnsetType] = UNSET @@ -635,10 +569,10 @@ class SapErpColumnAttributes(AssetAttributes): sap_erp_cds_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP CDS view in which this column asset exists.""" - sap_erp_column_check_table_name: Union[str, None, UnsetType] = UNSET + sap_check_table_name: Union[str, None, UnsetType] = UNSET """Defines the SAP ERP table name used as a foreign key reference to validate permissible values for this column.""" - sap_erp_column_check_table_qualified_name: Union[str, None, UnsetType] = UNSET + sap_check_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP Table used as a foreign key reference to validate permissible values for this column.""" sap_technical_name: Union[str, None, UnsetType] = UNSET @@ -809,6 +743,11 @@ class SapErpColumnRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -932,6 +871,7 @@ class SapErpColumnNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -963,23 +903,21 @@ def _populate_sap_erp_column_attrs( ) -> None: """Populate SapErpColumn-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sap_erp_column_data_element = obj.sap_erp_column_data_element - attrs.sap_erp_column_logical_data_type = obj.sap_erp_column_logical_data_type - attrs.sap_erp_column_length = obj.sap_erp_column_length - attrs.sap_erp_column_decimals = 
obj.sap_erp_column_decimals - attrs.sap_erp_column_is_primary = obj.sap_erp_column_is_primary - attrs.sap_erp_column_is_foreign = obj.sap_erp_column_is_foreign - attrs.sap_erp_column_is_mandatory = obj.sap_erp_column_is_mandatory + attrs.sap_data_element = obj.sap_data_element + attrs.sap_logical_data_type = obj.sap_logical_data_type + attrs.sap_length = obj.sap_length + attrs.sap_decimals = obj.sap_decimals + attrs.sap_is_primary = obj.sap_is_primary + attrs.sap_is_foreign = obj.sap_is_foreign + attrs.sap_is_mandatory = obj.sap_is_mandatory attrs.sap_erp_table_name = obj.sap_erp_table_name attrs.sap_erp_table_qualified_name = obj.sap_erp_table_qualified_name attrs.sap_erp_view_name = obj.sap_erp_view_name attrs.sap_erp_view_qualified_name = obj.sap_erp_view_qualified_name attrs.sap_erp_cds_view_name = obj.sap_erp_cds_view_name attrs.sap_erp_cds_view_qualified_name = obj.sap_erp_cds_view_qualified_name - attrs.sap_erp_column_check_table_name = obj.sap_erp_column_check_table_name - attrs.sap_erp_column_check_table_qualified_name = ( - obj.sap_erp_column_check_table_qualified_name - ) + attrs.sap_check_table_name = obj.sap_check_table_name + attrs.sap_check_table_qualified_name = obj.sap_check_table_qualified_name attrs.sap_technical_name = obj.sap_technical_name attrs.sap_logical_name = obj.sap_logical_name attrs.sap_package_name = obj.sap_package_name @@ -1021,23 +959,21 @@ def _populate_sap_erp_column_attrs( def _extract_sap_erp_column_attrs(attrs: SapErpColumnAttributes) -> dict: """Extract all SapErpColumn attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sap_erp_column_data_element"] = attrs.sap_erp_column_data_element - result["sap_erp_column_logical_data_type"] = attrs.sap_erp_column_logical_data_type - result["sap_erp_column_length"] = attrs.sap_erp_column_length - result["sap_erp_column_decimals"] = attrs.sap_erp_column_decimals - result["sap_erp_column_is_primary"] = attrs.sap_erp_column_is_primary - 
result["sap_erp_column_is_foreign"] = attrs.sap_erp_column_is_foreign - result["sap_erp_column_is_mandatory"] = attrs.sap_erp_column_is_mandatory + result["sap_data_element"] = attrs.sap_data_element + result["sap_logical_data_type"] = attrs.sap_logical_data_type + result["sap_length"] = attrs.sap_length + result["sap_decimals"] = attrs.sap_decimals + result["sap_is_primary"] = attrs.sap_is_primary + result["sap_is_foreign"] = attrs.sap_is_foreign + result["sap_is_mandatory"] = attrs.sap_is_mandatory result["sap_erp_table_name"] = attrs.sap_erp_table_name result["sap_erp_table_qualified_name"] = attrs.sap_erp_table_qualified_name result["sap_erp_view_name"] = attrs.sap_erp_view_name result["sap_erp_view_qualified_name"] = attrs.sap_erp_view_qualified_name result["sap_erp_cds_view_name"] = attrs.sap_erp_cds_view_name result["sap_erp_cds_view_qualified_name"] = attrs.sap_erp_cds_view_qualified_name - result["sap_erp_column_check_table_name"] = attrs.sap_erp_column_check_table_name - result["sap_erp_column_check_table_qualified_name"] = ( - attrs.sap_erp_column_check_table_qualified_name - ) + result["sap_check_table_name"] = attrs.sap_check_table_name + result["sap_check_table_qualified_name"] = attrs.sap_check_table_qualified_name result["sap_technical_name"] = attrs.sap_technical_name result["sap_logical_name"] = attrs.sap_logical_name result["sap_package_name"] = attrs.sap_package_name @@ -1116,9 +1052,6 @@ def _sap_erp_column_to_nested(sap_erp_column: SapErpColumn) -> SapErpColumnNeste is_incomplete=sap_erp_column.is_incomplete, provenance_type=sap_erp_column.provenance_type, home_id=sap_erp_column.home_id, - depth=sap_erp_column.depth, - immediate_upstream=sap_erp_column.immediate_upstream, - immediate_downstream=sap_erp_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1152,6 +1085,7 @@ def _sap_erp_column_from_nested(nested: SapErpColumnNested) -> SapErpColumn: 
updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1160,9 +1094,6 @@ def _sap_erp_column_from_nested(nested: SapErpColumnNested) -> SapErpColumn: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1193,27 +1124,15 @@ def _sap_erp_column_from_nested_bytes(data: bytes, serde: Serde) -> SapErpColumn RelationField, ) -SapErpColumn.SAP_ERP_COLUMN_DATA_ELEMENT = KeywordField( - "sapErpColumnDataElement", "sapErpColumnDataElement" -) -SapErpColumn.SAP_ERP_COLUMN_LOGICAL_DATA_TYPE = KeywordField( - "sapErpColumnLogicalDataType", "sapErpColumnLogicalDataType" -) -SapErpColumn.SAP_ERP_COLUMN_LENGTH = KeywordField( - "sapErpColumnLength", "sapErpColumnLength" -) -SapErpColumn.SAP_ERP_COLUMN_DECIMALS = KeywordField( - "sapErpColumnDecimals", "sapErpColumnDecimals" -) -SapErpColumn.SAP_ERP_COLUMN_IS_PRIMARY = BooleanField( - "sapErpColumnIsPrimary", "sapErpColumnIsPrimary" -) -SapErpColumn.SAP_ERP_COLUMN_IS_FOREIGN = BooleanField( - "sapErpColumnIsForeign", "sapErpColumnIsForeign" -) -SapErpColumn.SAP_ERP_COLUMN_IS_MANDATORY = BooleanField( - "sapErpColumnIsMandatory", "sapErpColumnIsMandatory" +SapErpColumn.SAP_DATA_ELEMENT = KeywordField("sapDataElement", "sapDataElement") +SapErpColumn.SAP_LOGICAL_DATA_TYPE = KeywordField( + "sapLogicalDataType", "sapLogicalDataType" ) +SapErpColumn.SAP_LENGTH = KeywordField("sapLength", "sapLength") +SapErpColumn.SAP_DECIMALS = KeywordField("sapDecimals", "sapDecimals") +SapErpColumn.SAP_IS_PRIMARY = BooleanField("sapIsPrimary", "sapIsPrimary") 
+SapErpColumn.SAP_IS_FOREIGN = BooleanField("sapIsForeign", "sapIsForeign") +SapErpColumn.SAP_IS_MANDATORY = BooleanField("sapIsMandatory", "sapIsMandatory") SapErpColumn.SAP_ERP_TABLE_NAME = KeywordField("sapErpTableName", "sapErpTableName") SapErpColumn.SAP_ERP_TABLE_QUALIFIED_NAME = KeywordTextField( "sapErpTableQualifiedName", @@ -1232,11 +1151,11 @@ def _sap_erp_column_from_nested_bytes(data: bytes, serde: Serde) -> SapErpColumn "sapErpCdsViewQualifiedName", "sapErpCdsViewQualifiedName.text", ) -SapErpColumn.SAP_ERP_COLUMN_CHECK_TABLE_NAME = KeywordField( - "sapErpColumnCheckTableName", "sapErpColumnCheckTableName" +SapErpColumn.SAP_CHECK_TABLE_NAME = KeywordField( + "sapCheckTableName", "sapCheckTableName" ) -SapErpColumn.SAP_ERP_COLUMN_CHECK_TABLE_QUALIFIED_NAME = KeywordField( - "sapErpColumnCheckTableQualifiedName", "sapErpColumnCheckTableQualifiedName" +SapErpColumn.SAP_CHECK_TABLE_QUALIFIED_NAME = KeywordField( + "sapCheckTableQualifiedName", "sapCheckTableQualifiedName" ) SapErpColumn.SAP_TECHNICAL_NAME = KeywordField("sapTechnicalName", "sapTechnicalName") SapErpColumn.SAP_LOGICAL_NAME = KeywordField("sapLogicalName", "sapLogicalName") @@ -1321,6 +1240,9 @@ def _sap_erp_column_from_nested_bytes(data: bytes, serde: Serde) -> SapErpColumn SapErpColumn.DBT_SOURCES = RelationField("dbtSources") SapErpColumn.SQL_DBT_SOURCES = RelationField("sqlDBTSources") SapErpColumn.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +SapErpColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SapErpColumn.MEANINGS = RelationField("meanings") SapErpColumn.MC_MONITORS = RelationField("mcMonitors") SapErpColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap_erp_component.py b/pyatlan_v9/model/assets/sap_erp_component.py index cad1809c4..efaa56beb 100644 --- a/pyatlan_v9/model/assets/sap_erp_component.py +++ b/pyatlan_v9/model/assets/sap_erp_component.py @@ -40,6 +40,7 @@ from 
.data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -93,6 +94,7 @@ class SapErpComponent(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -118,6 +120,8 @@ class SapErpComponent(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SapErpComponent" + sap_technical_name: Union[str, None, UnsetType] = UNSET """Technical identifier for SAP data objects, used for integration and internal reference.""" @@ -188,6 +192,11 @@ class SapErpComponent(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -273,66 +282,6 @@ class SapErpComponent(Asset): def __post_init__(self) -> None: self.type_name = "SapErpComponent" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpComponent instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpComponent validation failed: {errors}") - - def minimize(self) -> "SapErpComponent": - """ - Return a minimal copy of this SapErpComponent with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpComponent with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpComponent instance with only the minimum required fields. - """ - self.validate() - return SapErpComponent(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpComponent": - """ - Create a :class:`RelatedSapErpComponent` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpComponent reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpComponent(guid=self.guid) - return RelatedSapErpComponent(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -464,6 +413,11 @@ class SapErpComponentRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -582,6 +536,7 @@ class SapErpComponentNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -675,9 +630,6 @@ def _sap_erp_component_to_nested( is_incomplete=sap_erp_component.is_incomplete, provenance_type=sap_erp_component.provenance_type, home_id=sap_erp_component.home_id, - depth=sap_erp_component.depth, - immediate_upstream=sap_erp_component.immediate_upstream, - immediate_downstream=sap_erp_component.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -711,6 +663,7 @@ def _sap_erp_component_from_nested(nested: SapErpComponentNested) -> SapErpCompo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -719,9 +672,6 @@ def _sap_erp_component_from_nested(nested: SapErpComponentNested) -> SapErpCompo is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_component_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -782,6 +732,9 @@ def _sap_erp_component_from_nested_bytes(data: bytes, serde: Serde) -> SapErpCom SapErpComponent.METRICS = RelationField("metrics") SapErpComponent.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SapErpComponent.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SapErpComponent.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SapErpComponent.MEANINGS = RelationField("meanings") SapErpComponent.MC_MONITORS = RelationField("mcMonitors") SapErpComponent.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap_erp_function_module.py b/pyatlan_v9/model/assets/sap_erp_function_module.py index 366ee605f..7ee5ec060 100644 --- a/pyatlan_v9/model/assets/sap_erp_function_module.py +++ b/pyatlan_v9/model/assets/sap_erp_function_module.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,11 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import ( - RelatedSapErpAbapProgram, - RelatedSapErpComponent, - RelatedSapErpFunctionModule, -) +from .sap_related import RelatedSapErpAbapProgram, RelatedSapErpComponent from 
.schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -67,11 +64,11 @@ class SapErpFunctionModule(Asset): Instance of a SAP Function in Atlan. """ - SAP_ERP_FUNCTION_MODULE_GROUP: ClassVar[Any] = None + SAP_GROUP: ClassVar[Any] = None SAP_ERP_FUNCTION_MODULE_IMPORT_PARAMS: ClassVar[Any] = None - SAP_ERP_FUNCTION_MODULE_IMPORT_PARAMS_COUNT: ClassVar[Any] = None + SAP_IMPORT_PARAMS_COUNT: ClassVar[Any] = None SAP_ERP_FUNCTION_MODULE_EXPORT_PARAMS: ClassVar[Any] = None - SAP_ERP_FUNCTION_MODULE_EXPORT_PARAMS_COUNT: ClassVar[Any] = None + SAP_EXPORT_PARAMS_COUNT: ClassVar[Any] = None SAP_ERP_FUNCTION_EXCEPTION_LIST: ClassVar[Any] = None SAP_ERP_FUNCTION_EXCEPTION_LIST_COUNT: ClassVar[Any] = None SAP_TECHNICAL_NAME: ClassVar[Any] = None @@ -96,6 +93,7 @@ class SapErpFunctionModule(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -115,7 +113,9 @@ class SapErpFunctionModule(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sap_erp_function_module_group: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SapErpFunctionModule" + + sap_group: Union[str, None, UnsetType] = UNSET """Represents the group to which the SAP ERP function module belongs.""" sap_erp_function_module_import_params: Union[ @@ -123,7 +123,7 @@ class SapErpFunctionModule(Asset): ] = UNSET """Parameters imported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_import_params_count: Union[int, None, UnsetType] = UNSET + sap_import_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Import Parameters in a given SAP ERP Function 
Module.""" sap_erp_function_module_export_params: Union[ @@ -131,7 +131,7 @@ class SapErpFunctionModule(Asset): ] = UNSET """Parameters exported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_export_params_count: Union[int, None, UnsetType] = UNSET + sap_export_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Export Parameters in a given SAP ERP Function Module.""" sap_erp_function_exception_list: Union[List[Dict[str, str]], None, UnsetType] = ( @@ -212,6 +212,11 @@ class SapErpFunctionModule(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -273,66 +278,6 @@ class SapErpFunctionModule(Asset): def __post_init__(self) -> None: self.type_name = "SapErpFunctionModule" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpFunctionModule instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpFunctionModule validation failed: {errors}") - - def minimize(self) -> "SapErpFunctionModule": - """ - Return a minimal copy of this SapErpFunctionModule with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpFunctionModule with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpFunctionModule instance with only the minimum required fields. - """ - self.validate() - return SapErpFunctionModule(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpFunctionModule": - """ - Create a :class:`RelatedSapErpFunctionModule` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpFunctionModule reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpFunctionModule(guid=self.guid) - return RelatedSapErpFunctionModule(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -390,7 +335,7 @@ def from_json( class SapErpFunctionModuleAttributes(AssetAttributes): """SapErpFunctionModule-specific attributes for nested API format.""" - sap_erp_function_module_group: Union[str, None, UnsetType] = UNSET + sap_group: Union[str, None, UnsetType] = UNSET """Represents the group to which the SAP ERP function module belongs.""" sap_erp_function_module_import_params: Union[ @@ -398,7 +343,7 @@ class SapErpFunctionModuleAttributes(AssetAttributes): ] = UNSET """Parameters imported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_import_params_count: Union[int, None, UnsetType] = UNSET + sap_import_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Import Parameters in a given SAP ERP Function Module.""" sap_erp_function_module_export_params: Union[ @@ -406,7 +351,7 @@ class SapErpFunctionModuleAttributes(AssetAttributes): ] = UNSET """Parameters exported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_export_params_count: Union[int, None, UnsetType] = UNSET + sap_export_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Export Parameters in a given SAP ERP Function Module.""" sap_erp_function_exception_list: Union[List[Dict[str, str]], None, UnsetType] = ( @@ -491,6 +436,11 @@ class SapErpFunctionModuleRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex 
entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -585,6 +535,7 @@ class SapErpFunctionModuleNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -611,19 +562,15 @@ def _populate_sap_erp_function_module_attrs( ) -> None: """Populate SapErpFunctionModule-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sap_erp_function_module_group = obj.sap_erp_function_module_group + attrs.sap_group = obj.sap_group attrs.sap_erp_function_module_import_params = ( obj.sap_erp_function_module_import_params ) - attrs.sap_erp_function_module_import_params_count = ( - obj.sap_erp_function_module_import_params_count - ) + attrs.sap_import_params_count = obj.sap_import_params_count attrs.sap_erp_function_module_export_params = ( obj.sap_erp_function_module_export_params ) - attrs.sap_erp_function_module_export_params_count = ( - obj.sap_erp_function_module_export_params_count - ) + attrs.sap_export_params_count = obj.sap_export_params_count attrs.sap_erp_function_exception_list = obj.sap_erp_function_exception_list attrs.sap_erp_function_exception_list_count = ( obj.sap_erp_function_exception_list_count @@ -643,19 +590,15 @@ def _extract_sap_erp_function_module_attrs( ) -> dict: """Extract all SapErpFunctionModule attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sap_erp_function_module_group"] = attrs.sap_erp_function_module_group + result["sap_group"] = attrs.sap_group result["sap_erp_function_module_import_params"] = ( attrs.sap_erp_function_module_import_params ) - result["sap_erp_function_module_import_params_count"] = ( - attrs.sap_erp_function_module_import_params_count - ) + result["sap_import_params_count"] = 
attrs.sap_import_params_count result["sap_erp_function_module_export_params"] = ( attrs.sap_erp_function_module_export_params ) - result["sap_erp_function_module_export_params_count"] = ( - attrs.sap_erp_function_module_export_params_count - ) + result["sap_export_params_count"] = attrs.sap_export_params_count result["sap_erp_function_exception_list"] = attrs.sap_erp_function_exception_list result["sap_erp_function_exception_list_count"] = ( attrs.sap_erp_function_exception_list_count @@ -708,9 +651,6 @@ def _sap_erp_function_module_to_nested( is_incomplete=sap_erp_function_module.is_incomplete, provenance_type=sap_erp_function_module.provenance_type, home_id=sap_erp_function_module.home_id, - depth=sap_erp_function_module.depth, - immediate_upstream=sap_erp_function_module.immediate_upstream, - immediate_downstream=sap_erp_function_module.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -746,6 +686,7 @@ def _sap_erp_function_module_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -754,9 +695,6 @@ def _sap_erp_function_module_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_function_module_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -787,20 +725,18 @@ def _sap_erp_function_module_from_nested_bytes( RelationField, ) -SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_GROUP = KeywordField( - "sapErpFunctionModuleGroup", "sapErpFunctionModuleGroup" -) +SapErpFunctionModule.SAP_GROUP = KeywordField("sapGroup", "sapGroup") 
SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_IMPORT_PARAMS = KeywordField( "sapErpFunctionModuleImportParams", "sapErpFunctionModuleImportParams" ) -SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_IMPORT_PARAMS_COUNT = NumericField( - "sapErpFunctionModuleImportParamsCount", "sapErpFunctionModuleImportParamsCount" +SapErpFunctionModule.SAP_IMPORT_PARAMS_COUNT = NumericField( + "sapImportParamsCount", "sapImportParamsCount" ) SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_EXPORT_PARAMS = KeywordField( "sapErpFunctionModuleExportParams", "sapErpFunctionModuleExportParams" ) -SapErpFunctionModule.SAP_ERP_FUNCTION_MODULE_EXPORT_PARAMS_COUNT = NumericField( - "sapErpFunctionModuleExportParamsCount", "sapErpFunctionModuleExportParamsCount" +SapErpFunctionModule.SAP_EXPORT_PARAMS_COUNT = NumericField( + "sapExportParamsCount", "sapExportParamsCount" ) SapErpFunctionModule.SAP_ERP_FUNCTION_EXCEPTION_LIST = KeywordField( "sapErpFunctionExceptionList", "sapErpFunctionExceptionList" @@ -844,6 +780,9 @@ def _sap_erp_function_module_from_nested_bytes( SapErpFunctionModule.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SapErpFunctionModule.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SapErpFunctionModule.MEANINGS = RelationField("meanings") SapErpFunctionModule.MC_MONITORS = RelationField("mcMonitors") SapErpFunctionModule.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap_erp_table.py b/pyatlan_v9/model/assets/sap_erp_table.py index 733ff9deb..f357f57a3 100644 --- a/pyatlan_v9/model/assets/sap_erp_table.py +++ b/pyatlan_v9/model/assets/sap_erp_table.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from 
.model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,7 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import RelatedSapErpColumn, RelatedSapErpComponent, RelatedSapErpTable +from .sap_related import RelatedSapErpColumn, RelatedSapErpComponent from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -87,6 +88,7 @@ class SapErpTable(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,6 +108,8 @@ class SapErpTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SapErpTable" + sap_erp_table_type: Union[str, None, UnsetType] = UNSET """Type of the SAP ERP table.""" @@ -182,6 +186,11 @@ class SapErpTable(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -243,66 +252,6 @@ class SapErpTable(Asset): def __post_init__(self) -> None: self.type_name = "SapErpTable" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) 
-> None: - """ - Dry-run validation of this SapErpTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpTable validation failed: {errors}") - - def minimize(self) -> "SapErpTable": - """ - Return a minimal copy of this SapErpTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpTable instance with only the minimum required fields. - """ - self.validate() - return SapErpTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpTable": - """ - Create a :class:`RelatedSapErpTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpTable(guid=self.guid) - return RelatedSapErpTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -438,6 +387,11 @@ class SapErpTableRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -530,6 +484,7 @@ class SapErpTableNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -617,9 +572,6 @@ def _sap_erp_table_to_nested(sap_erp_table: SapErpTable) -> SapErpTableNested: is_incomplete=sap_erp_table.is_incomplete, provenance_type=sap_erp_table.provenance_type, home_id=sap_erp_table.home_id, - depth=sap_erp_table.depth, - immediate_upstream=sap_erp_table.immediate_upstream, - immediate_downstream=sap_erp_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -651,6 +603,7 @@ def _sap_erp_table_from_nested(nested: SapErpTableNested) -> SapErpTable: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -659,9 +612,6 @@ def _sap_erp_table_from_nested(nested: SapErpTableNested) -> SapErpTable: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -718,6 +668,9 @@ def _sap_erp_table_from_nested_bytes(data: bytes, serde: Serde) -> SapErpTable: SapErpTable.METRICS = RelationField("metrics") SapErpTable.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SapErpTable.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SapErpTable.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SapErpTable.MEANINGS = RelationField("meanings") SapErpTable.MC_MONITORS = RelationField("mcMonitors") SapErpTable.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap_erp_transaction_code.py b/pyatlan_v9/model/assets/sap_erp_transaction_code.py index a783d69be..b14769577 100644 --- a/pyatlan_v9/model/assets/sap_erp_transaction_code.py +++ b/pyatlan_v9/model/assets/sap_erp_transaction_code.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,11 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import ( - RelatedSapErpAbapProgram, - RelatedSapErpComponent, - RelatedSapErpTransactionCode, -) +from .sap_related import RelatedSapErpAbapProgram, RelatedSapErpComponent from .schema_registry_related import 
RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -89,6 +86,7 @@ class SapErpTransactionCode(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -108,6 +106,8 @@ class SapErpTransactionCode(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SapErpTransactionCode" + sap_technical_name: Union[str, None, UnsetType] = UNSET """Technical identifier for SAP data objects, used for integration and internal reference.""" @@ -178,6 +178,11 @@ class SapErpTransactionCode(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -239,66 +244,6 @@ class SapErpTransactionCode(Asset): def __post_init__(self) -> None: self.type_name = "SapErpTransactionCode" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpTransactionCode instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpTransactionCode validation failed: {errors}") - - def minimize(self) -> "SapErpTransactionCode": - """ - Return a minimal copy of this SapErpTransactionCode with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpTransactionCode with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpTransactionCode instance with only the minimum required fields. - """ - self.validate() - return SapErpTransactionCode(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpTransactionCode": - """ - Create a :class:`RelatedSapErpTransactionCode` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpTransactionCode reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpTransactionCode(guid=self.guid) - return RelatedSapErpTransactionCode(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -430,6 +375,11 @@ class SapErpTransactionCodeRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -524,6 +474,7 @@ class SapErpTransactionCodeNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -613,9 +564,6 @@ def _sap_erp_transaction_code_to_nested( is_incomplete=sap_erp_transaction_code.is_incomplete, provenance_type=sap_erp_transaction_code.provenance_type, home_id=sap_erp_transaction_code.home_id, - depth=sap_erp_transaction_code.depth, - immediate_upstream=sap_erp_transaction_code.immediate_upstream, - immediate_downstream=sap_erp_transaction_code.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -651,6 +599,7 @@ def _sap_erp_transaction_code_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -659,9 +608,6 @@ def _sap_erp_transaction_code_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sap_erp_transaction_code_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -736,6 +682,9 @@ def _sap_erp_transaction_code_from_nested_bytes( SapErpTransactionCode.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SapErpTransactionCode.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SapErpTransactionCode.MEANINGS = RelationField("meanings") SapErpTransactionCode.MC_MONITORS = RelationField("mcMonitors") SapErpTransactionCode.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap_erp_view.py b/pyatlan_v9/model/assets/sap_erp_view.py index df1117db8..3ca95aced 100644 --- a/pyatlan_v9/model/assets/sap_erp_view.py +++ b/pyatlan_v9/model/assets/sap_erp_view.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,7 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .sap_related import RelatedSapErpColumn, RelatedSapErpComponent, RelatedSapErpView +from .sap_related import RelatedSapErpColumn, RelatedSapErpComponent from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -63,8 +64,8 @@ class SapErpView(Asset): Instance of a SAP table 
in Atlan. """ - SAP_ERP_VIEW_TYPE: ClassVar[Any] = None - SAP_ERP_VIEW_DEFINITION: ClassVar[Any] = None + SAP_TYPE: ClassVar[Any] = None + SAP_DEFINITION: ClassVar[Any] = None SAP_TECHNICAL_NAME: ClassVar[Any] = None SAP_LOGICAL_NAME: ClassVar[Any] = None SAP_PACKAGE_NAME: ClassVar[Any] = None @@ -87,6 +88,7 @@ class SapErpView(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,10 +108,12 @@ class SapErpView(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sap_erp_view_type: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SapErpView" + + sap_type: Union[str, None, UnsetType] = UNSET """Type of the SAP ERP View.""" - sap_erp_view_definition: Union[str, None, UnsetType] = UNSET + sap_definition: Union[str, None, UnsetType] = UNSET """Specifies the definition of the SAP ERP View""" sap_technical_name: Union[str, None, UnsetType] = UNSET @@ -182,6 +186,11 @@ class SapErpView(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -243,66 +252,6 @@ class SapErpView(Asset): def __post_init__(self) -> None: self.type_name = "SapErpView" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SapErpView instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SapErpView validation failed: {errors}") - - def minimize(self) -> "SapErpView": - """ - Return a minimal copy of this SapErpView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SapErpView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SapErpView instance with only the minimum required fields. - """ - self.validate() - return SapErpView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSapErpView": - """ - Create a :class:`RelatedSapErpView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSapErpView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSapErpView(guid=self.guid) - return RelatedSapErpView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -358,10 +307,10 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SapErpView: class SapErpViewAttributes(AssetAttributes): """SapErpView-specific attributes for nested API format.""" - sap_erp_view_type: Union[str, None, UnsetType] = UNSET + sap_type: Union[str, None, UnsetType] = UNSET """Type of the SAP ERP View.""" - sap_erp_view_definition: Union[str, None, UnsetType] = UNSET + sap_definition: Union[str, None, UnsetType] = UNSET """Specifies the definition of the SAP ERP View""" sap_technical_name: Union[str, None, UnsetType] = UNSET @@ -438,6 +387,11 @@ class SapErpViewRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -530,6 +484,7 @@ class SapErpViewNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -554,8 +509,8 @@ class SapErpViewNested(AssetNested): def _populate_sap_erp_view_attrs(attrs: SapErpViewAttributes, obj: SapErpView) -> None: """Populate SapErpView-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sap_erp_view_type = obj.sap_erp_view_type - attrs.sap_erp_view_definition = obj.sap_erp_view_definition + attrs.sap_type = obj.sap_type + 
attrs.sap_definition = obj.sap_definition attrs.sap_technical_name = obj.sap_technical_name attrs.sap_logical_name = obj.sap_logical_name attrs.sap_package_name = obj.sap_package_name @@ -569,8 +524,8 @@ def _populate_sap_erp_view_attrs(attrs: SapErpViewAttributes, obj: SapErpView) - def _extract_sap_erp_view_attrs(attrs: SapErpViewAttributes) -> dict: """Extract all SapErpView attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sap_erp_view_type"] = attrs.sap_erp_view_type - result["sap_erp_view_definition"] = attrs.sap_erp_view_definition + result["sap_type"] = attrs.sap_type + result["sap_definition"] = attrs.sap_definition result["sap_technical_name"] = attrs.sap_technical_name result["sap_logical_name"] = attrs.sap_logical_name result["sap_package_name"] = attrs.sap_package_name @@ -615,9 +570,6 @@ def _sap_erp_view_to_nested(sap_erp_view: SapErpView) -> SapErpViewNested: is_incomplete=sap_erp_view.is_incomplete, provenance_type=sap_erp_view.provenance_type, home_id=sap_erp_view.home_id, - depth=sap_erp_view.depth, - immediate_upstream=sap_erp_view.immediate_upstream, - immediate_downstream=sap_erp_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -649,6 +601,7 @@ def _sap_erp_view_from_nested(nested: SapErpViewNested) -> SapErpView: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -657,9 +610,6 @@ def _sap_erp_view_from_nested(nested: SapErpViewNested) -> SapErpView: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_sap_erp_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -686,10 +636,8 @@ def _sap_erp_view_from_nested_bytes(data: bytes, serde: Serde) -> SapErpView: RelationField, ) -SapErpView.SAP_ERP_VIEW_TYPE = KeywordField("sapErpViewType", "sapErpViewType") -SapErpView.SAP_ERP_VIEW_DEFINITION = KeywordField( - "sapErpViewDefinition", "sapErpViewDefinition" -) +SapErpView.SAP_TYPE = KeywordField("sapType", "sapType") +SapErpView.SAP_DEFINITION = KeywordField("sapDefinition", "sapDefinition") SapErpView.SAP_TECHNICAL_NAME = KeywordField("sapTechnicalName", "sapTechnicalName") SapErpView.SAP_LOGICAL_NAME = KeywordField("sapLogicalName", "sapLogicalName") SapErpView.SAP_PACKAGE_NAME = KeywordField("sapPackageName", "sapPackageName") @@ -714,6 +662,9 @@ def _sap_erp_view_from_nested_bytes(data: bytes, serde: Serde) -> SapErpView: SapErpView.METRICS = RelationField("metrics") SapErpView.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SapErpView.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SapErpView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SapErpView.MEANINGS = RelationField("meanings") SapErpView.MC_MONITORS = RelationField("mcMonitors") SapErpView.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sap_related.py b/pyatlan_v9/model/assets/sap_related.py index d6cb164bd..d7b6d070a 100644 --- a/pyatlan_v9/model/assets/sap_related.py +++ b/pyatlan_v9/model/assets/sap_related.py @@ -92,10 +92,10 @@ class RelatedSapErpView(RelatedSAP): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SapErpView" so it serializes correctly - sap_erp_view_type: Union[str, None, UnsetType] = UNSET + sap_type: Union[str, None, UnsetType] = UNSET """Type of the SAP ERP View.""" - sap_erp_view_definition: Union[str, None, UnsetType] = UNSET + sap_definition: Union[str, None, UnsetType] = UNSET """Specifies the 
definition of the SAP ERP View""" def __post_init__(self) -> None: @@ -113,13 +113,13 @@ class RelatedSapErpCdsView(RelatedSAP): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SapErpCdsView" so it serializes correctly - sap_erp_cds_view_technical_name: Union[str, None, UnsetType] = UNSET + sap_technical_name: Union[str, None, UnsetType] = UNSET """The technical database view name of the SAP ERP CDS View.""" - sap_erp_cds_view_source_name: Union[str, None, UnsetType] = UNSET + sap_source_name: Union[str, None, UnsetType] = UNSET """The source name of the SAP ERP CDS View Definition.""" - sap_erp_cds_view_source_type: Union[str, None, UnsetType] = UNSET + sap_source_type: Union[str, None, UnsetType] = UNSET """The source type of the SAP ERP CDS View Definition.""" def __post_init__(self) -> None: @@ -137,25 +137,25 @@ class RelatedSapErpColumn(RelatedSAP): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SapErpColumn" so it serializes correctly - sap_erp_column_data_element: Union[str, None, UnsetType] = UNSET + sap_data_element: Union[str, None, UnsetType] = UNSET """Represents the SAP ERP data element, providing semantic information about the column.""" - sap_erp_column_logical_data_type: Union[str, None, UnsetType] = UNSET + sap_logical_data_type: Union[str, None, UnsetType] = UNSET """Specifies the logical data type of values in this SAP ERP column""" - sap_erp_column_length: Union[str, None, UnsetType] = UNSET + sap_length: Union[str, None, UnsetType] = UNSET """Indicates the maximum length of the values that the SAP ERP column can store.""" - sap_erp_column_decimals: Union[str, None, UnsetType] = UNSET + sap_decimals: Union[str, None, UnsetType] = UNSET """Defines the number of decimal places allowed for numeric values in the SAP ERP column.""" - sap_erp_column_is_primary: Union[bool, None, UnsetType] = UNSET + sap_is_primary: Union[bool, None, UnsetType] = UNSET """When true, this column is the 
primary key for the SAP ERP table or view.""" - sap_erp_column_is_foreign: Union[bool, None, UnsetType] = UNSET + sap_is_foreign: Union[bool, None, UnsetType] = UNSET """When true, this column is the foreign key for the SAP ERP table or view.""" - sap_erp_column_is_mandatory: Union[bool, None, UnsetType] = UNSET + sap_is_mandatory: Union[bool, None, UnsetType] = UNSET """When true, the values in this column can be null.""" sap_erp_table_name: Union[str, None, UnsetType] = UNSET @@ -176,10 +176,10 @@ class RelatedSapErpColumn(RelatedSAP): sap_erp_cds_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP CDS view in which this column asset exists.""" - sap_erp_column_check_table_name: Union[str, None, UnsetType] = UNSET + sap_check_table_name: Union[str, None, UnsetType] = UNSET """Defines the SAP ERP table name used as a foreign key reference to validate permissible values for this column.""" - sap_erp_column_check_table_qualified_name: Union[str, None, UnsetType] = UNSET + sap_check_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the SAP ERP Table used as a foreign key reference to validate permissible values for this column.""" def __post_init__(self) -> None: @@ -212,7 +212,7 @@ class RelatedSapErpFunctionModule(RelatedSAP): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SapErpFunctionModule" so it serializes correctly - sap_erp_function_module_group: Union[str, None, UnsetType] = UNSET + sap_group: Union[str, None, UnsetType] = UNSET """Represents the group to which the SAP ERP function module belongs.""" sap_erp_function_module_import_params: Union[ @@ -220,7 +220,7 @@ class RelatedSapErpFunctionModule(RelatedSAP): ] = UNSET """Parameters imported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_import_params_count: Union[int, None, UnsetType] = UNSET + sap_import_params_count: Union[int, None, UnsetType] = UNSET """Represents 
the total number of Import Parameters in a given SAP ERP Function Module.""" sap_erp_function_module_export_params: Union[ @@ -228,7 +228,7 @@ class RelatedSapErpFunctionModule(RelatedSAP): ] = UNSET """Parameters exported by the SAP ERP function module, defined as key-value pairs.""" - sap_erp_function_module_export_params_count: Union[int, None, UnsetType] = UNSET + sap_export_params_count: Union[int, None, UnsetType] = UNSET """Represents the total number of Export Parameters in a given SAP ERP Function Module.""" sap_erp_function_exception_list: Union[List[Dict[str, str]], None, UnsetType] = ( diff --git a/pyatlan_v9/model/assets/schema.py b/pyatlan_v9/model/assets/schema.py index 8c2947bdb..9b1bf4d40 100644 --- a/pyatlan_v9/model/assets/schema.py +++ b/pyatlan_v9/model/assets/schema.py @@ -50,6 +50,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -80,7 +81,6 @@ RelatedFunction, RelatedMaterialisedView, RelatedProcedure, - RelatedSchema, RelatedTable, RelatedView, ) @@ -97,7 +97,7 @@ class Schema(Asset): """ TABLE_COUNT: ClassVar[Any] = None - SCHEMA_EXTERNAL_LOCATION: ClassVar[Any] = None + SQL_EXTERNAL_LOCATION: ClassVar[Any] = None VIEWS_COUNT: ClassVar[Any] = None LINKED_SCHEMA_QUALIFIED_NAME: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None @@ -147,6 +147,7 @@ class Schema(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -182,10 +183,12 @@ class Schema(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: 
Union[str, UnsetType] = "Schema" + table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this schema.""" - schema_external_location: Union[str, None, UnsetType] = UNSET + sql_external_location: Union[str, None, UnsetType] = UNSET """External location of this schema, for example: an S3 object location.""" views_count: Union[int, None, UnsetType] = UNSET @@ -345,6 +348,11 @@ class Schema(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -474,76 +482,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Schema instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.database is UNSET: - errors.append("database is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"Schema validation failed: {errors}") - - def minimize(self) -> "Schema": - """ - Return a minimal copy of this Schema with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Schema with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Schema instance with only the minimum required fields. - """ - self.validate() - return Schema(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSchema": - """ - Create a :class:`RelatedSchema` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSchema reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSchema(guid=self.guid) - return RelatedSchema(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -694,7 +632,7 @@ class SchemaAttributes(AssetAttributes): table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this schema.""" - schema_external_location: Union[str, None, UnsetType] = UNSET + sql_external_location: Union[str, None, UnsetType] = UNSET """External location of this schema, for example: an S3 object location.""" views_count: Union[int, None, UnsetType] = UNSET @@ -858,6 +796,11 @@ class SchemaRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1020,6 +963,7 @@ class SchemaNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1061,7 +1005,7 @@ def _populate_schema__attrs(attrs: SchemaAttributes, obj: Schema) -> None: """Populate Schema-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.table_count = obj.table_count - attrs.schema_external_location = obj.schema_external_location + attrs.sql_external_location = obj.sql_external_location attrs.views_count = obj.views_count attrs.linked_schema_qualified_name = obj.linked_schema_qualified_name attrs.query_count = obj.query_count @@ -1099,7 +1043,7 @@ def _extract_schema__attrs(attrs: SchemaAttributes) -> dict: """Extract all Schema attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) 
result["table_count"] = attrs.table_count - result["schema_external_location"] = attrs.schema_external_location + result["sql_external_location"] = attrs.sql_external_location result["views_count"] = attrs.views_count result["linked_schema_qualified_name"] = attrs.linked_schema_qualified_name result["query_count"] = attrs.query_count @@ -1173,9 +1117,6 @@ def _schema__to_nested(schema_: Schema) -> SchemaNested: is_incomplete=schema_.is_incomplete, provenance_type=schema_.provenance_type, home_id=schema_.home_id, - depth=schema_.depth, - immediate_upstream=schema_.immediate_upstream, - immediate_downstream=schema_.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1205,6 +1146,7 @@ def _schema__from_nested(nested: SchemaNested) -> Schema: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1213,9 +1155,6 @@ def _schema__from_nested(nested: SchemaNested) -> Schema: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_schema__attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1244,8 +1183,8 @@ def _schema__from_nested_bytes(data: bytes, serde: Serde) -> Schema: ) Schema.TABLE_COUNT = NumericField("tableCount", "tableCount") -Schema.SCHEMA_EXTERNAL_LOCATION = KeywordField( - "schemaExternalLocation", "schemaExternalLocation" +Schema.SQL_EXTERNAL_LOCATION = KeywordField( + "sqlExternalLocation", "sqlExternalLocation" ) Schema.VIEWS_COUNT = NumericField("viewsCount", "viewsCount") Schema.LINKED_SCHEMA_QUALIFIED_NAME = KeywordField( @@ -1321,6 +1260,9 @@ def _schema__from_nested_bytes(data: bytes, 
serde: Serde) -> Schema: Schema.DBT_SOURCES = RelationField("dbtSources") Schema.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Schema.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Schema.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Schema.MEANINGS = RelationField("meanings") Schema.MC_MONITORS = RelationField("mcMonitors") Schema.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/schema_registry.py b/pyatlan_v9/model/assets/schema_registry.py index 0147ab887..44efe4456 100644 --- a/pyatlan_v9/model/assets/schema_registry.py +++ b/pyatlan_v9/model/assets/schema_registry.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -47,7 +48,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .schema_registry_related import RelatedSchemaRegistry, RelatedSchemaRegistrySubject +from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -79,6 +80,7 @@ class SchemaRegistry(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -96,6 +98,8 @@ class SchemaRegistry(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None 
OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SchemaRegistry" + schema_registry_schema_type: Union[str, None, UnsetType] = UNSET """Type of language or specification used to define the schema, for example: JSON, Protobuf, etc.""" @@ -151,6 +155,11 @@ class SchemaRegistry(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -206,66 +215,6 @@ class SchemaRegistry(Asset): def __post_init__(self) -> None: self.type_name = "SchemaRegistry" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SchemaRegistry instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SchemaRegistry validation failed: {errors}") - - def minimize(self) -> "SchemaRegistry": - """ - Return a minimal copy of this SchemaRegistry with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SchemaRegistry with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SchemaRegistry instance with only the minimum required fields. - """ - self.validate() - return SchemaRegistry(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSchemaRegistry": - """ - Create a :class:`RelatedSchemaRegistry` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSchemaRegistry reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSchemaRegistry(guid=self.guid) - return RelatedSchemaRegistry(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -380,6 +329,11 @@ class SchemaRegistryRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -468,6 +422,7 @@ class SchemaRegistryNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -541,9 +496,6 @@ def _schema_registry_to_nested(schema_registry: SchemaRegistry) -> SchemaRegistr is_incomplete=schema_registry.is_incomplete, provenance_type=schema_registry.provenance_type, home_id=schema_registry.home_id, - depth=schema_registry.depth, - immediate_upstream=schema_registry.immediate_upstream, - immediate_downstream=schema_registry.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -577,6 +529,7 @@ def _schema_registry_from_nested(nested: SchemaRegistryNested) -> SchemaRegistry updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -585,9 +538,6 @@ def _schema_registry_from_nested(nested: SchemaRegistryNested) -> SchemaRegistry 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_schema_registry_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -639,6 +589,9 @@ def _schema_registry_from_nested_bytes(data: bytes, serde: Serde) -> SchemaRegis SchemaRegistry.METRICS = RelationField("metrics") SchemaRegistry.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SchemaRegistry.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SchemaRegistry.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SchemaRegistry.MEANINGS = RelationField("meanings") SchemaRegistry.MC_MONITORS = RelationField("mcMonitors") SchemaRegistry.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/schema_registry_subject.py b/pyatlan_v9/model/assets/schema_registry_subject.py index 84ccf1efe..bb9cfa7f4 100644 --- a/pyatlan_v9/model/assets/schema_registry_subject.py +++ b/pyatlan_v9/model/assets/schema_registry_subject.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -89,6 +90,7 @@ class SchemaRegistrySubject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -108,6 +110,8 @@ class 
SchemaRegistrySubject(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SchemaRegistrySubject" + schema_registry_subject_base_name: Union[str, None, UnsetType] = UNSET """Base name of the subject, without -key, -value prefixes.""" @@ -185,6 +189,11 @@ class SchemaRegistrySubject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -248,66 +257,6 @@ class SchemaRegistrySubject(Asset): def __post_init__(self) -> None: self.type_name = "SchemaRegistrySubject" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SchemaRegistrySubject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SchemaRegistrySubject validation failed: {errors}") - - def minimize(self) -> "SchemaRegistrySubject": - """ - Return a minimal copy of this SchemaRegistrySubject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SchemaRegistrySubject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SchemaRegistrySubject instance with only the minimum required fields. - """ - self.validate() - return SchemaRegistrySubject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSchemaRegistrySubject": - """ - Create a :class:`RelatedSchemaRegistrySubject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSchemaRegistrySubject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSchemaRegistrySubject(guid=self.guid) - return RelatedSchemaRegistrySubject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -446,6 +395,11 @@ class SchemaRegistrySubjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -542,6 +496,7 @@ class SchemaRegistrySubjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -655,9 +610,6 @@ def _schema_registry_subject_to_nested( is_incomplete=schema_registry_subject.is_incomplete, provenance_type=schema_registry_subject.provenance_type, home_id=schema_registry_subject.home_id, - depth=schema_registry_subject.depth, - immediate_upstream=schema_registry_subject.immediate_upstream, - immediate_downstream=schema_registry_subject.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -693,6 +645,7 @@ def _schema_registry_subject_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -701,9 +654,6 @@ def _schema_registry_subject_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_schema_registry_subject_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -793,6 +743,9 @@ def _schema_registry_subject_from_nested_bytes( SchemaRegistrySubject.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SchemaRegistrySubject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SchemaRegistrySubject.MEANINGS = RelationField("meanings") SchemaRegistrySubject.MC_MONITORS = RelationField("mcMonitors") SchemaRegistrySubject.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/schema_registry_version.py b/pyatlan_v9/model/assets/schema_registry_version.py index 37523ee4d..72fb254c5 100644 --- a/pyatlan_v9/model/assets/schema_registry_version.py +++ b/pyatlan_v9/model/assets/schema_registry_version.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,10 +49,7 @@ from .process_related import RelatedProcess from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme -from .schema_registry_related import ( - RelatedSchemaRegistrySubject, - RelatedSchemaRegistryVersion, -) +from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -87,6 +85,7 @@ class SchemaRegistryVersion(Asset): METRICS: ClassVar[Any] 
= None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +104,8 @@ class SchemaRegistryVersion(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SchemaRegistryVersion" + schema_registry_version_number: Union[str, None, UnsetType] = UNSET """Version number of this schema version.""" @@ -172,6 +173,11 @@ class SchemaRegistryVersion(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -238,76 +244,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SchemaRegistryVersion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_registry_subject is UNSET: - errors.append("schema_registry_subject is required for creation") - if self.schema_registry_subject_qualified_name is UNSET: - errors.append( - "schema_registry_subject_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"SchemaRegistryVersion validation failed: {errors}") - - def minimize(self) -> "SchemaRegistryVersion": - """ - Return a minimal copy of this SchemaRegistryVersion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SchemaRegistryVersion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SchemaRegistryVersion instance with only the minimum required fields. - """ - self.validate() - return SchemaRegistryVersion(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSchemaRegistryVersion": - """ - Create a :class:`RelatedSchemaRegistryVersion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSchemaRegistryVersion reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSchemaRegistryVersion(guid=self.guid) - return RelatedSchemaRegistryVersion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -436,6 +372,11 @@ class SchemaRegistryVersionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -529,6 +470,7 @@ class SchemaRegistryVersionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -625,9 +567,6 @@ def _schema_registry_version_to_nested( is_incomplete=schema_registry_version.is_incomplete, provenance_type=schema_registry_version.provenance_type, home_id=schema_registry_version.home_id, - depth=schema_registry_version.depth, - immediate_upstream=schema_registry_version.immediate_upstream, - immediate_downstream=schema_registry_version.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -663,6 +602,7 @@ def _schema_registry_version_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -671,9 +611,6 @@ def _schema_registry_version_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_schema_registry_version_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -751,6 +688,9 @@ def _schema_registry_version_from_nested_bytes( SchemaRegistryVersion.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SchemaRegistryVersion.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SchemaRegistryVersion.MEANINGS = RelationField("meanings") SchemaRegistryVersion.MC_MONITORS = RelationField("mcMonitors") SchemaRegistryVersion.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/semantic.py b/pyatlan_v9/model/assets/semantic.py index 7092010b1..ac4c04f1f 100644 --- a/pyatlan_v9/model/assets/semantic.py +++ b/pyatlan_v9/model/assets/semantic.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .semantic_related import RelatedSemantic from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -78,6 +78,7 @@ class Semantic(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: 
ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class Semantic(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Semantic" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class Semantic(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class Semantic(Asset): def __post_init__(self) -> None: self.type_name = "Semantic" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Semantic instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Semantic validation failed: {errors}") - - def minimize(self) -> "Semantic": - """ - Return a minimal copy of this Semantic with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Semantic with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Semantic instance with only the minimum required fields. - """ - self.validate() - return Semantic(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemantic": - """ - Create a :class:`RelatedSemantic` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemantic reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSemantic(guid=self.guid) - return RelatedSemantic(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class SemanticRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class SemanticNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _semantic_to_nested(semantic: Semantic) -> SemanticNested: is_incomplete=semantic.is_incomplete, provenance_type=semantic.provenance_type, home_id=semantic.home_id, - depth=semantic.depth, - immediate_upstream=semantic.immediate_upstream, - immediate_downstream=semantic.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -552,6 +503,7 @@ def _semantic_from_nested(nested: SemanticNested) -> Semantic: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -560,9 +512,6 @@ def _semantic_from_nested(nested: SemanticNested) -> Semantic: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -600,6 +549,9 @@ def _semantic_from_nested_bytes(data: bytes, serde: Serde) -> Semantic: Semantic.METRICS = RelationField("metrics") Semantic.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Semantic.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Semantic.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Semantic.MEANINGS = RelationField("meanings") Semantic.MC_MONITORS = RelationField("mcMonitors") Semantic.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/semantic_dimension.py b/pyatlan_v9/model/assets/semantic_dimension.py index df27142c2..523fcb37c 100644 --- a/pyatlan_v9/model/assets/semantic_dimension.py +++ b/pyatlan_v9/model/assets/semantic_dimension.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .semantic_related import RelatedSemanticDimension, RelatedSemanticModel +from .semantic_related import RelatedSemanticModel from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -86,6 +87,7 @@ class SemanticDimension(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: 
ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class SemanticDimension(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticDimension" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -174,6 +178,11 @@ class SemanticDimension(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -238,72 +247,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.semantic_model is UNSET: - errors.append("semantic_model is required for creation") - if errors: - raise ValueError(f"SemanticDimension validation failed: {errors}") - - def minimize(self) -> "SemanticDimension": - """ - Return a minimal copy of this SemanticDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SemanticDimension instance with only the minimum required fields. - """ - self.validate() - return SemanticDimension(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticDimension": - """ - Create a :class:`RelatedSemanticDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticDimension reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSemanticDimension(guid=self.guid) - return RelatedSemanticDimension(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -435,6 +378,11 @@ class SemanticDimensionRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -526,6 +474,7 @@ class SemanticDimensionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -612,9 +561,6 @@ def _semantic_dimension_to_nested( is_incomplete=semantic_dimension.is_incomplete, provenance_type=semantic_dimension.provenance_type, home_id=semantic_dimension.home_id, - depth=semantic_dimension.depth, - immediate_upstream=semantic_dimension.immediate_upstream, - immediate_downstream=semantic_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -650,6 +596,7 @@ def _semantic_dimension_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -658,9 +605,6 @@ def _semantic_dimension_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -729,6 +673,9 @@ def _semantic_dimension_from_nested_bytes( SemanticDimension.METRICS = RelationField("metrics") SemanticDimension.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SemanticDimension.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SemanticDimension.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SemanticDimension.MEANINGS = RelationField("meanings") SemanticDimension.MC_MONITORS = RelationField("mcMonitors") SemanticDimension.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/semantic_entity.py b/pyatlan_v9/model/assets/semantic_entity.py index bd6ae4c13..881e31bfe 100644 --- a/pyatlan_v9/model/assets/semantic_entity.py +++ b/pyatlan_v9/model/assets/semantic_entity.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .semantic_related import RelatedSemanticEntity, RelatedSemanticModel +from .semantic_related import RelatedSemanticModel from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -86,6 +87,7 @@ class SemanticEntity(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: 
ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class SemanticEntity(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticEntity" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -174,6 +178,11 @@ class SemanticEntity(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -238,72 +247,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticEntity instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.semantic_model is UNSET: - errors.append("semantic_model is required for creation") - if errors: - raise ValueError(f"SemanticEntity validation failed: {errors}") - - def minimize(self) -> "SemanticEntity": - """ - Return a minimal copy of this SemanticEntity with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticEntity with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SemanticEntity instance with only the minimum required fields. - """ - self.validate() - return SemanticEntity(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticEntity": - """ - Create a :class:`RelatedSemanticEntity` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticEntity reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSemanticEntity(guid=self.guid) - return RelatedSemanticEntity(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -433,6 +376,11 @@ class SemanticEntityRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -524,6 +472,7 @@ class SemanticEntityNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -608,9 +557,6 @@ def _semantic_entity_to_nested(semantic_entity: SemanticEntity) -> SemanticEntit is_incomplete=semantic_entity.is_incomplete, provenance_type=semantic_entity.provenance_type, home_id=semantic_entity.home_id, - depth=semantic_entity.depth, - immediate_upstream=semantic_entity.immediate_upstream, - immediate_downstream=semantic_entity.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -644,6 +590,7 @@ def _semantic_entity_from_nested(nested: SemanticEntityNested) -> SemanticEntity updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -652,9 +599,6 @@ def _semantic_entity_from_nested(nested: SemanticEntityNested) -> SemanticEntity 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_entity_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -717,6 +661,9 @@ def _semantic_entity_from_nested_bytes(data: bytes, serde: Serde) -> SemanticEnt SemanticEntity.METRICS = RelationField("metrics") SemanticEntity.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SemanticEntity.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SemanticEntity.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SemanticEntity.MEANINGS = RelationField("meanings") SemanticEntity.MC_MONITORS = RelationField("mcMonitors") SemanticEntity.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/semantic_field.py b/pyatlan_v9/model/assets/semantic_field.py index eeceda0c2..8a6bed554 100644 --- a/pyatlan_v9/model/assets/semantic_field.py +++ b/pyatlan_v9/model/assets/semantic_field.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .semantic_related import RelatedSemanticField from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -85,6 +85,7 @@ class SemanticField(Asset): METRICS: 
ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +103,8 @@ class SemanticField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticField" + semantic_expression: Union[str, None, UnsetType] = UNSET """Column name or SQL expression for the semantic field.""" @@ -172,6 +175,11 @@ class SemanticField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -227,66 +235,6 @@ class SemanticField(Asset): def __post_init__(self) -> None: self.type_name = "SemanticField" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SemanticField validation failed: {errors}") - - def minimize(self) -> "SemanticField": - """ - Return a minimal copy of this SemanticField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SemanticField instance with only the minimum required fields. - """ - self.validate() - return SemanticField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticField": - """ - Create a :class:`RelatedSemanticField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticField reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSemanticField(guid=self.guid) - return RelatedSemanticField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -416,6 +364,11 @@ class SemanticFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -504,6 +457,7 @@ class SemanticFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -585,9 +539,6 @@ def _semantic_field_to_nested(semantic_field: SemanticField) -> SemanticFieldNes is_incomplete=semantic_field.is_incomplete, provenance_type=semantic_field.provenance_type, home_id=semantic_field.home_id, - depth=semantic_field.depth, - immediate_upstream=semantic_field.immediate_upstream, - immediate_downstream=semantic_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -621,6 +572,7 @@ def _semantic_field_from_nested(nested: SemanticFieldNested) -> SemanticField: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -629,9 +581,6 @@ def _semantic_field_from_nested(nested: SemanticFieldNested) -> SemanticField: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -692,6 +641,9 @@ def _semantic_field_from_nested_bytes(data: bytes, serde: Serde) -> SemanticFiel SemanticField.METRICS = RelationField("metrics") SemanticField.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SemanticField.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SemanticField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SemanticField.MEANINGS = RelationField("meanings") SemanticField.MC_MONITORS = RelationField("mcMonitors") SemanticField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/semantic_measure.py b/pyatlan_v9/model/assets/semantic_measure.py index ecc6e4e7f..66647a5ef 100644 --- a/pyatlan_v9/model/assets/semantic_measure.py +++ b/pyatlan_v9/model/assets/semantic_measure.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .semantic_related import RelatedSemanticMeasure, RelatedSemanticModel +from .semantic_related import RelatedSemanticModel from .soda_related import RelatedSodaCheck from .spark_related import 
RelatedSparkJob @@ -86,6 +87,7 @@ class SemanticMeasure(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,6 +106,8 @@ class SemanticMeasure(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticMeasure" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -174,6 +178,11 @@ class SemanticMeasure(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -238,72 +247,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticMeasure instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.semantic_model is UNSET: - errors.append("semantic_model is required for creation") - if errors: - raise ValueError(f"SemanticMeasure validation failed: {errors}") - - def minimize(self) -> "SemanticMeasure": - """ - Return a minimal copy of this SemanticMeasure with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticMeasure with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SemanticMeasure instance with only the minimum required fields. - """ - self.validate() - return SemanticMeasure(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticMeasure": - """ - Create a :class:`RelatedSemanticMeasure` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticMeasure reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSemanticMeasure(guid=self.guid) - return RelatedSemanticMeasure(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -435,6 +378,11 @@ class SemanticMeasureRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -526,6 +474,7 @@ class SemanticMeasureNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -612,9 +561,6 @@ def _semantic_measure_to_nested( is_incomplete=semantic_measure.is_incomplete, provenance_type=semantic_measure.provenance_type, home_id=semantic_measure.home_id, - depth=semantic_measure.depth, - immediate_upstream=semantic_measure.immediate_upstream, - immediate_downstream=semantic_measure.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -648,6 +594,7 @@ def _semantic_measure_from_nested(nested: SemanticMeasureNested) -> SemanticMeas updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -656,9 +603,6 @@ def _semantic_measure_from_nested(nested: SemanticMeasureNested) -> SemanticMeas is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_measure_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -723,6 +667,9 @@ def _semantic_measure_from_nested_bytes(data: bytes, serde: Serde) -> SemanticMe SemanticMeasure.METRICS = RelationField("metrics") SemanticMeasure.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SemanticMeasure.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SemanticMeasure.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SemanticMeasure.MEANINGS = RelationField("meanings") SemanticMeasure.MC_MONITORS = RelationField("mcMonitors") SemanticMeasure.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/semantic_model.py b/pyatlan_v9/model/assets/semantic_model.py index a54f60b20..43d16e13d 100644 --- a/pyatlan_v9/model/assets/semantic_model.py +++ b/pyatlan_v9/model/assets/semantic_model.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,7 +53,6 @@ RelatedSemanticDimension, RelatedSemanticEntity, RelatedSemanticMeasure, - RelatedSemanticModel, ) from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -83,6 +83,7 @@ class SemanticModel(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None 
MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +104,8 @@ class SemanticModel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SemanticModel" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -152,6 +155,11 @@ class SemanticModel(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -216,66 +224,6 @@ class SemanticModel(Asset): def __post_init__(self) -> None: self.type_name = "SemanticModel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SemanticModel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SemanticModel validation failed: {errors}") - - def minimize(self) -> "SemanticModel": - """ - Return a minimal copy of this SemanticModel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SemanticModel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SemanticModel instance with only the minimum required fields. - """ - self.validate() - return SemanticModel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSemanticModel": - """ - Create a :class:`RelatedSemanticModel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSemanticModel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSemanticModel(guid=self.guid) - return RelatedSemanticModel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -384,6 +332,11 @@ class SemanticModelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -481,6 +434,7 @@ class SemanticModelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -551,9 +505,6 @@ def _semantic_model_to_nested(semantic_model: SemanticModel) -> SemanticModelNes is_incomplete=semantic_model.is_incomplete, provenance_type=semantic_model.provenance_type, home_id=semantic_model.home_id, - depth=semantic_model.depth, - immediate_upstream=semantic_model.immediate_upstream, - immediate_downstream=semantic_model.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -587,6 +538,7 @@ def _semantic_model_from_nested(nested: SemanticModelNested) -> SemanticModel: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -595,9 +547,6 @@ def _semantic_model_from_nested(nested: SemanticModelNested) -> SemanticModel: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_semantic_model_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -641,6 +590,9 @@ def _semantic_model_from_nested_bytes(data: bytes, serde: Serde) -> SemanticMode SemanticModel.METRICS = RelationField("metrics") SemanticModel.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SemanticModel.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SemanticModel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SemanticModel.MEANINGS = RelationField("meanings") SemanticModel.MC_MONITORS = RelationField("mcMonitors") SemanticModel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sigma.py b/pyatlan_v9/model/assets/sigma.py index 2a2c25020..ccfe76b00 100644 --- a/pyatlan_v9/model/assets/sigma.py +++ b/pyatlan_v9/model/assets/sigma.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import RelatedSigma from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -84,6 +84,7 @@ class Sigma(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None 
DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -101,6 +102,8 @@ class Sigma(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Sigma" + sigma_workbook_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the workbook in which this asset exists.""" @@ -168,6 +171,11 @@ class Sigma(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -223,66 +231,6 @@ class Sigma(Asset): def __post_init__(self) -> None: self.type_name = "Sigma" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Sigma instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Sigma validation failed: {errors}") - - def minimize(self) -> "Sigma": - """ - Return a minimal copy of this Sigma with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Sigma with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Sigma instance with only the minimum required fields. - """ - self.validate() - return Sigma(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigma": - """ - Create a :class:`RelatedSigma` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigma reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSigma(guid=self.guid) - return RelatedSigma(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -409,6 +357,11 @@ class SigmaRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -495,6 +448,7 @@ class SigmaNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -574,9 +528,6 @@ def _sigma_to_nested(sigma: Sigma) -> SigmaNested: is_incomplete=sigma.is_incomplete, provenance_type=sigma.provenance_type, home_id=sigma.home_id, - depth=sigma.depth, - immediate_upstream=sigma.immediate_upstream, - immediate_downstream=sigma.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -606,6 +557,7 @@ def _sigma_from_nested(nested: SigmaNested) -> Sigma: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -614,9 +566,6 @@ def _sigma_from_nested(nested: SigmaNested) -> Sigma: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_sigma_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -676,6 +625,9 @@ def _sigma_from_nested_bytes(data: bytes, serde: Serde) -> Sigma: Sigma.METRICS = RelationField("metrics") Sigma.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Sigma.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Sigma.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Sigma.MEANINGS = RelationField("meanings") Sigma.MC_MONITORS = RelationField("mcMonitors") Sigma.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sigma_data_element.py b/pyatlan_v9/model/assets/sigma_data_element.py index 065d78f3c..94a9af7f6 100644 --- a/pyatlan_v9/model/assets/sigma_data_element.py +++ b/pyatlan_v9/model/assets/sigma_data_element.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,11 +50,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import ( - RelatedSigmaDataElement, - RelatedSigmaDataElementField, - RelatedSigmaPage, -) +from .sigma_related import RelatedSigmaDataElementField, RelatedSigmaPage from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -92,6 +89,7 @@ class SigmaDataElement(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None 
+ GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +109,8 @@ class SigmaDataElement(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SigmaDataElement" + sigma_data_element_query: Union[str, None, UnsetType] = UNSET """""" @@ -187,6 +187,11 @@ class SigmaDataElement(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -258,80 +263,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaDataElement instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sigma_page is UNSET: - errors.append("sigma_page is required for creation") - if self.sigma_page_name is UNSET: - errors.append("sigma_page_name is required for creation") - if self.sigma_page_qualified_name is UNSET: - errors.append("sigma_page_qualified_name is required for creation") - if self.sigma_workbook_name is UNSET: - errors.append("sigma_workbook_name is required for creation") - if self.sigma_workbook_qualified_name is UNSET: - errors.append("sigma_workbook_qualified_name is required for creation") - if errors: - raise ValueError(f"SigmaDataElement validation failed: {errors}") - - def minimize(self) -> "SigmaDataElement": - """ - Return a minimal copy of this SigmaDataElement with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaDataElement with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaDataElement instance with only the minimum required fields. - """ - self.validate() - return SigmaDataElement(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaDataElement": - """ - Create a :class:`RelatedSigmaDataElement` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaDataElement reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSigmaDataElement(guid=self.guid) - return RelatedSigmaDataElement(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -469,6 +400,11 @@ class SigmaDataElementRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -565,6 +501,7 @@ class SigmaDataElementNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -658,9 +595,6 @@ def _sigma_data_element_to_nested( is_incomplete=sigma_data_element.is_incomplete, provenance_type=sigma_data_element.provenance_type, home_id=sigma_data_element.home_id, - depth=sigma_data_element.depth, - immediate_upstream=sigma_data_element.immediate_upstream, - immediate_downstream=sigma_data_element.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -694,6 +628,7 @@ def _sigma_data_element_from_nested(nested: SigmaDataElementNested) -> SigmaData updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -702,9 +637,6 @@ def _sigma_data_element_from_nested(nested: SigmaDataElementNested) -> SigmaData is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_data_element_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -786,6 +718,9 @@ def _sigma_data_element_from_nested_bytes( SigmaDataElement.METRICS = RelationField("metrics") SigmaDataElement.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SigmaDataElement.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SigmaDataElement.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SigmaDataElement.MEANINGS = RelationField("meanings") SigmaDataElement.MC_MONITORS = RelationField("mcMonitors") SigmaDataElement.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sigma_data_element_field.py b/pyatlan_v9/model/assets/sigma_data_element_field.py index d561a8bf6..a5f747265 100644 --- a/pyatlan_v9/model/assets/sigma_data_element_field.py +++ b/pyatlan_v9/model/assets/sigma_data_element_field.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import 
RelatedSigmaDataElement, RelatedSigmaDataElementField +from .sigma_related import RelatedSigmaDataElement from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -64,7 +65,7 @@ class SigmaDataElementField(Asset): Instance of a Sigma data element field in Atlan. """ - SIGMA_DATA_ELEMENT_FIELD_IS_HIDDEN: ClassVar[Any] = None + SIGMA_IS_HIDDEN: ClassVar[Any] = None SIGMA_DATA_ELEMENT_FIELD_FORMULA: ClassVar[Any] = None SIGMA_WORKBOOK_QUALIFIED_NAME: ClassVar[Any] = None SIGMA_WORKBOOK_NAME: ClassVar[Any] = None @@ -87,6 +88,7 @@ class SigmaDataElementField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,7 +107,9 @@ class SigmaDataElementField(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sigma_data_element_field_is_hidden: Union[bool, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SigmaDataElementField" + + sigma_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this field is hidden (true) or not (false).""" sigma_data_element_field_formula: Union[str, None, UnsetType] = UNSET @@ -178,6 +182,11 @@ class SigmaDataElementField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -244,86 +253,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaDataElementField 
instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sigma_data_element is UNSET: - errors.append("sigma_data_element is required for creation") - if self.sigma_data_element_name is UNSET: - errors.append("sigma_data_element_name is required for creation") - if self.sigma_data_element_qualified_name is UNSET: - errors.append( - "sigma_data_element_qualified_name is required for creation" - ) - if self.sigma_page_name is UNSET: - errors.append("sigma_page_name is required for creation") - if self.sigma_page_qualified_name is UNSET: - errors.append("sigma_page_qualified_name is required for creation") - if self.sigma_workbook_name is UNSET: - errors.append("sigma_workbook_name is required for creation") - if self.sigma_workbook_qualified_name is UNSET: - errors.append("sigma_workbook_qualified_name is required for creation") - if errors: - raise 
ValueError(f"SigmaDataElementField validation failed: {errors}") - - def minimize(self) -> "SigmaDataElementField": - """ - Return a minimal copy of this SigmaDataElementField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaDataElementField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaDataElementField instance with only the minimum required fields. - """ - self.validate() - return SigmaDataElementField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaDataElementField": - """ - Create a :class:`RelatedSigmaDataElementField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaDataElementField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSigmaDataElementField(guid=self.guid) - return RelatedSigmaDataElementField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -381,7 +310,7 @@ def from_json( class SigmaDataElementFieldAttributes(AssetAttributes): """SigmaDataElementField-specific attributes for nested API format.""" - sigma_data_element_field_is_hidden: Union[bool, None, UnsetType] = UNSET + sigma_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this field is hidden (true) or not (false).""" sigma_data_element_field_formula: Union[str, None, UnsetType] = UNSET @@ -458,6 +387,11 @@ class SigmaDataElementFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + 
List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -549,6 +483,7 @@ class SigmaDataElementFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -574,7 +509,7 @@ def _populate_sigma_data_element_field_attrs( ) -> None: """Populate SigmaDataElementField-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sigma_data_element_field_is_hidden = obj.sigma_data_element_field_is_hidden + attrs.sigma_is_hidden = obj.sigma_is_hidden attrs.sigma_data_element_field_formula = obj.sigma_data_element_field_formula attrs.sigma_workbook_qualified_name = obj.sigma_workbook_qualified_name attrs.sigma_workbook_name = obj.sigma_workbook_name @@ -590,9 +525,7 @@ def _extract_sigma_data_element_field_attrs( ) -> dict: """Extract all SigmaDataElementField attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sigma_data_element_field_is_hidden"] = ( - attrs.sigma_data_element_field_is_hidden - ) + result["sigma_is_hidden"] = attrs.sigma_is_hidden result["sigma_data_element_field_formula"] = attrs.sigma_data_element_field_formula result["sigma_workbook_qualified_name"] = attrs.sigma_workbook_qualified_name result["sigma_workbook_name"] = attrs.sigma_workbook_name @@ -643,9 +576,6 @@ def _sigma_data_element_field_to_nested( is_incomplete=sigma_data_element_field.is_incomplete, provenance_type=sigma_data_element_field.provenance_type, home_id=sigma_data_element_field.home_id, - depth=sigma_data_element_field.depth, - immediate_upstream=sigma_data_element_field.immediate_upstream, - immediate_downstream=sigma_data_element_field.immediate_downstream, 
attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -681,6 +611,7 @@ def _sigma_data_element_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -689,9 +620,6 @@ def _sigma_data_element_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_data_element_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -723,9 +651,7 @@ def _sigma_data_element_field_from_nested_bytes( RelationField, ) -SigmaDataElementField.SIGMA_DATA_ELEMENT_FIELD_IS_HIDDEN = BooleanField( - "sigmaDataElementFieldIsHidden", "sigmaDataElementFieldIsHidden" -) +SigmaDataElementField.SIGMA_IS_HIDDEN = BooleanField("sigmaIsHidden", "sigmaIsHidden") SigmaDataElementField.SIGMA_DATA_ELEMENT_FIELD_FORMULA = KeywordField( "sigmaDataElementFieldFormula", "sigmaDataElementFieldFormula" ) @@ -778,6 +704,9 @@ def _sigma_data_element_field_from_nested_bytes( SigmaDataElementField.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SigmaDataElementField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SigmaDataElementField.MEANINGS = RelationField("meanings") SigmaDataElementField.MC_MONITORS = RelationField("mcMonitors") SigmaDataElementField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sigma_dataset.py b/pyatlan_v9/model/assets/sigma_dataset.py index fe41f2b10..1ae09ca53 100644 --- a/pyatlan_v9/model/assets/sigma_dataset.py +++ b/pyatlan_v9/model/assets/sigma_dataset.py @@ -40,6 +40,7 @@ from 
.data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import RelatedSigmaDataset, RelatedSigmaDatasetColumn +from .sigma_related import RelatedSigmaDatasetColumn from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -63,7 +64,7 @@ class SigmaDataset(Asset): Instance of a Sigma dataset in Atlan. """ - SIGMA_DATASET_COLUMN_COUNT: ClassVar[Any] = None + SIGMA_COLUMN_COUNT: ClassVar[Any] = None SIGMA_WORKBOOK_QUALIFIED_NAME: ClassVar[Any] = None SIGMA_WORKBOOK_NAME: ClassVar[Any] = None SIGMA_PAGE_QUALIFIED_NAME: ClassVar[Any] = None @@ -85,6 +86,7 @@ class SigmaDataset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,7 +105,9 @@ class SigmaDataset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sigma_dataset_column_count: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SigmaDataset" + + sigma_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this dataset.""" sigma_workbook_qualified_name: Union[str, None, UnsetType] = UNSET @@ -173,6 +177,11 @@ class SigmaDataset(Asset): ) """Rules where this 
dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -233,66 +242,6 @@ class SigmaDataset(Asset): def __post_init__(self) -> None: self.type_name = "SigmaDataset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SigmaDataset validation failed: {errors}") - - def minimize(self) -> "SigmaDataset": - """ - Return a minimal copy of this SigmaDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new SigmaDataset instance with only the minimum required fields. - """ - self.validate() - return SigmaDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaDataset": - """ - Create a :class:`RelatedSigmaDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaDataset reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSigmaDataset(guid=self.guid) - return RelatedSigmaDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -348,7 +297,7 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SigmaDatase class SigmaDatasetAttributes(AssetAttributes): """SigmaDataset-specific attributes for nested API format.""" - sigma_dataset_column_count: Union[int, None, UnsetType] = UNSET + sigma_column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this dataset.""" sigma_workbook_qualified_name: Union[str, None, UnsetType] = UNSET @@ -422,6 +371,11 @@ class SigmaDatasetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -515,6 +469,7 @@ class SigmaDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", 
"mc_incidents", @@ -540,7 +495,7 @@ def _populate_sigma_dataset_attrs( ) -> None: """Populate SigmaDataset-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sigma_dataset_column_count = obj.sigma_dataset_column_count + attrs.sigma_column_count = obj.sigma_column_count attrs.sigma_workbook_qualified_name = obj.sigma_workbook_qualified_name attrs.sigma_workbook_name = obj.sigma_workbook_name attrs.sigma_page_qualified_name = obj.sigma_page_qualified_name @@ -553,7 +508,7 @@ def _populate_sigma_dataset_attrs( def _extract_sigma_dataset_attrs(attrs: SigmaDatasetAttributes) -> dict: """Extract all SigmaDataset attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sigma_dataset_column_count"] = attrs.sigma_dataset_column_count + result["sigma_column_count"] = attrs.sigma_column_count result["sigma_workbook_qualified_name"] = attrs.sigma_workbook_qualified_name result["sigma_workbook_name"] = attrs.sigma_workbook_name result["sigma_page_qualified_name"] = attrs.sigma_page_qualified_name @@ -599,9 +554,6 @@ def _sigma_dataset_to_nested(sigma_dataset: SigmaDataset) -> SigmaDatasetNested: is_incomplete=sigma_dataset.is_incomplete, provenance_type=sigma_dataset.provenance_type, home_id=sigma_dataset.home_id, - depth=sigma_dataset.depth, - immediate_upstream=sigma_dataset.immediate_upstream, - immediate_downstream=sigma_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -635,6 +587,7 @@ def _sigma_dataset_from_nested(nested: SigmaDatasetNested) -> SigmaDataset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -643,9 +596,6 @@ def _sigma_dataset_from_nested(nested: SigmaDatasetNested) -> SigmaDataset: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -673,9 +623,7 @@ def _sigma_dataset_from_nested_bytes(data: bytes, serde: Serde) -> SigmaDataset: RelationField, ) -SigmaDataset.SIGMA_DATASET_COLUMN_COUNT = NumericField( - "sigmaDatasetColumnCount", "sigmaDatasetColumnCount" -) +SigmaDataset.SIGMA_COLUMN_COUNT = NumericField("sigmaColumnCount", "sigmaColumnCount") SigmaDataset.SIGMA_WORKBOOK_QUALIFIED_NAME = KeywordTextField( "sigmaWorkbookQualifiedName", "sigmaWorkbookQualifiedName", @@ -715,6 +663,9 @@ def _sigma_dataset_from_nested_bytes(data: bytes, serde: Serde) -> SigmaDataset: SigmaDataset.METRICS = RelationField("metrics") SigmaDataset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SigmaDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SigmaDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SigmaDataset.MEANINGS = RelationField("meanings") SigmaDataset.MC_MONITORS = RelationField("mcMonitors") SigmaDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sigma_dataset_column.py b/pyatlan_v9/model/assets/sigma_dataset_column.py index f7de4ee9c..6783a13df 100644 --- a/pyatlan_v9/model/assets/sigma_dataset_column.py +++ b/pyatlan_v9/model/assets/sigma_dataset_column.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related 
import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import RelatedSigmaDataset, RelatedSigmaDatasetColumn +from .sigma_related import RelatedSigmaDataset from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -87,6 +88,7 @@ class SigmaDatasetColumn(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +107,8 @@ class SigmaDatasetColumn(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SigmaDatasetColumn" + sigma_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the dataset in which this column exists.""" @@ -178,6 +182,11 @@ class SigmaDatasetColumn(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -242,76 +251,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaDatasetColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sigma_dataset is UNSET: - errors.append("sigma_dataset is required for creation") - if self.sigma_dataset_name is UNSET: - errors.append("sigma_dataset_name is required for creation") - if self.sigma_dataset_qualified_name is UNSET: - errors.append("sigma_dataset_qualified_name is required for creation") - if errors: - raise ValueError(f"SigmaDatasetColumn validation failed: {errors}") - - def minimize(self) -> "SigmaDatasetColumn": - """ - Return a minimal copy of this SigmaDatasetColumn with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaDatasetColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaDatasetColumn instance with only the minimum required fields. 
- """ - self.validate() - return SigmaDatasetColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaDatasetColumn": - """ - Create a :class:`RelatedSigmaDatasetColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaDatasetColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSigmaDatasetColumn(guid=self.guid) - return RelatedSigmaDatasetColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -446,6 +385,11 @@ class SigmaDatasetColumnRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -537,6 +481,7 @@ class SigmaDatasetColumnNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -627,9 +572,6 @@ def _sigma_dataset_column_to_nested( is_incomplete=sigma_dataset_column.is_incomplete, provenance_type=sigma_dataset_column.provenance_type, home_id=sigma_dataset_column.home_id, - depth=sigma_dataset_column.depth, - immediate_upstream=sigma_dataset_column.immediate_upstream, - immediate_downstream=sigma_dataset_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ 
-665,6 +607,7 @@ def _sigma_dataset_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -673,9 +616,6 @@ def _sigma_dataset_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_dataset_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -757,6 +697,9 @@ def _sigma_dataset_column_from_nested_bytes( SigmaDatasetColumn.METRICS = RelationField("metrics") SigmaDatasetColumn.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SigmaDatasetColumn.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SigmaDatasetColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SigmaDatasetColumn.MEANINGS = RelationField("meanings") SigmaDatasetColumn.MC_MONITORS = RelationField("mcMonitors") SigmaDatasetColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sigma_page.py b/pyatlan_v9/model/assets/sigma_page.py index 00d2c9878..787e6e727 100644 --- a/pyatlan_v9/model/assets/sigma_page.py +++ b/pyatlan_v9/model/assets/sigma_page.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,11 +50,7 @@ from .referenceable_related import 
RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import ( - RelatedSigmaDataElement, - RelatedSigmaPage, - RelatedSigmaWorkbook, -) +from .sigma_related import RelatedSigmaDataElement, RelatedSigmaWorkbook from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -90,6 +87,7 @@ class SigmaPage(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -109,6 +107,8 @@ class SigmaPage(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SigmaPage" + sigma_data_element_count: Union[int, None, UnsetType] = UNSET """Number of data elements on this page.""" @@ -179,6 +179,11 @@ class SigmaPage(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -246,76 +251,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaPage instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sigma_workbook is UNSET: - errors.append("sigma_workbook is required for creation") - if self.sigma_workbook_name is UNSET: - errors.append("sigma_workbook_name is required for creation") - if self.sigma_workbook_qualified_name is UNSET: - errors.append("sigma_workbook_qualified_name is required for creation") - if errors: - raise ValueError(f"SigmaPage validation failed: {errors}") - - def minimize(self) -> "SigmaPage": - """ - Return a minimal copy of this SigmaPage with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaPage with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaPage instance with only the minimum required fields. - """ - self.validate() - return SigmaPage(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaPage": - """ - Create a :class:`RelatedSigmaPage` reference from this instance. 
- - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaPage reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSigmaPage(guid=self.guid) - return RelatedSigmaPage(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -445,6 +380,11 @@ class SigmaPageRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -537,6 +477,7 @@ class SigmaPageNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -620,9 +561,6 @@ def _sigma_page_to_nested(sigma_page: SigmaPage) -> SigmaPageNested: is_incomplete=sigma_page.is_incomplete, provenance_type=sigma_page.provenance_type, home_id=sigma_page.home_id, - depth=sigma_page.depth, - immediate_upstream=sigma_page.immediate_upstream, - immediate_downstream=sigma_page.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -654,6 +592,7 @@ def _sigma_page_from_nested(nested: SigmaPageNested) -> SigmaPage: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -662,9 +601,6 @@ def _sigma_page_from_nested(nested: SigmaPageNested) -> SigmaPage: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_page_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -730,6 +666,9 @@ def _sigma_page_from_nested_bytes(data: bytes, serde: Serde) -> SigmaPage: SigmaPage.METRICS = RelationField("metrics") SigmaPage.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SigmaPage.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SigmaPage.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SigmaPage.MEANINGS = RelationField("meanings") SigmaPage.MC_MONITORS = RelationField("mcMonitors") SigmaPage.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sigma_related.py b/pyatlan_v9/model/assets/sigma_related.py index 57a58a497..62c3e8fca 100644 --- a/pyatlan_v9/model/assets/sigma_related.py +++ b/pyatlan_v9/model/assets/sigma_related.py @@ -96,7 +96,7 @@ class RelatedSigmaDataElementField(RelatedSigma): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SigmaDataElementField" so it serializes correctly - sigma_data_element_field_is_hidden: Union[bool, None, UnsetType] = UNSET + sigma_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this field is hidden (true) or not (false).""" sigma_data_element_field_formula: Union[str, None, UnsetType] = UNSET @@ -117,7 +117,7 @@ class RelatedSigmaDataset(RelatedSigma): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SigmaDataset" so it serializes correctly - sigma_dataset_column_count: Union[int, None, UnsetType] = UNSET + sigma_column_count: Union[int, None, UnsetType] = UNSET """Number of 
columns in this dataset.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/sigma_workbook.py b/pyatlan_v9/model/assets/sigma_workbook.py index 4081c2826..3246447af 100644 --- a/pyatlan_v9/model/assets/sigma_workbook.py +++ b/pyatlan_v9/model/assets/sigma_workbook.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sigma_related import RelatedSigmaPage, RelatedSigmaWorkbook +from .sigma_related import RelatedSigmaPage from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -85,6 +86,7 @@ class SigmaWorkbook(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class SigmaWorkbook(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SigmaWorkbook" + sigma_page_count: Union[int, None, UnsetType] = UNSET """Number of pages in this workbook.""" @@ -173,6 +177,11 @@ class SigmaWorkbook(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex 
entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -231,66 +240,6 @@ class SigmaWorkbook(Asset): def __post_init__(self) -> None: self.type_name = "SigmaWorkbook" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SigmaWorkbook instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SigmaWorkbook validation failed: {errors}") - - def minimize(self) -> "SigmaWorkbook": - """ - Return a minimal copy of this SigmaWorkbook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SigmaWorkbook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SigmaWorkbook instance with only the minimum required fields. 
- """ - self.validate() - return SigmaWorkbook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSigmaWorkbook": - """ - Create a :class:`RelatedSigmaWorkbook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSigmaWorkbook reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSigmaWorkbook(guid=self.guid) - return RelatedSigmaWorkbook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -420,6 +369,11 @@ class SigmaWorkbookRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -511,6 +465,7 @@ class SigmaWorkbookNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -595,9 +550,6 @@ def _sigma_workbook_to_nested(sigma_workbook: SigmaWorkbook) -> SigmaWorkbookNes is_incomplete=sigma_workbook.is_incomplete, provenance_type=sigma_workbook.provenance_type, home_id=sigma_workbook.home_id, - depth=sigma_workbook.depth, - immediate_upstream=sigma_workbook.immediate_upstream, - immediate_downstream=sigma_workbook.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -631,6 +583,7 @@ def 
_sigma_workbook_from_nested(nested: SigmaWorkbookNested) -> SigmaWorkbook: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -639,9 +592,6 @@ def _sigma_workbook_from_nested(nested: SigmaWorkbookNested) -> SigmaWorkbook: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sigma_workbook_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -711,6 +661,9 @@ def _sigma_workbook_from_nested_bytes(data: bytes, serde: Serde) -> SigmaWorkboo SigmaWorkbook.METRICS = RelationField("metrics") SigmaWorkbook.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SigmaWorkbook.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SigmaWorkbook.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SigmaWorkbook.MEANINGS = RelationField("meanings") SigmaWorkbook.MC_MONITORS = RelationField("mcMonitors") SigmaWorkbook.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sisense.py b/pyatlan_v9/model/assets/sisense.py index abe9c12a1..dbd7fa1c9 100644 --- a/pyatlan_v9/model/assets/sisense.py +++ b/pyatlan_v9/model/assets/sisense.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 
+49,6 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sisense_related import RelatedSisense from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -78,6 +78,7 @@ class Sisense(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class Sisense(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Sisense" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class Sisense(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class Sisense(Asset): def __post_init__(self) -> None: self.type_name = "Sisense" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Sisense instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Sisense validation failed: {errors}") - - def minimize(self) -> "Sisense": - """ - Return a minimal copy of this Sisense with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Sisense with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Sisense instance with only the minimum required fields. - """ - self.validate() - return Sisense(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisense": - """ - Create a :class:`RelatedSisense` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisense reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSisense(guid=self.guid) - return RelatedSisense(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class SisenseRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class SisenseNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _sisense_to_nested(sisense: Sisense) -> SisenseNested: is_incomplete=sisense.is_incomplete, provenance_type=sisense.provenance_type, home_id=sisense.home_id, - depth=sisense.depth, - immediate_upstream=sisense.immediate_upstream, - immediate_downstream=sisense.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -550,6 +501,7 @@ def _sisense_from_nested(nested: SisenseNested) -> Sisense: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -558,9 +510,6 @@ def _sisense_from_nested(nested: SisenseNested) -> Sisense: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -598,6 +547,9 @@ def _sisense_from_nested_bytes(data: bytes, serde: Serde) -> Sisense: Sisense.METRICS = RelationField("metrics") Sisense.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Sisense.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Sisense.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Sisense.MEANINGS = RelationField("meanings") Sisense.MC_MONITORS = RelationField("mcMonitors") Sisense.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sisense_dashboard.py b/pyatlan_v9/model/assets/sisense_dashboard.py index 14f56fa30..554df1c54 100644 --- a/pyatlan_v9/model/assets/sisense_dashboard.py +++ b/pyatlan_v9/model/assets/sisense_dashboard.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .sisense_related import ( - RelatedSisenseDashboard, RelatedSisenseDatamodel, RelatedSisenseFolder, RelatedSisenseWidget, @@ -70,7 +70,7 @@ class SisenseDashboard(Asset): """ SISENSE_DASHBOARD_FOLDER_QUALIFIED_NAME: ClassVar[Any] = None - SISENSE_DASHBOARD_WIDGET_COUNT: ClassVar[Any] = None + SISENSE_WIDGET_COUNT: ClassVar[Any] = None CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None 
OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None @@ -86,6 +86,7 @@ class SisenseDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -106,10 +107,12 @@ class SisenseDashboard(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SisenseDashboard" + sisense_dashboard_folder_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the folder in which this dashboard exists.""" - sisense_dashboard_widget_count: Union[int, None, UnsetType] = UNSET + sisense_widget_count: Union[int, None, UnsetType] = UNSET """Number of widgets in this dashboard.""" catalog_dataset_guid: Union[str, None, UnsetType] = UNSET @@ -161,6 +164,11 @@ class SisenseDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -231,72 +239,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sisense_datamodels is UNSET: - errors.append("sisense_datamodels is required for creation") - if errors: - raise ValueError(f"SisenseDashboard validation failed: {errors}") - - def minimize(self) -> "SisenseDashboard": - """ - Return a minimal copy of this SisenseDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseDashboard instance with only the minimum required fields. - """ - self.validate() - return SisenseDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseDashboard": - """ - Create a :class:`RelatedSisenseDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSisenseDashboard(guid=self.guid) - return RelatedSisenseDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -357,7 +299,7 @@ class SisenseDashboardAttributes(AssetAttributes): sisense_dashboard_folder_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the folder in which this dashboard exists.""" - sisense_dashboard_widget_count: Union[int, None, UnsetType] = UNSET + sisense_widget_count: Union[int, None, UnsetType] = UNSET """Number of widgets in this dashboard.""" catalog_dataset_guid: Union[str, None, UnsetType] = UNSET @@ -413,6 +355,11 @@ class SisenseDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -510,6 +457,7 @@ class SisenseDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -540,7 +488,7 @@ def _populate_sisense_dashboard_attrs( attrs.sisense_dashboard_folder_qualified_name = ( obj.sisense_dashboard_folder_qualified_name ) - attrs.sisense_dashboard_widget_count = obj.sisense_dashboard_widget_count + attrs.sisense_widget_count = obj.sisense_widget_count attrs.catalog_dataset_guid = obj.catalog_dataset_guid @@ -550,7 +498,7 @@ def _extract_sisense_dashboard_attrs(attrs: SisenseDashboardAttributes) -> dict: result["sisense_dashboard_folder_qualified_name"] = ( 
attrs.sisense_dashboard_folder_qualified_name ) - result["sisense_dashboard_widget_count"] = attrs.sisense_dashboard_widget_count + result["sisense_widget_count"] = attrs.sisense_widget_count result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -592,9 +540,6 @@ def _sisense_dashboard_to_nested( is_incomplete=sisense_dashboard.is_incomplete, provenance_type=sisense_dashboard.provenance_type, home_id=sisense_dashboard.home_id, - depth=sisense_dashboard.depth, - immediate_upstream=sisense_dashboard.immediate_upstream, - immediate_downstream=sisense_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -628,6 +573,7 @@ def _sisense_dashboard_from_nested(nested: SisenseDashboardNested) -> SisenseDas updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -636,9 +582,6 @@ def _sisense_dashboard_from_nested(nested: SisenseDashboardNested) -> SisenseDas is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -673,8 +616,8 @@ def _sisense_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> SisenseDa "sisenseDashboardFolderQualifiedName", "sisenseDashboardFolderQualifiedName.text", ) -SisenseDashboard.SISENSE_DASHBOARD_WIDGET_COUNT = NumericField( - "sisenseDashboardWidgetCount", "sisenseDashboardWidgetCount" +SisenseDashboard.SISENSE_WIDGET_COUNT = NumericField( + "sisenseWidgetCount", "sisenseWidgetCount" ) SisenseDashboard.CATALOG_DATASET_GUID = KeywordField( "catalogDatasetGuid", "catalogDatasetGuid" 
@@ -697,6 +640,9 @@ def _sisense_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> SisenseDa SisenseDashboard.METRICS = RelationField("metrics") SisenseDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SisenseDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SisenseDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SisenseDashboard.MEANINGS = RelationField("meanings") SisenseDashboard.MC_MONITORS = RelationField("mcMonitors") SisenseDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sisense_datamodel.py b/pyatlan_v9/model/assets/sisense_datamodel.py index d6f77fbd8..4a6295842 100644 --- a/pyatlan_v9/model/assets/sisense_datamodel.py +++ b/pyatlan_v9/model/assets/sisense_datamodel.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,11 +49,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sisense_related import ( - RelatedSisenseDashboard, - RelatedSisenseDatamodel, - RelatedSisenseDatamodelTable, -) +from .sisense_related import RelatedSisenseDashboard, RelatedSisenseDatamodelTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -67,12 +64,12 @@ class SisenseDatamodel(Asset): Instance of a Sisense datamodel in Atlan. These group tables together that you can use to build dashboards. 
""" - SISENSE_DATAMODEL_TABLE_COUNT: ClassVar[Any] = None + SISENSE_TABLE_COUNT: ClassVar[Any] = None SISENSE_DATAMODEL_SERVER: ClassVar[Any] = None - SISENSE_DATAMODEL_REVISION: ClassVar[Any] = None - SISENSE_DATAMODEL_LAST_BUILD_TIME: ClassVar[Any] = None - SISENSE_DATAMODEL_LAST_SUCCESSFUL_BUILD_TIME: ClassVar[Any] = None - SISENSE_DATAMODEL_LAST_PUBLISH_TIME: ClassVar[Any] = None + SISENSE_REVISION: ClassVar[Any] = None + SISENSE_LAST_BUILD_TIME: ClassVar[Any] = None + SISENSE_LAST_SUCCESSFUL_BUILD_TIME: ClassVar[Any] = None + SISENSE_LAST_PUBLISH_TIME: ClassVar[Any] = None SISENSE_DATAMODEL_TYPE: ClassVar[Any] = None SISENSE_DATAMODEL_RELATION_TYPE: ClassVar[Any] = None CATALOG_DATASET_GUID: ClassVar[Any] = None @@ -90,6 +87,7 @@ class SisenseDatamodel(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -109,22 +107,24 @@ class SisenseDatamodel(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sisense_datamodel_table_count: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SisenseDatamodel" + + sisense_table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this datamodel.""" sisense_datamodel_server: Union[str, None, UnsetType] = UNSET """Hostname of the server on which this datamodel was created.""" - sisense_datamodel_revision: Union[str, None, UnsetType] = UNSET + sisense_revision: Union[str, None, UnsetType] = UNSET """Revision of this datamodel.""" - sisense_datamodel_last_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built, in milliseconds.""" - sisense_datamodel_last_successful_build_time: Union[int, None, 
UnsetType] = UNSET + sisense_last_successful_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built successfully, in milliseconds.""" - sisense_datamodel_last_publish_time: Union[int, None, UnsetType] = UNSET + sisense_last_publish_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last published, in milliseconds.""" sisense_datamodel_type: Union[str, None, UnsetType] = UNSET @@ -182,6 +182,11 @@ class SisenseDatamodel(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -245,66 +250,6 @@ class SisenseDatamodel(Asset): def __post_init__(self) -> None: self.type_name = "SisenseDatamodel" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseDatamodel instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SisenseDatamodel validation failed: {errors}") - - def minimize(self) -> "SisenseDatamodel": - """ - Return a minimal copy of this SisenseDatamodel with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseDatamodel with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseDatamodel instance with only the minimum required fields. - """ - self.validate() - return SisenseDatamodel(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseDatamodel": - """ - Create a :class:`RelatedSisenseDatamodel` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseDatamodel reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSisenseDatamodel(guid=self.guid) - return RelatedSisenseDatamodel(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -362,22 +307,22 @@ def from_json( class SisenseDatamodelAttributes(AssetAttributes): """SisenseDatamodel-specific attributes for nested API format.""" - sisense_datamodel_table_count: Union[int, None, UnsetType] = UNSET + sisense_table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this datamodel.""" sisense_datamodel_server: Union[str, None, UnsetType] = UNSET """Hostname of the server on which this datamodel was created.""" - sisense_datamodel_revision: Union[str, None, UnsetType] = UNSET + sisense_revision: Union[str, None, UnsetType] = UNSET """Revision of this datamodel.""" - sisense_datamodel_last_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built, in milliseconds.""" - sisense_datamodel_last_successful_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_successful_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built successfully, in milliseconds.""" - sisense_datamodel_last_publish_time: Union[int, None, UnsetType] = UNSET + sisense_last_publish_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last published, in milliseconds.""" sisense_datamodel_type: Union[str, None, UnsetType] = UNSET @@ -439,6 +384,11 @@ class SisenseDatamodelRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) 
that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -535,6 +485,7 @@ class SisenseDatamodelNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -561,14 +512,12 @@ def _populate_sisense_datamodel_attrs( ) -> None: """Populate SisenseDatamodel-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sisense_datamodel_table_count = obj.sisense_datamodel_table_count + attrs.sisense_table_count = obj.sisense_table_count attrs.sisense_datamodel_server = obj.sisense_datamodel_server - attrs.sisense_datamodel_revision = obj.sisense_datamodel_revision - attrs.sisense_datamodel_last_build_time = obj.sisense_datamodel_last_build_time - attrs.sisense_datamodel_last_successful_build_time = ( - obj.sisense_datamodel_last_successful_build_time - ) - attrs.sisense_datamodel_last_publish_time = obj.sisense_datamodel_last_publish_time + attrs.sisense_revision = obj.sisense_revision + attrs.sisense_last_build_time = obj.sisense_last_build_time + attrs.sisense_last_successful_build_time = obj.sisense_last_successful_build_time + attrs.sisense_last_publish_time = obj.sisense_last_publish_time attrs.sisense_datamodel_type = obj.sisense_datamodel_type attrs.sisense_datamodel_relation_type = obj.sisense_datamodel_relation_type attrs.catalog_dataset_guid = obj.catalog_dataset_guid @@ -577,18 +526,14 @@ def _populate_sisense_datamodel_attrs( def _extract_sisense_datamodel_attrs(attrs: SisenseDatamodelAttributes) -> dict: """Extract all SisenseDatamodel attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sisense_datamodel_table_count"] = attrs.sisense_datamodel_table_count + result["sisense_table_count"] = attrs.sisense_table_count 
result["sisense_datamodel_server"] = attrs.sisense_datamodel_server - result["sisense_datamodel_revision"] = attrs.sisense_datamodel_revision - result["sisense_datamodel_last_build_time"] = ( - attrs.sisense_datamodel_last_build_time - ) - result["sisense_datamodel_last_successful_build_time"] = ( - attrs.sisense_datamodel_last_successful_build_time - ) - result["sisense_datamodel_last_publish_time"] = ( - attrs.sisense_datamodel_last_publish_time + result["sisense_revision"] = attrs.sisense_revision + result["sisense_last_build_time"] = attrs.sisense_last_build_time + result["sisense_last_successful_build_time"] = ( + attrs.sisense_last_successful_build_time ) + result["sisense_last_publish_time"] = attrs.sisense_last_publish_time result["sisense_datamodel_type"] = attrs.sisense_datamodel_type result["sisense_datamodel_relation_type"] = attrs.sisense_datamodel_relation_type result["catalog_dataset_guid"] = attrs.catalog_dataset_guid @@ -632,9 +577,6 @@ def _sisense_datamodel_to_nested( is_incomplete=sisense_datamodel.is_incomplete, provenance_type=sisense_datamodel.provenance_type, home_id=sisense_datamodel.home_id, - depth=sisense_datamodel.depth, - immediate_upstream=sisense_datamodel.immediate_upstream, - immediate_downstream=sisense_datamodel.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -668,6 +610,7 @@ def _sisense_datamodel_from_nested(nested: SisenseDatamodelNested) -> SisenseDat updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -676,9 +619,6 @@ def _sisense_datamodel_from_nested(nested: SisenseDatamodelNested) -> SisenseDat is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_datamodel_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -707,23 +647,21 @@ def _sisense_datamodel_from_nested_bytes(data: bytes, serde: Serde) -> SisenseDa RelationField, ) -SisenseDatamodel.SISENSE_DATAMODEL_TABLE_COUNT = NumericField( - "sisenseDatamodelTableCount", "sisenseDatamodelTableCount" +SisenseDatamodel.SISENSE_TABLE_COUNT = NumericField( + "sisenseTableCount", "sisenseTableCount" ) SisenseDatamodel.SISENSE_DATAMODEL_SERVER = KeywordField( "sisenseDatamodelServer", "sisenseDatamodelServer" ) -SisenseDatamodel.SISENSE_DATAMODEL_REVISION = KeywordField( - "sisenseDatamodelRevision", "sisenseDatamodelRevision" +SisenseDatamodel.SISENSE_REVISION = KeywordField("sisenseRevision", "sisenseRevision") +SisenseDatamodel.SISENSE_LAST_BUILD_TIME = NumericField( + "sisenseLastBuildTime", "sisenseLastBuildTime" ) -SisenseDatamodel.SISENSE_DATAMODEL_LAST_BUILD_TIME = NumericField( - "sisenseDatamodelLastBuildTime", "sisenseDatamodelLastBuildTime" +SisenseDatamodel.SISENSE_LAST_SUCCESSFUL_BUILD_TIME = NumericField( + "sisenseLastSuccessfulBuildTime", "sisenseLastSuccessfulBuildTime" ) -SisenseDatamodel.SISENSE_DATAMODEL_LAST_SUCCESSFUL_BUILD_TIME = NumericField( - "sisenseDatamodelLastSuccessfulBuildTime", "sisenseDatamodelLastSuccessfulBuildTime" -) -SisenseDatamodel.SISENSE_DATAMODEL_LAST_PUBLISH_TIME = NumericField( - "sisenseDatamodelLastPublishTime", "sisenseDatamodelLastPublishTime" +SisenseDatamodel.SISENSE_LAST_PUBLISH_TIME = NumericField( + "sisenseLastPublishTime", "sisenseLastPublishTime" ) SisenseDatamodel.SISENSE_DATAMODEL_TYPE = KeywordField( "sisenseDatamodelType", "sisenseDatamodelType" @@ -752,6 +690,9 @@ def _sisense_datamodel_from_nested_bytes(data: bytes, serde: Serde) -> SisenseDa SisenseDatamodel.METRICS = RelationField("metrics") SisenseDatamodel.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") 
SisenseDatamodel.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SisenseDatamodel.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SisenseDatamodel.MEANINGS = RelationField("meanings") SisenseDatamodel.MC_MONITORS = RelationField("mcMonitors") SisenseDatamodel.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sisense_datamodel_table.py b/pyatlan_v9/model/assets/sisense_datamodel_table.py index 8b82117f5..914b6c513 100644 --- a/pyatlan_v9/model/assets/sisense_datamodel_table.py +++ b/pyatlan_v9/model/assets/sisense_datamodel_table.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,11 +50,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .sisense_related import ( - RelatedSisenseDatamodel, - RelatedSisenseDatamodelTable, - RelatedSisenseWidget, -) +from .sisense_related import RelatedSisenseDatamodel, RelatedSisenseWidget from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -69,13 +66,13 @@ class SisenseDatamodelTable(Asset): """ SISENSE_DATAMODEL_QUALIFIED_NAME: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_COLUMN_COUNT: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_TYPE: ClassVar[Any] = None + SISENSE_COLUMN_COUNT: ClassVar[Any] = None + SISENSE_TYPE: ClassVar[Any] = None SISENSE_DATAMODEL_TABLE_EXPRESSION: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_IS_MATERIALIZED: 
ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_IS_HIDDEN: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_SCHEDULE: ClassVar[Any] = None - SISENSE_DATAMODEL_TABLE_LIVE_QUERY_SETTINGS: ClassVar[Any] = None + SISENSE_IS_MATERIALIZED: ClassVar[Any] = None + SISENSE_IS_HIDDEN: ClassVar[Any] = None + SISENSE_SCHEDULE: ClassVar[Any] = None + SISENSE_LIVE_QUERY_SETTINGS: ClassVar[Any] = None CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None @@ -91,6 +88,7 @@ class SisenseDatamodelTable(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -110,28 +108,30 @@ class SisenseDatamodelTable(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SisenseDatamodelTable" + sisense_datamodel_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the datamodel in which this datamodel table exists.""" - sisense_datamodel_table_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this datamodel table.""" - sisense_datamodel_table_type: Union[str, None, UnsetType] = UNSET + sisense_type: Union[str, None, UnsetType] = UNSET """Type of this datamodel table, for example: 'base' for regular tables, 'custom' for SQL expression-based tables.""" sisense_datamodel_table_expression: Union[str, None, UnsetType] = UNSET """SQL expression of this datamodel table.""" - sisense_datamodel_table_is_materialized: Union[bool, None, UnsetType] = UNSET + sisense_is_materialized: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is materialised (true) or not (false).""" - 
sisense_datamodel_table_is_hidden: Union[bool, None, UnsetType] = UNSET + sisense_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is hidden in Sisense (true) or not (false).""" - sisense_datamodel_table_schedule: Union[str, None, UnsetType] = UNSET + sisense_schedule: Union[str, None, UnsetType] = UNSET """JSON specifying the refresh schedule of this datamodel table.""" - sisense_datamodel_table_live_query_settings: Union[str, None, UnsetType] = UNSET + sisense_live_query_settings: Union[str, None, UnsetType] = UNSET """JSON specifying the LiveQuery settings of this datamodel table.""" catalog_dataset_guid: Union[str, None, UnsetType] = UNSET @@ -183,6 +183,11 @@ class SisenseDatamodelTable(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -250,76 +255,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseDatamodelTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sisense_datamodel is UNSET: - errors.append("sisense_datamodel is required for creation") - if self.sisense_datamodel_qualified_name is UNSET: - errors.append( - "sisense_datamodel_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"SisenseDatamodelTable validation failed: {errors}") - - def minimize(self) -> "SisenseDatamodelTable": - """ - Return a minimal copy of this SisenseDatamodelTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseDatamodelTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseDatamodelTable instance with only the minimum required fields. - """ - self.validate() - return SisenseDatamodelTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseDatamodelTable": - """ - Create a :class:`RelatedSisenseDatamodelTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseDatamodelTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSisenseDatamodelTable(guid=self.guid) - return RelatedSisenseDatamodelTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -380,25 +315,25 @@ class SisenseDatamodelTableAttributes(AssetAttributes): sisense_datamodel_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the datamodel in which this datamodel table exists.""" - sisense_datamodel_table_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this datamodel table.""" - sisense_datamodel_table_type: Union[str, None, UnsetType] = UNSET + sisense_type: Union[str, None, UnsetType] = UNSET """Type of this datamodel table, for example: 'base' for regular tables, 'custom' for SQL expression-based tables.""" sisense_datamodel_table_expression: Union[str, None, UnsetType] = UNSET """SQL expression of this datamodel table.""" - sisense_datamodel_table_is_materialized: Union[bool, None, UnsetType] = UNSET + sisense_is_materialized: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is materialised (true) or not (false).""" - sisense_datamodel_table_is_hidden: Union[bool, None, UnsetType] = UNSET + sisense_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is hidden in Sisense (true) or not (false).""" - sisense_datamodel_table_schedule: Union[str, None, UnsetType] = UNSET + sisense_schedule: Union[str, None, UnsetType] = UNSET """JSON specifying the refresh schedule of this datamodel table.""" - sisense_datamodel_table_live_query_settings: Union[str, None, UnsetType] = UNSET + sisense_live_query_settings: Union[str, None, UnsetType] = UNSET """JSON specifying the LiveQuery settings of this datamodel table.""" 
catalog_dataset_guid: Union[str, None, UnsetType] = UNSET @@ -454,6 +389,11 @@ class SisenseDatamodelTableRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -548,6 +488,7 @@ class SisenseDatamodelTableNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -575,19 +516,13 @@ def _populate_sisense_datamodel_table_attrs( """Populate SisenseDatamodelTable-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) attrs.sisense_datamodel_qualified_name = obj.sisense_datamodel_qualified_name - attrs.sisense_datamodel_table_column_count = ( - obj.sisense_datamodel_table_column_count - ) - attrs.sisense_datamodel_table_type = obj.sisense_datamodel_table_type + attrs.sisense_column_count = obj.sisense_column_count + attrs.sisense_type = obj.sisense_type attrs.sisense_datamodel_table_expression = obj.sisense_datamodel_table_expression - attrs.sisense_datamodel_table_is_materialized = ( - obj.sisense_datamodel_table_is_materialized - ) - attrs.sisense_datamodel_table_is_hidden = obj.sisense_datamodel_table_is_hidden - attrs.sisense_datamodel_table_schedule = obj.sisense_datamodel_table_schedule - attrs.sisense_datamodel_table_live_query_settings = ( - obj.sisense_datamodel_table_live_query_settings - ) + attrs.sisense_is_materialized = obj.sisense_is_materialized + attrs.sisense_is_hidden = obj.sisense_is_hidden + attrs.sisense_schedule = obj.sisense_schedule + attrs.sisense_live_query_settings = obj.sisense_live_query_settings attrs.catalog_dataset_guid = 
obj.catalog_dataset_guid @@ -597,23 +532,15 @@ def _extract_sisense_datamodel_table_attrs( """Extract all SisenseDatamodelTable attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) result["sisense_datamodel_qualified_name"] = attrs.sisense_datamodel_qualified_name - result["sisense_datamodel_table_column_count"] = ( - attrs.sisense_datamodel_table_column_count - ) - result["sisense_datamodel_table_type"] = attrs.sisense_datamodel_table_type + result["sisense_column_count"] = attrs.sisense_column_count + result["sisense_type"] = attrs.sisense_type result["sisense_datamodel_table_expression"] = ( attrs.sisense_datamodel_table_expression ) - result["sisense_datamodel_table_is_materialized"] = ( - attrs.sisense_datamodel_table_is_materialized - ) - result["sisense_datamodel_table_is_hidden"] = ( - attrs.sisense_datamodel_table_is_hidden - ) - result["sisense_datamodel_table_schedule"] = attrs.sisense_datamodel_table_schedule - result["sisense_datamodel_table_live_query_settings"] = ( - attrs.sisense_datamodel_table_live_query_settings - ) + result["sisense_is_materialized"] = attrs.sisense_is_materialized + result["sisense_is_hidden"] = attrs.sisense_is_hidden + result["sisense_schedule"] = attrs.sisense_schedule + result["sisense_live_query_settings"] = attrs.sisense_live_query_settings result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -655,9 +582,6 @@ def _sisense_datamodel_table_to_nested( is_incomplete=sisense_datamodel_table.is_incomplete, provenance_type=sisense_datamodel_table.provenance_type, home_id=sisense_datamodel_table.home_id, - depth=sisense_datamodel_table.depth, - immediate_upstream=sisense_datamodel_table.immediate_upstream, - immediate_downstream=sisense_datamodel_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -693,6 +617,7 @@ def _sisense_datamodel_table_from_nested( updated_by=nested.updated_by, 
classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -701,9 +626,6 @@ def _sisense_datamodel_table_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_datamodel_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -741,26 +663,24 @@ def _sisense_datamodel_table_from_nested_bytes( "sisenseDatamodelQualifiedName", "sisenseDatamodelQualifiedName.text", ) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_COLUMN_COUNT = NumericField( - "sisenseDatamodelTableColumnCount", "sisenseDatamodelTableColumnCount" -) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_TYPE = KeywordField( - "sisenseDatamodelTableType", "sisenseDatamodelTableType" +SisenseDatamodelTable.SISENSE_COLUMN_COUNT = NumericField( + "sisenseColumnCount", "sisenseColumnCount" ) +SisenseDatamodelTable.SISENSE_TYPE = KeywordField("sisenseType", "sisenseType") SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_EXPRESSION = KeywordField( "sisenseDatamodelTableExpression", "sisenseDatamodelTableExpression" ) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_IS_MATERIALIZED = BooleanField( - "sisenseDatamodelTableIsMaterialized", "sisenseDatamodelTableIsMaterialized" +SisenseDatamodelTable.SISENSE_IS_MATERIALIZED = BooleanField( + "sisenseIsMaterialized", "sisenseIsMaterialized" ) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_IS_HIDDEN = BooleanField( - "sisenseDatamodelTableIsHidden", "sisenseDatamodelTableIsHidden" +SisenseDatamodelTable.SISENSE_IS_HIDDEN = BooleanField( + "sisenseIsHidden", "sisenseIsHidden" ) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_SCHEDULE = KeywordField( - "sisenseDatamodelTableSchedule", 
"sisenseDatamodelTableSchedule" +SisenseDatamodelTable.SISENSE_SCHEDULE = KeywordField( + "sisenseSchedule", "sisenseSchedule" ) -SisenseDatamodelTable.SISENSE_DATAMODEL_TABLE_LIVE_QUERY_SETTINGS = KeywordField( - "sisenseDatamodelTableLiveQuerySettings", "sisenseDatamodelTableLiveQuerySettings" +SisenseDatamodelTable.SISENSE_LIVE_QUERY_SETTINGS = KeywordField( + "sisenseLiveQuerySettings", "sisenseLiveQuerySettings" ) SisenseDatamodelTable.CATALOG_DATASET_GUID = KeywordField( "catalogDatasetGuid", "catalogDatasetGuid" @@ -791,6 +711,9 @@ def _sisense_datamodel_table_from_nested_bytes( SisenseDatamodelTable.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SisenseDatamodelTable.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SisenseDatamodelTable.MEANINGS = RelationField("meanings") SisenseDatamodelTable.MC_MONITORS = RelationField("mcMonitors") SisenseDatamodelTable.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sisense_folder.py b/pyatlan_v9/model/assets/sisense_folder.py index 5b2a8a44c..c83835d6f 100644 --- a/pyatlan_v9/model/assets/sisense_folder.py +++ b/pyatlan_v9/model/assets/sisense_folder.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -84,6 +85,7 @@ class SisenseFolder(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: 
ClassVar[Any] = None @@ -105,6 +107,8 @@ class SisenseFolder(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SisenseFolder" + sisense_folder_parent_folder_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the parent folder in which this folder exists.""" @@ -157,6 +161,11 @@ class SisenseFolder(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -230,70 +239,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseFolder instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"SisenseFolder validation failed: {errors}") - - def minimize(self) -> "SisenseFolder": - """ - Return a minimal copy of this SisenseFolder with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseFolder with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseFolder instance with only the minimum required fields. - """ - self.validate() - return SisenseFolder(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseFolder": - """ - Create a :class:`RelatedSisenseFolder` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseFolder reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSisenseFolder(guid=self.guid) - return RelatedSisenseFolder(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -405,6 +350,11 @@ class SisenseFolderRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -505,6 +455,7 @@ class SisenseFolderNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -582,9 +533,6 @@ def _sisense_folder_to_nested(sisense_folder: SisenseFolder) -> SisenseFolderNes is_incomplete=sisense_folder.is_incomplete, provenance_type=sisense_folder.provenance_type, home_id=sisense_folder.home_id, - depth=sisense_folder.depth, - immediate_upstream=sisense_folder.immediate_upstream, - immediate_downstream=sisense_folder.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -618,6 +566,7 @@ def _sisense_folder_from_nested(nested: SisenseFolderNested) -> SisenseFolder: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -626,9 +575,6 @@ def _sisense_folder_from_nested(nested: SisenseFolderNested) -> SisenseFolder: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_folder_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -681,6 +627,9 @@ def _sisense_folder_from_nested_bytes(data: bytes, serde: Serde) -> SisenseFolde SisenseFolder.METRICS = RelationField("metrics") SisenseFolder.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SisenseFolder.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SisenseFolder.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SisenseFolder.MEANINGS = RelationField("meanings") SisenseFolder.MC_MONITORS = RelationField("mcMonitors") SisenseFolder.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sisense_related.py b/pyatlan_v9/model/assets/sisense_related.py index aa86c502b..7c64a5103 100644 --- a/pyatlan_v9/model/assets/sisense_related.py +++ b/pyatlan_v9/model/assets/sisense_related.py @@ -56,7 +56,7 @@ class RelatedSisenseDashboard(RelatedSisense): sisense_dashboard_folder_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the folder in which this dashboard exists.""" - sisense_dashboard_widget_count: Union[int, None, UnsetType] = UNSET + sisense_widget_count: Union[int, None, UnsetType] = UNSET """Number of widgets in this dashboard.""" def __post_init__(self) -> None: @@ -74,22 +74,22 @@ class RelatedSisenseDatamodel(RelatedSisense): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SisenseDatamodel" so it serializes correctly - sisense_datamodel_table_count: Union[int, None, UnsetType] = UNSET + sisense_table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this datamodel.""" sisense_datamodel_server: Union[str, None, UnsetType] = UNSET """Hostname of the server 
on which this datamodel was created.""" - sisense_datamodel_revision: Union[str, None, UnsetType] = UNSET + sisense_revision: Union[str, None, UnsetType] = UNSET """Revision of this datamodel.""" - sisense_datamodel_last_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built, in milliseconds.""" - sisense_datamodel_last_successful_build_time: Union[int, None, UnsetType] = UNSET + sisense_last_successful_build_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last built successfully, in milliseconds.""" - sisense_datamodel_last_publish_time: Union[int, None, UnsetType] = UNSET + sisense_last_publish_time: Union[int, None, UnsetType] = UNSET """Time (epoch) when this datamodel was last published, in milliseconds.""" sisense_datamodel_type: Union[str, None, UnsetType] = UNSET @@ -116,25 +116,25 @@ class RelatedSisenseDatamodelTable(RelatedSisense): sisense_datamodel_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the datamodel in which this datamodel table exists.""" - sisense_datamodel_table_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns present in this datamodel table.""" - sisense_datamodel_table_type: Union[str, None, UnsetType] = UNSET + sisense_type: Union[str, None, UnsetType] = UNSET """Type of this datamodel table, for example: 'base' for regular tables, 'custom' for SQL expression-based tables.""" sisense_datamodel_table_expression: Union[str, None, UnsetType] = UNSET """SQL expression of this datamodel table.""" - sisense_datamodel_table_is_materialized: Union[bool, None, UnsetType] = UNSET + sisense_is_materialized: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is materialised (true) or not (false).""" - sisense_datamodel_table_is_hidden: Union[bool, None, UnsetType] = UNSET + 
sisense_is_hidden: Union[bool, None, UnsetType] = UNSET """Whether this datamodel table is hidden in Sisense (true) or not (false).""" - sisense_datamodel_table_schedule: Union[str, None, UnsetType] = UNSET + sisense_schedule: Union[str, None, UnsetType] = UNSET """JSON specifying the refresh schedule of this datamodel table.""" - sisense_datamodel_table_live_query_settings: Union[str, None, UnsetType] = UNSET + sisense_live_query_settings: Union[str, None, UnsetType] = UNSET """JSON specifying the LiveQuery settings of this datamodel table.""" def __post_init__(self) -> None: @@ -170,13 +170,13 @@ class RelatedSisenseWidget(RelatedSisense): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SisenseWidget" so it serializes correctly - sisense_widget_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns used in this widget.""" - sisense_widget_sub_type: Union[str, None, UnsetType] = UNSET + sisense_sub_type: Union[str, None, UnsetType] = UNSET """Subtype of this widget.""" - sisense_widget_size: Union[str, None, UnsetType] = UNSET + sisense_size: Union[str, None, UnsetType] = UNSET """Size of this widget.""" sisense_widget_dashboard_qualified_name: Union[str, None, UnsetType] = UNSET diff --git a/pyatlan_v9/model/assets/sisense_widget.py b/pyatlan_v9/model/assets/sisense_widget.py index 42efc6091..379ec926c 100644 --- a/pyatlan_v9/model/assets/sisense_widget.py +++ b/pyatlan_v9/model/assets/sisense_widget.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -53,7 +54,6 
@@ RelatedSisenseDashboard, RelatedSisenseDatamodelTable, RelatedSisenseFolder, - RelatedSisenseWidget, ) from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob @@ -69,9 +69,9 @@ class SisenseWidget(Asset): Instance of a Sisense widget in Atlan. """ - SISENSE_WIDGET_COLUMN_COUNT: ClassVar[Any] = None - SISENSE_WIDGET_SUB_TYPE: ClassVar[Any] = None - SISENSE_WIDGET_SIZE: ClassVar[Any] = None + SISENSE_COLUMN_COUNT: ClassVar[Any] = None + SISENSE_SUB_TYPE: ClassVar[Any] = None + SISENSE_SIZE: ClassVar[Any] = None SISENSE_WIDGET_DASHBOARD_QUALIFIED_NAME: ClassVar[Any] = None SISENSE_WIDGET_FOLDER_QUALIFIED_NAME: ClassVar[Any] = None CATALOG_DATASET_GUID: ClassVar[Any] = None @@ -89,6 +89,7 @@ class SisenseWidget(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -109,13 +110,15 @@ class SisenseWidget(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - sisense_widget_column_count: Union[int, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SisenseWidget" + + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns used in this widget.""" - sisense_widget_sub_type: Union[str, None, UnsetType] = UNSET + sisense_sub_type: Union[str, None, UnsetType] = UNSET """Subtype of this widget.""" - sisense_widget_size: Union[str, None, UnsetType] = UNSET + sisense_size: Union[str, None, UnsetType] = UNSET """Size of this widget.""" sisense_widget_dashboard_qualified_name: Union[str, None, UnsetType] = UNSET @@ -173,6 +176,11 @@ class SisenseWidget(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + 
"""Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -247,72 +255,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SisenseWidget instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.sisense_dashboard is UNSET: - errors.append("sisense_dashboard is required for creation") - if errors: - raise ValueError(f"SisenseWidget validation failed: {errors}") - - def minimize(self) -> "SisenseWidget": - """ - Return a minimal copy of this SisenseWidget with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SisenseWidget with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SisenseWidget instance with only the minimum required fields. - """ - self.validate() - return SisenseWidget(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSisenseWidget": - """ - Create a :class:`RelatedSisenseWidget` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSisenseWidget reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSisenseWidget(guid=self.guid) - return RelatedSisenseWidget(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -368,13 +310,13 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SisenseWidg class SisenseWidgetAttributes(AssetAttributes): """SisenseWidget-specific attributes for nested API format.""" - sisense_widget_column_count: Union[int, None, UnsetType] = UNSET + sisense_column_count: Union[int, None, UnsetType] = UNSET """Number of columns used in this widget.""" - sisense_widget_sub_type: Union[str, None, UnsetType] = UNSET + sisense_sub_type: Union[str, None, UnsetType] = UNSET """Subtype of this widget.""" - sisense_widget_size: Union[str, None, UnsetType] = UNSET + sisense_size: Union[str, None, UnsetType] = UNSET """Size of this widget.""" sisense_widget_dashboard_qualified_name: Union[str, None, UnsetType] = UNSET @@ -436,6 +378,11 @@ class SisenseWidgetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + 
gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -535,6 +482,7 @@ class SisenseWidgetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -562,9 +510,9 @@ def _populate_sisense_widget_attrs( ) -> None: """Populate SisenseWidget-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sisense_widget_column_count = obj.sisense_widget_column_count - attrs.sisense_widget_sub_type = obj.sisense_widget_sub_type - attrs.sisense_widget_size = obj.sisense_widget_size + attrs.sisense_column_count = obj.sisense_column_count + attrs.sisense_sub_type = obj.sisense_sub_type + attrs.sisense_size = obj.sisense_size attrs.sisense_widget_dashboard_qualified_name = ( obj.sisense_widget_dashboard_qualified_name ) @@ -577,9 +525,9 @@ def _populate_sisense_widget_attrs( def _extract_sisense_widget_attrs(attrs: SisenseWidgetAttributes) -> dict: """Extract all SisenseWidget attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sisense_widget_column_count"] = attrs.sisense_widget_column_count - result["sisense_widget_sub_type"] = attrs.sisense_widget_sub_type - result["sisense_widget_size"] = attrs.sisense_widget_size + result["sisense_column_count"] = attrs.sisense_column_count + result["sisense_sub_type"] = attrs.sisense_sub_type + result["sisense_size"] = attrs.sisense_size result["sisense_widget_dashboard_qualified_name"] = ( attrs.sisense_widget_dashboard_qualified_name ) @@ -623,9 +571,6 @@ def _sisense_widget_to_nested(sisense_widget: SisenseWidget) -> SisenseWidgetNes 
is_incomplete=sisense_widget.is_incomplete, provenance_type=sisense_widget.provenance_type, home_id=sisense_widget.home_id, - depth=sisense_widget.depth, - immediate_upstream=sisense_widget.immediate_upstream, - immediate_downstream=sisense_widget.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -659,6 +604,7 @@ def _sisense_widget_from_nested(nested: SisenseWidgetNested) -> SisenseWidget: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -667,9 +613,6 @@ def _sisense_widget_from_nested(nested: SisenseWidgetNested) -> SisenseWidget: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sisense_widget_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -699,15 +642,11 @@ def _sisense_widget_from_nested_bytes(data: bytes, serde: Serde) -> SisenseWidge RelationField, ) -SisenseWidget.SISENSE_WIDGET_COLUMN_COUNT = NumericField( - "sisenseWidgetColumnCount", "sisenseWidgetColumnCount" -) -SisenseWidget.SISENSE_WIDGET_SUB_TYPE = KeywordField( - "sisenseWidgetSubType", "sisenseWidgetSubType" -) -SisenseWidget.SISENSE_WIDGET_SIZE = KeywordField( - "sisenseWidgetSize", "sisenseWidgetSize" +SisenseWidget.SISENSE_COLUMN_COUNT = NumericField( + "sisenseColumnCount", "sisenseColumnCount" ) +SisenseWidget.SISENSE_SUB_TYPE = KeywordField("sisenseSubType", "sisenseSubType") +SisenseWidget.SISENSE_SIZE = KeywordField("sisenseSize", "sisenseSize") SisenseWidget.SISENSE_WIDGET_DASHBOARD_QUALIFIED_NAME = KeywordTextField( "sisenseWidgetDashboardQualifiedName", "sisenseWidgetDashboardQualifiedName", @@ -737,6 
+676,9 @@ def _sisense_widget_from_nested_bytes(data: bytes, serde: Serde) -> SisenseWidge SisenseWidget.METRICS = RelationField("metrics") SisenseWidget.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SisenseWidget.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SisenseWidget.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SisenseWidget.MEANINGS = RelationField("meanings") SisenseWidget.MC_MONITORS = RelationField("mcMonitors") SisenseWidget.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/skill.py b/pyatlan_v9/model/assets/skill.py new file mode 100644 index 000000000..20cbbf27f --- /dev/null +++ b/pyatlan_v9/model/assets/skill.py @@ -0,0 +1,553 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Skill asset model with flattened inheritance. + +This module provides: +- Skill: Flat asset class (easy to use) +- SkillAttributes: Nested attributes struct (extends AssetAttributes) +- SkillNested: Nested API format struct +""" + +from __future__ import annotations + +from typing import Any, ClassVar, Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .airflow_related import RelatedAirflowTask +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .model_related import 
RelatedModelAttribute, RelatedModelEntity +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor +from .partial_related import RelatedPartialField, RelatedPartialObject +from .process_related import RelatedProcess +from .referenceable_related import RelatedReferenceable +from .resource_related import RelatedFile, RelatedLink, RelatedReadme +from .schema_registry_related import RelatedSchemaRegistrySubject +from .skill_artifact_related import RelatedSkillArtifact +from .soda_related import RelatedSodaCheck +from .spark_related import RelatedSparkJob +from pyatlan_v9.model.conversion_utils import categorize_relationships, merge_relationships +from pyatlan_v9.model.serde import Serde, get_serde +from pyatlan_v9.model.transform import register_asset + +# ============================================================================= +# FLAT ASSET CLASS +# ============================================================================= + +@register_asset +class Skill(Asset): + """ + Base class for skills in Atlan. A skill is a reusable, versionable unit of capability that can be consumed by agents. 
+ """ + + SKILL_VERSION: ClassVar[Any] = None + CATALOG_DATASET_GUID: ClassVar[Any] = None + INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None + OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None + ANOMALO_CHECKS: ClassVar[Any] = None + APPLICATION: ClassVar[Any] = None + APPLICATION_FIELD: ClassVar[Any] = None + DATA_CONTRACT_LATEST: ClassVar[Any] = None + DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None + OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None + MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None + MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None + METRICS: ClassVar[Any] = None + DQ_BASE_DATASET_RULES: ClassVar[Any] = None + DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None + MEANINGS: ClassVar[Any] = None + MC_MONITORS: ClassVar[Any] = None + MC_INCIDENTS: ClassVar[Any] = None + PARTIAL_CHILD_FIELDS: ClassVar[Any] = None + PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None + INPUT_TO_PROCESSES: ClassVar[Any] = None + OUTPUT_FROM_PROCESSES: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None + USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None + FILES: ClassVar[Any] = None + LINKS: ClassVar[Any] = None + README: ClassVar[Any] = None + SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None + SKILL_ARTIFACTS: ClassVar[Any] = None + SODA_CHECKS: ClassVar[Any] = None + INPUT_TO_SPARK_JOBS: ClassVar[Any] = None + OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + + type_name: Union[str, UnsetType] = "Skill" + + skill_version: Union[str, None, UnsetType] = UNSET + """Version identifier for this skill.""" + + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET + """Unique identifier of the dataset this asset belongs to.""" + + input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + """Tasks to which this asset provides input.""" + + output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET + 
"""Tasks from which this asset is output.""" + + anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET + """Checks that run on this asset.""" + + application: Union[RelatedApplication, None, UnsetType] = UNSET + """Application owning the Asset.""" + + application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET + """ApplicationField owning the Asset.""" + + data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest version of the data contract (in any status) for this asset.""" + + data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET + """Latest certified version of the data contract for this asset.""" + + output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an output port.""" + + input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET + """Data products for which this asset is an input port.""" + + model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET + """Entities implemented by this asset.""" + + model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET + """Attributes implemented by this asset.""" + + metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET + """""" + + dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules that are applied on this dataset.""" + + dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET + """Rules where this dataset is referenced.""" + + gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET + """Glossary terms that are linked to this asset.""" + + mc_monitors: Union[List[RelatedMCMonitor], 
None, UnsetType] = UNSET + """Monitors that observe this asset.""" + + mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET + """""" + + partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET + """Partial fields contained in the asset.""" + + partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET + """Partial objects contained in the asset.""" + + input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes to which this asset provides input.""" + + output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET + """Processes from which this asset is produced as output.""" + + user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET + """""" + + files: Union[List[RelatedFile], None, UnsetType] = UNSET + """""" + + links: Union[List[RelatedLink], None, UnsetType] = UNSET + """Links that are attached to this asset.""" + + readme: Union[RelatedReadme, None, UnsetType] = UNSET + """README that is linked to this asset.""" + + schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET + """Schema registry subjects associated with this asset.""" + + skill_artifacts: Union[List[RelatedSkillArtifact], None, UnsetType] = UNSET + """Artifacts belonging to this skill.""" + + soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET + """""" + + input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET + """""" + + def __post_init__(self) -> None: + self.type_name = "Skill" + + + + # ========================================================================= + # Optimized Serialization Methods (override Asset base class) + # ========================================================================= 
    def to_json(self, nested: bool = True, serde: Serde | None = None) -> str:
        """
        Convert to JSON string using optimized nested struct serialization.

        Args:
            nested: If True (default), use nested API format. If False, use flat format.
            serde: Optional Serde instance for encoder reuse. Uses shared singleton if None.

        Returns:
            JSON string representation
        """
        if serde is None:
            serde = get_serde()
        if nested:
            return self.to_nested_bytes(serde).decode("utf-8")
        else:
            return serde.encode(self).decode("utf-8")

    def to_nested_bytes(self, serde: Serde | None = None) -> bytes:
        """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate)."""
        if serde is None:
            serde = get_serde()
        return _skill_to_nested_bytes(self, serde)

    @staticmethod
    def from_json(json_data: str | bytes, serde: Serde | None = None) -> Skill:
        """
        Create from JSON string or bytes using optimized nested struct deserialization.

        Args:
            json_data: JSON string or bytes to deserialize
            serde: Optional Serde instance for decoder reuse. Uses shared singleton if None.

        Returns:
            Skill instance
        """
        if isinstance(json_data, str):
            json_data = json_data.encode("utf-8")
        if serde is None:
            serde = get_serde()
        return _skill_from_nested_bytes(json_data, serde)


# =============================================================================
# NESTED FORMAT CLASSES
# =============================================================================
# NOTE(review): field declaration order in these structs presumably affects
# msgspec Struct layout and must stay in sync with the generator — confirm
# before reordering anything here.

class SkillAttributes(AssetAttributes):
    """Skill-specific attributes for nested API format."""

    skill_version: Union[str, None, UnsetType] = UNSET
    """Version identifier for this skill."""

    catalog_dataset_guid: Union[str, None, UnsetType] = UNSET
    """Unique identifier of the dataset this asset belongs to."""

class SkillRelationshipAttributes(AssetRelationshipAttributes):
    """Skill-specific relationship attributes for nested API format."""

    input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks to which this asset provides input."""

    output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks from which this asset is output."""

    anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET
    """Checks that run on this asset."""

    application: Union[RelatedApplication, None, UnsetType] = UNSET
    """Application owning the Asset."""

    application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET
    """ApplicationField owning the Asset."""

    data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest version of the data contract (in any status) for this asset."""

    data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest certified version of the data contract for this asset."""

    output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an output port."""

    input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an input port."""

    model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET
    """Entities implemented by this asset."""

    model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET
    """Attributes implemented by this asset."""

    metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET
    """"""

    dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules that are applied on this dataset."""

    dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules where this dataset is referenced."""

    gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET
    """Dataplex entries (assets) that have aspects of this Aspect Type attached."""

    meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET
    """Glossary terms that are linked to this asset."""

    mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET
    """Monitors that observe this asset."""

    mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET
    """"""

    partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET
    """Partial fields contained in the asset."""

    partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET
    """Partial objects contained in the asset."""

    input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes to which this asset provides input."""

    output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes from which this asset is produced as output."""

    user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    files: Union[List[RelatedFile], None, UnsetType] = UNSET
    """"""

    links: Union[List[RelatedLink], None, UnsetType] = UNSET
    """Links that are attached to this asset."""

    readme: Union[RelatedReadme, None, UnsetType] = UNSET
    """README that is linked to this asset."""

    schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET
    """Schema registry subjects associated with this asset."""

    skill_artifacts: Union[List[RelatedSkillArtifact], None, UnsetType] = UNSET
    """Artifacts belonging to this skill."""

    soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET
    """"""

    input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

class SkillNested(AssetNested):
    """Skill in nested API format for high-performance serialization."""

    attributes: Union[SkillAttributes, UnsetType] = UNSET
    relationship_attributes: Union[SkillRelationshipAttributes, UnsetType] = UNSET
    append_relationship_attributes: Union[SkillRelationshipAttributes, UnsetType] = UNSET
    remove_relationship_attributes: Union[SkillRelationshipAttributes, UnsetType] = UNSET

# =============================================================================
# CONVERSION HELPERS & CONSTANTS
# =============================================================================

# Every relationship field name on Skill: the shared Asset-level relationship
# names first, then the Skill-specific ones.
_SKILL_REL_FIELDS: List[str] = [
    *_ASSET_REL_FIELDS,
    "input_to_airflow_tasks",
    "output_from_airflow_tasks",
    "anomalo_checks",
    "application",
    "application_field",
    "data_contract_latest",
    "data_contract_latest_certified",
    "output_port_data_products",
    "input_port_data_products",
    "model_implemented_entities",
    "model_implemented_attributes",
    "metrics",
    "dq_base_dataset_rules",
    "dq_reference_dataset_rules",
    "gcp_dataplex_aspect_type_metadata_entities",
    "meanings",
    "mc_monitors",
    "mc_incidents",
    "partial_child_fields",
    "partial_child_objects",
    "input_to_processes",
    "output_from_processes",
    "user_def_relationship_to",
    "user_def_relationship_from",
    "files",
    "links",
    "readme",
    "schema_registry_subjects",
    "skill_artifacts",
    "soda_checks",
    "input_to_spark_jobs",
    "output_from_spark_jobs",
]

def _populate_skill_attrs(attrs: SkillAttributes, obj: Skill) -> None:
    """Populate Skill-specific attributes on the attrs struct."""
    _populate_asset_attrs(attrs, obj)
    attrs.skill_version = obj.skill_version
    attrs.catalog_dataset_guid = obj.catalog_dataset_guid

def _extract_skill_attrs(attrs: SkillAttributes) -> dict:
    """Extract all Skill attributes from the attrs struct into a flat dict."""
    result = _extract_asset_attrs(attrs)
    result["skill_version"] = attrs.skill_version
    result["catalog_dataset_guid"] = attrs.catalog_dataset_guid
    return result

# =============================================================================
# CONVERSION FUNCTIONS
# =============================================================================


def _skill_to_nested(skill: Skill) -> SkillNested:
    """Convert flat Skill to nested format."""
    attrs = SkillAttributes()
    _populate_skill_attrs(attrs, skill)
    # Categorize relationships by save semantic (REPLACE, APPEND, REMOVE)
    replace_rels, append_rels, remove_rels = categorize_relationships(
        skill, _SKILL_REL_FIELDS, SkillRelationshipAttributes
    )
    return SkillNested(
        guid=skill.guid,
        type_name=skill.type_name,
        status=skill.status,
        version=skill.version,
        create_time=skill.create_time,
        update_time=skill.update_time,
        created_by=skill.created_by,
        updated_by=skill.updated_by,
        classifications=skill.classifications,
        classification_names=skill.classification_names,
        meanings=skill.meanings,
        labels=skill.labels,
        business_attributes=skill.business_attributes,
        custom_attributes=skill.custom_attributes,
        pending_tasks=skill.pending_tasks,
        proxy=skill.proxy,
        is_incomplete=skill.is_incomplete,
        provenance_type=skill.provenance_type,
        home_id=skill.home_id,
        attributes=attrs,
        relationship_attributes=replace_rels,
        append_relationship_attributes=append_rels,
        remove_relationship_attributes=remove_rels,
    )

def _skill_from_nested(nested: SkillNested) -> Skill:
    """Convert nested format to flat Skill."""
    attrs = nested.attributes if nested.attributes is not UNSET else SkillAttributes()
    # Merge relationships from all three buckets
    merged_rels = merge_relationships(
        nested.relationship_attributes,
        nested.append_relationship_attributes,
        nested.remove_relationship_attributes,
        _SKILL_REL_FIELDS,
        SkillRelationshipAttributes
    )
    return Skill(
        guid=nested.guid,
        type_name=nested.type_name,
        status=nested.status,
        version=nested.version,
        create_time=nested.create_time,
        update_time=nested.update_time,
        created_by=nested.created_by,
        updated_by=nested.updated_by,
        classifications=nested.classifications,
        classification_names=nested.classification_names,
        meanings=nested.meanings,
        labels=nested.labels,
        business_attributes=nested.business_attributes,
        custom_attributes=nested.custom_attributes,
        pending_tasks=nested.pending_tasks,
        proxy=nested.proxy,
        is_incomplete=nested.is_incomplete,
        provenance_type=nested.provenance_type,
        home_id=nested.home_id,
        **_extract_skill_attrs(attrs),
        # Merged relationship attributes
        **merged_rels,
    )

def _skill_to_nested_bytes(skill: Skill, serde: Serde) -> bytes:
    """Convert flat Skill to nested JSON bytes."""
    return serde.encode(_skill_to_nested(skill))


def _skill_from_nested_bytes(data: bytes, serde: Serde) -> Skill:
    """Convert nested JSON bytes to flat Skill."""
    nested = serde.decode(data, SkillNested)
    return _skill_from_nested(nested)

# ---------------------------------------------------------------------------
# Deferred field descriptor initialization
# ---------------------------------------------------------------------------
from pyatlan.model.fields.atlan_fields import (  # noqa: E402
    KeywordField,
    RelationField,
)

# Plain (non-relationship) search-field descriptors.
Skill.SKILL_VERSION = KeywordField("skillVersion", "skillVersion")
Skill.CATALOG_DATASET_GUID = KeywordField("catalogDatasetGuid", "catalogDatasetGuid")

# Relationship descriptors: one (CLASS_VAR, apiName) pair per relationship.
for _class_var, _api_name in (
    ("INPUT_TO_AIRFLOW_TASKS", "inputToAirflowTasks"),
    ("OUTPUT_FROM_AIRFLOW_TASKS", "outputFromAirflowTasks"),
    ("ANOMALO_CHECKS", "anomaloChecks"),
    ("APPLICATION", "application"),
    ("APPLICATION_FIELD", "applicationField"),
    ("DATA_CONTRACT_LATEST", "dataContractLatest"),
    ("DATA_CONTRACT_LATEST_CERTIFIED", "dataContractLatestCertified"),
    ("OUTPUT_PORT_DATA_PRODUCTS", "outputPortDataProducts"),
    ("INPUT_PORT_DATA_PRODUCTS", "inputPortDataProducts"),
    ("MODEL_IMPLEMENTED_ENTITIES", "modelImplementedEntities"),
    ("MODEL_IMPLEMENTED_ATTRIBUTES", "modelImplementedAttributes"),
    ("METRICS", "metrics"),
    ("DQ_BASE_DATASET_RULES", "dqBaseDatasetRules"),
    ("DQ_REFERENCE_DATASET_RULES", "dqReferenceDatasetRules"),
    ("GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES", "gcpDataplexAspectTypeMetadataEntities"),
    ("MEANINGS", "meanings"),
    ("MC_MONITORS", "mcMonitors"),
    ("MC_INCIDENTS", "mcIncidents"),
    ("PARTIAL_CHILD_FIELDS", "partialChildFields"),
    ("PARTIAL_CHILD_OBJECTS", "partialChildObjects"),
    ("INPUT_TO_PROCESSES", "inputToProcesses"),
    ("OUTPUT_FROM_PROCESSES", "outputFromProcesses"),
    ("USER_DEF_RELATIONSHIP_TO", "userDefRelationshipTo"),
    ("USER_DEF_RELATIONSHIP_FROM", "userDefRelationshipFrom"),
    ("FILES", "files"),
):
    setattr(Skill, _class_var, RelationField(_api_name))
del _class_var, _api_name
+Skill.LINKS = RelationField("links") +Skill.README = RelationField("readme") +Skill.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +Skill.SKILL_ARTIFACTS = RelationField("skillArtifacts") +Skill.SODA_CHECKS = RelationField("sodaChecks") +Skill.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs") +Skill.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs") \ No newline at end of file diff --git a/pyatlan_v9/model/assets/skill_artifact.py b/pyatlan_v9/model/assets/skill_artifact.py new file mode 100644 index 000000000..960bc1be3 --- /dev/null +++ b/pyatlan_v9/model/assets/skill_artifact.py @@ -0,0 +1,614 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# ruff: noqa: ARG002 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +SkillArtifact asset model with flattened inheritance. + +This module provides: +- SkillArtifact: Flat asset class (easy to use) +- SkillArtifactAttributes: Nested attributes struct (extends AssetAttributes) +- SkillArtifactNested: Nested API format struct +""" + +from __future__ import annotations + +from typing import Any, ClassVar, Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .airflow_related import RelatedAirflowTask +from .anomalo_related import RelatedAnomaloCheck +from .app_related import RelatedApplication, RelatedApplicationField +from .asset import ( + _ASSET_REL_FIELDS, + Asset, + AssetAttributes, + AssetNested, + AssetRelationshipAttributes, + _extract_asset_attrs, + _populate_asset_attrs, +) +from .data_contract_related import RelatedDataContract +from .data_mesh_related import RelatedDataProduct +from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType +from .gtc_related import RelatedAtlasGlossaryTerm +from .model_related import RelatedModelAttribute, RelatedModelEntity +from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor 
from .partial_related import RelatedPartialField, RelatedPartialObject
from .process_related import RelatedProcess
from .referenceable_related import RelatedReferenceable
from .resource_related import RelatedFile, RelatedLink, RelatedReadme
from .schema_registry_related import RelatedSchemaRegistrySubject
from .skill_related import RelatedSkill
from .soda_related import RelatedSodaCheck
from .spark_related import RelatedSparkJob
from pyatlan_v9.model.conversion_utils import categorize_relationships, merge_relationships
from pyatlan_v9.model.serde import Serde, get_serde
from pyatlan_v9.model.transform import register_asset

# =============================================================================
# FLAT ASSET CLASS
# =============================================================================

@register_asset
class SkillArtifact(Asset):
    """
    A file or data object associated with a skill. Extends Artifact for common artifact attributes (version, fileType, filePath). Linked to skills via containment relationship.
    """

    # Search-field descriptors; populated at module bottom after class creation
    # (deferred so the descriptor types need not exist at class-definition time).
    ARTIFACT_VERSION: ClassVar[Any] = None
    CATALOG_DATASET_GUID: ClassVar[Any] = None
    FILE_TYPE: ClassVar[Any] = None
    FILE_PATH: ClassVar[Any] = None
    LINK: ClassVar[Any] = None
    IS_GLOBAL: ClassVar[Any] = None
    REFERENCE: ClassVar[Any] = None
    RESOURCE_METADATA: ClassVar[Any] = None
    INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None
    OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None
    ANOMALO_CHECKS: ClassVar[Any] = None
    APPLICATION: ClassVar[Any] = None
    APPLICATION_FIELD: ClassVar[Any] = None
    DATA_CONTRACT_LATEST: ClassVar[Any] = None
    DATA_CONTRACT_LATEST_CERTIFIED: ClassVar[Any] = None
    OUTPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None
    INPUT_PORT_DATA_PRODUCTS: ClassVar[Any] = None
    MODEL_IMPLEMENTED_ENTITIES: ClassVar[Any] = None
    MODEL_IMPLEMENTED_ATTRIBUTES: ClassVar[Any] = None
    METRICS: ClassVar[Any] = None
    DQ_BASE_DATASET_RULES: ClassVar[Any] = None
    DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None
    GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None
    MEANINGS: ClassVar[Any] = None
    MC_MONITORS: ClassVar[Any] = None
    MC_INCIDENTS: ClassVar[Any] = None
    PARTIAL_CHILD_FIELDS: ClassVar[Any] = None
    PARTIAL_CHILD_OBJECTS: ClassVar[Any] = None
    INPUT_TO_PROCESSES: ClassVar[Any] = None
    OUTPUT_FROM_PROCESSES: ClassVar[Any] = None
    USER_DEF_RELATIONSHIP_TO: ClassVar[Any] = None
    USER_DEF_RELATIONSHIP_FROM: ClassVar[Any] = None
    FILES: ClassVar[Any] = None
    LINKS: ClassVar[Any] = None
    README: ClassVar[Any] = None
    SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None
    SKILL_SOURCE: ClassVar[Any] = None
    SODA_CHECKS: ClassVar[Any] = None
    INPUT_TO_SPARK_JOBS: ClassVar[Any] = None
    OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None

    type_name: Union[str, UnsetType] = "SkillArtifact"

    # NOTE(review): field declaration order presumably matters for msgspec
    # Struct layout and must stay in sync with the generator.
    artifact_version: Union[str, None, UnsetType] = UNSET
    """Version identifier for this artifact."""

    catalog_dataset_guid: Union[str, None, UnsetType] = UNSET
    """Unique identifier of the dataset this asset belongs to."""

    file_type: Union[str, None, UnsetType] = UNSET
    """Type (extension) of the file."""

    file_path: Union[str, None, UnsetType] = UNSET
    """URL giving the online location where the file can be accessed."""

    link: Union[str, None, UnsetType] = UNSET
    """URL to the resource."""

    is_global: Union[bool, None, UnsetType] = UNSET
    """Whether the resource is global (true) or not (false)."""

    reference: Union[str, None, UnsetType] = UNSET
    """Reference to the resource."""

    resource_metadata: Union[Dict[str, str], None, UnsetType] = UNSET
    """Metadata of the resource."""

    input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks to which this asset provides input."""

    output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks from which this asset is output."""

    anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET
    """Checks that run on this asset."""

    application: Union[RelatedApplication, None, UnsetType] = UNSET
    """Application owning the Asset."""

    application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET
    """ApplicationField owning the Asset."""

    data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest version of the data contract (in any status) for this asset."""

    data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest certified version of the data contract for this asset."""

    output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an output port."""

    input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an input port."""

    model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET
    """Entities implemented by this asset."""

    model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET
    """Attributes implemented by this asset."""

    metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET
    """"""

    dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules that are applied on this dataset."""

    dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules where this dataset is referenced."""

    gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET
    """Dataplex entries (assets) that have aspects of this Aspect Type attached."""

    meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET
    """Glossary terms that are linked to this asset."""

    mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET
    """Monitors that observe this asset."""

    mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET
    """"""

    partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET
    """Partial fields contained in the asset."""

    partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET
    """Partial objects contained in the asset."""

    input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes to which this asset provides input."""

    output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes from which this asset is produced as output."""

    user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    files: Union[List[RelatedFile], None, UnsetType] = UNSET
    """"""

    links: Union[List[RelatedLink], None, UnsetType] = UNSET
    """Links that are attached to this asset."""

    readme: Union[RelatedReadme, None, UnsetType] = UNSET
    """README that is linked to this asset."""

    schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET
    """Schema registry subjects associated with this asset."""

    skill_source: Union[RelatedSkill, None, UnsetType] = UNSET
    """Skill that owns this artifact."""

    soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET
    """"""

    input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    def __post_init__(self) -> None:
        # Pin the discriminator regardless of what the caller supplied.
        self.type_name = "SkillArtifact"



    # =========================================================================
    # Optimized Serialization Methods (override Asset base class)
    # =========================================================================

    def to_json(self, nested: bool = True, serde: Serde | None = None) -> str:
        """
        Convert to JSON string using optimized nested struct serialization.

        Args:
            nested: If True (default), use nested API format. If False, use flat format.
            serde: Optional Serde instance for encoder reuse. Uses shared singleton if None.

        Returns:
            JSON string representation
        """
        if serde is None:
            serde = get_serde()
        if nested:
            return self.to_nested_bytes(serde).decode("utf-8")
        else:
            return serde.encode(self).decode("utf-8")

    def to_nested_bytes(self, serde: Serde | None = None) -> bytes:
        """Serialize to Atlas nested-format JSON bytes (pure msgspec, no dict intermediate)."""
        if serde is None:
            serde = get_serde()
        return _skill_artifact_to_nested_bytes(self, serde)

    @staticmethod
    def from_json(json_data: str | bytes, serde: Serde | None = None) -> SkillArtifact:
        """
        Create from JSON string or bytes using optimized nested struct deserialization.

        Args:
            json_data: JSON string or bytes to deserialize
            serde: Optional Serde instance for decoder reuse. Uses shared singleton if None.

        Returns:
            SkillArtifact instance
        """
        if isinstance(json_data, str):
            json_data = json_data.encode("utf-8")
        if serde is None:
            serde = get_serde()
        return _skill_artifact_from_nested_bytes(json_data, serde)


# =============================================================================
# NESTED FORMAT CLASSES
# =============================================================================

class SkillArtifactAttributes(AssetAttributes):
    """SkillArtifact-specific attributes for nested API format."""

    artifact_version: Union[str, None, UnsetType] = UNSET
    """Version identifier for this artifact."""

    catalog_dataset_guid: Union[str, None, UnsetType] = UNSET
    """Unique identifier of the dataset this asset belongs to."""

    file_type: Union[str, None, UnsetType] = UNSET
    """Type (extension) of the file."""

    file_path: Union[str, None, UnsetType] = UNSET
    """URL giving the online location where the file can be accessed."""

    link: Union[str, None, UnsetType] = UNSET
    """URL to the resource."""

    is_global: Union[bool, None, UnsetType] = UNSET
    """Whether the resource is global (true) or not (false)."""

    reference: Union[str, None, UnsetType] = UNSET
    """Reference to the resource."""

    resource_metadata: Union[Dict[str, str], None, UnsetType] = UNSET
    """Metadata of the resource."""

class SkillArtifactRelationshipAttributes(AssetRelationshipAttributes):
    """SkillArtifact-specific relationship attributes for nested API format."""

    input_to_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks to which this asset provides input."""

    output_from_airflow_tasks: Union[List[RelatedAirflowTask], None, UnsetType] = UNSET
    """Tasks from which this asset is output."""

    anomalo_checks: Union[List[RelatedAnomaloCheck], None, UnsetType] = UNSET
    """Checks that run on this asset."""

    application: Union[RelatedApplication, None, UnsetType] = UNSET
    """Application owning the Asset."""

    application_field: Union[RelatedApplicationField, None, UnsetType] = UNSET
    """ApplicationField owning the Asset."""

    data_contract_latest: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest version of the data contract (in any status) for this asset."""

    data_contract_latest_certified: Union[RelatedDataContract, None, UnsetType] = UNSET
    """Latest certified version of the data contract for this asset."""

    output_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an output port."""

    input_port_data_products: Union[List[RelatedDataProduct], None, UnsetType] = UNSET
    """Data products for which this asset is an input port."""

    model_implemented_entities: Union[List[RelatedModelEntity], None, UnsetType] = UNSET
    """Entities implemented by this asset."""

    model_implemented_attributes: Union[List[RelatedModelAttribute], None, UnsetType] = UNSET
    """Attributes implemented by this asset."""

    metrics: Union[List[RelatedMetric], None, UnsetType] = UNSET
    """"""

    dq_base_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules that are applied on this dataset."""

    dq_reference_dataset_rules: Union[List[RelatedDataQualityRule], None, UnsetType] = UNSET
    """Rules where this dataset is referenced."""

    gcp_dataplex_aspect_type_metadata_entities: Union[List[RelatedGCPDataplexAspectType], None, UnsetType] = UNSET
    """Dataplex entries (assets) that have aspects of this Aspect Type attached."""

    meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET
    """Glossary terms that are linked to this asset."""

    mc_monitors: Union[List[RelatedMCMonitor], None, UnsetType] = UNSET
    """Monitors that observe this asset."""

    mc_incidents: Union[List[RelatedMCIncident], None, UnsetType] = UNSET
    """"""

    partial_child_fields: Union[List[RelatedPartialField], None, UnsetType] = UNSET
    """Partial fields contained in the asset."""

    partial_child_objects: Union[List[RelatedPartialObject], None, UnsetType] = UNSET
    """Partial objects contained in the asset."""

    input_to_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes to which this asset provides input."""

    output_from_processes: Union[List[RelatedProcess], None, UnsetType] = UNSET
    """Processes from which this asset is produced as output."""

    user_def_relationship_to: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    user_def_relationship_from: Union[List[RelatedReferenceable], None, UnsetType] = UNSET
    """"""

    files: Union[List[RelatedFile], None, UnsetType] = UNSET
    """"""

    links: Union[List[RelatedLink], None, UnsetType] = UNSET
    """Links that are attached to this asset."""

    readme: Union[RelatedReadme, None, UnsetType] = UNSET
    """README that is linked to this asset."""

    schema_registry_subjects: Union[List[RelatedSchemaRegistrySubject], None, UnsetType] = UNSET
    """Schema registry subjects associated with this asset."""

    skill_source: Union[RelatedSkill, None, UnsetType] = UNSET
    """Skill that owns this artifact."""

    soda_checks: Union[List[RelatedSodaCheck], None, UnsetType] = UNSET
    """"""

    input_to_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

    output_from_spark_jobs: Union[List[RelatedSparkJob], None, UnsetType] = UNSET
    """"""

class SkillArtifactNested(AssetNested):
    """SkillArtifact in nested API format for high-performance serialization."""

    attributes: Union[SkillArtifactAttributes, UnsetType] = UNSET
    relationship_attributes: Union[SkillArtifactRelationshipAttributes, UnsetType] = UNSET
    append_relationship_attributes: Union[SkillArtifactRelationshipAttributes, UnsetType] = UNSET
    remove_relationship_attributes: Union[SkillArtifactRelationshipAttributes, UnsetType] = UNSET

# =============================================================================
# CONVERSION HELPERS &
# CONSTANTS
# =============================================================================

# All relationship field names on SkillArtifact: the shared Asset-level
# relationship names first, then the SkillArtifact-specific ones.
_SKILL_ARTIFACT_REL_FIELDS: List[str] = _ASSET_REL_FIELDS + [
    "input_to_airflow_tasks",
    "output_from_airflow_tasks",
    "anomalo_checks",
    "application",
    "application_field",
    "data_contract_latest",
    "data_contract_latest_certified",
    "output_port_data_products",
    "input_port_data_products",
    "model_implemented_entities",
    "model_implemented_attributes",
    "metrics",
    "dq_base_dataset_rules",
    "dq_reference_dataset_rules",
    "gcp_dataplex_aspect_type_metadata_entities",
    "meanings",
    "mc_monitors",
    "mc_incidents",
    "partial_child_fields",
    "partial_child_objects",
    "input_to_processes",
    "output_from_processes",
    "user_def_relationship_to",
    "user_def_relationship_from",
    "files",
    "links",
    "readme",
    "schema_registry_subjects",
    "skill_source",
    "soda_checks",
    "input_to_spark_jobs",
    "output_from_spark_jobs",
]

# Plain (non-relationship) attributes declared by SkillArtifact itself.
_SKILL_ARTIFACT_OWN_ATTRS = (
    "artifact_version",
    "catalog_dataset_guid",
    "file_type",
    "file_path",
    "link",
    "is_global",
    "reference",
    "resource_metadata",
)

# Entity-header fields copied verbatim between the flat and nested forms.
_SKILL_ARTIFACT_HEADER_FIELDS = (
    "guid",
    "type_name",
    "status",
    "version",
    "create_time",
    "update_time",
    "created_by",
    "updated_by",
    "classifications",
    "classification_names",
    "meanings",
    "labels",
    "business_attributes",
    "custom_attributes",
    "pending_tasks",
    "proxy",
    "is_incomplete",
    "provenance_type",
    "home_id",
)

def _populate_skill_artifact_attrs(attrs: SkillArtifactAttributes, obj: SkillArtifact) -> None:
    """Copy SkillArtifact-specific attributes from the flat object onto the attrs struct."""
    _populate_asset_attrs(attrs, obj)
    for name in _SKILL_ARTIFACT_OWN_ATTRS:
        setattr(attrs, name, getattr(obj, name))

def _extract_skill_artifact_attrs(attrs: SkillArtifactAttributes) -> dict:
    """Flatten all SkillArtifact attributes from the attrs struct into one dict."""
    result = _extract_asset_attrs(attrs)
    for name in _SKILL_ARTIFACT_OWN_ATTRS:
        result[name] = getattr(attrs, name)
    return result

# =============================================================================
# CONVERSION FUNCTIONS
# =============================================================================


def _skill_artifact_to_nested(obj: SkillArtifact) -> SkillArtifactNested:
    """Convert a flat SkillArtifact to the nested API format."""
    attrs = SkillArtifactAttributes()
    _populate_skill_artifact_attrs(attrs, obj)
    # Split relationships into REPLACE / APPEND / REMOVE buckets by save semantic.
    replace_rels, append_rels, remove_rels = categorize_relationships(
        obj, _SKILL_ARTIFACT_REL_FIELDS, SkillArtifactRelationshipAttributes
    )
    header = {name: getattr(obj, name) for name in _SKILL_ARTIFACT_HEADER_FIELDS}
    return SkillArtifactNested(
        attributes=attrs,
        relationship_attributes=replace_rels,
        append_relationship_attributes=append_rels,
        remove_relationship_attributes=remove_rels,
        **header,
    )

def _skill_artifact_from_nested(nested: SkillArtifactNested) -> SkillArtifact:
    """Convert the nested API format back to a flat SkillArtifact."""
    attrs = nested.attributes if nested.attributes is not UNSET else SkillArtifactAttributes()
    # Re-merge the three relationship buckets into flat fields.
    merged_rels = merge_relationships(
        nested.relationship_attributes,
        nested.append_relationship_attributes,
        nested.remove_relationship_attributes,
        _SKILL_ARTIFACT_REL_FIELDS,
        SkillArtifactRelationshipAttributes,
    )
    header = {name: getattr(nested, name) for name in _SKILL_ARTIFACT_HEADER_FIELDS}
    return SkillArtifact(
        **header,
        **_extract_skill_artifact_attrs(attrs),
        **merged_rels,
    )

def _skill_artifact_to_nested_bytes(obj: SkillArtifact, serde: Serde) -> bytes:
    """Encode a flat SkillArtifact straight to nested-format JSON bytes."""
    return serde.encode(_skill_artifact_to_nested(obj))


def _skill_artifact_from_nested_bytes(data: bytes, serde: Serde) -> SkillArtifact:
    """Decode nested-format JSON bytes into a flat SkillArtifact."""
    return _skill_artifact_from_nested(serde.decode(data, SkillArtifactNested))

# ---------------------------------------------------------------------------
# Deferred field descriptor initialization
# ---------------------------------------------------------------------------
from pyatlan.model.fields.atlan_fields import (  # noqa: E402
    BooleanField,
    KeywordField,
    RelationField,
)

SkillArtifact.ARTIFACT_VERSION = KeywordField("artifactVersion", "artifactVersion")
SkillArtifact.CATALOG_DATASET_GUID = KeywordField("catalogDatasetGuid", "catalogDatasetGuid")
SkillArtifact.FILE_TYPE = KeywordField("fileType", "fileType")
+SkillArtifact.FILE_PATH = KeywordField("filePath", "filePath") +SkillArtifact.LINK = KeywordField("link", "link") +SkillArtifact.IS_GLOBAL = BooleanField("isGlobal", "isGlobal") +SkillArtifact.REFERENCE = KeywordField("reference", "reference") +SkillArtifact.RESOURCE_METADATA = KeywordField("resourceMetadata", "resourceMetadata") +SkillArtifact.INPUT_TO_AIRFLOW_TASKS = RelationField("inputToAirflowTasks") +SkillArtifact.OUTPUT_FROM_AIRFLOW_TASKS = RelationField("outputFromAirflowTasks") +SkillArtifact.ANOMALO_CHECKS = RelationField("anomaloChecks") +SkillArtifact.APPLICATION = RelationField("application") +SkillArtifact.APPLICATION_FIELD = RelationField("applicationField") +SkillArtifact.DATA_CONTRACT_LATEST = RelationField("dataContractLatest") +SkillArtifact.DATA_CONTRACT_LATEST_CERTIFIED = RelationField("dataContractLatestCertified") +SkillArtifact.OUTPUT_PORT_DATA_PRODUCTS = RelationField("outputPortDataProducts") +SkillArtifact.INPUT_PORT_DATA_PRODUCTS = RelationField("inputPortDataProducts") +SkillArtifact.MODEL_IMPLEMENTED_ENTITIES = RelationField("modelImplementedEntities") +SkillArtifact.MODEL_IMPLEMENTED_ATTRIBUTES = RelationField("modelImplementedAttributes") +SkillArtifact.METRICS = RelationField("metrics") +SkillArtifact.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") +SkillArtifact.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SkillArtifact.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField("gcpDataplexAspectTypeMetadataEntities") +SkillArtifact.MEANINGS = RelationField("meanings") +SkillArtifact.MC_MONITORS = RelationField("mcMonitors") +SkillArtifact.MC_INCIDENTS = RelationField("mcIncidents") +SkillArtifact.PARTIAL_CHILD_FIELDS = RelationField("partialChildFields") +SkillArtifact.PARTIAL_CHILD_OBJECTS = RelationField("partialChildObjects") +SkillArtifact.INPUT_TO_PROCESSES = RelationField("inputToProcesses") +SkillArtifact.OUTPUT_FROM_PROCESSES = RelationField("outputFromProcesses") 
+SkillArtifact.USER_DEF_RELATIONSHIP_TO = RelationField("userDefRelationshipTo") +SkillArtifact.USER_DEF_RELATIONSHIP_FROM = RelationField("userDefRelationshipFrom") +SkillArtifact.FILES = RelationField("files") +SkillArtifact.LINKS = RelationField("links") +SkillArtifact.README = RelationField("readme") +SkillArtifact.SCHEMA_REGISTRY_SUBJECTS = RelationField("schemaRegistrySubjects") +SkillArtifact.SKILL_SOURCE = RelationField("skillSource") +SkillArtifact.SODA_CHECKS = RelationField("sodaChecks") +SkillArtifact.INPUT_TO_SPARK_JOBS = RelationField("inputToSparkJobs") +SkillArtifact.OUTPUT_FROM_SPARK_JOBS = RelationField("outputFromSparkJobs") \ No newline at end of file diff --git a/pyatlan_v9/model/assets/skill_artifact_related.py b/pyatlan_v9/model/assets/skill_artifact_related.py new file mode 100644 index 000000000..30e8e5709 --- /dev/null +++ b/pyatlan_v9/model/assets/skill_artifact_related.py @@ -0,0 +1,39 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Related type classes for SkillArtifact module. + +This module contains all Related{Type} classes for the SkillArtifact type hierarchy. +These classes are used for relationship attributes to reference related entities. +""" + +from __future__ import annotations + +from typing import Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .artifact_related import RelatedArtifact +from .referenceable_related import RelatedReferenceable + +__all__ = [ + "RelatedSkillArtifact", +] + + +class RelatedSkillArtifact(RelatedArtifact): + """ + Related entity reference for SkillArtifact assets. + + Extends RelatedArtifact with SkillArtifact-specific attributes. 
+ """ + + # type_name inherited from parent with default=UNSET + # __post_init__ sets it to "SkillArtifact" so it serializes correctly + + def __post_init__(self) -> None: + RelatedReferenceable.__post_init__(self) + self.type_name = "SkillArtifact" diff --git a/pyatlan_v9/model/assets/skill_related.py b/pyatlan_v9/model/assets/skill_related.py new file mode 100644 index 000000000..aecd22bc6 --- /dev/null +++ b/pyatlan_v9/model/assets/skill_related.py @@ -0,0 +1,42 @@ +# Auto-generated by PythonMsgspecRenderer.pkl - DO NOT EDIT +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2024 Atlan Pte. Ltd. + +""" +Related type classes for Skill module. + +This module contains all Related{Type} classes for the Skill type hierarchy. +These classes are used for relationship attributes to reference related entities. +""" + +from __future__ import annotations + +from typing import Dict, List, Set, Union + +import msgspec +from msgspec import UNSET, UnsetType + +from .agentic_related import RelatedAgentic +from .referenceable_related import RelatedReferenceable + +__all__ = [ + "RelatedSkill", +] + + +class RelatedSkill(RelatedAgentic): + """ + Related entity reference for Skill assets. + + Extends RelatedAgentic with Skill-specific attributes. 
+ """ + + # type_name inherited from parent with default=UNSET + # __post_init__ sets it to "Skill" so it serializes correctly + + skill_version: Union[str, None, UnsetType] = UNSET + """Version identifier for this skill.""" + + def __post_init__(self) -> None: + RelatedReferenceable.__post_init__(self) + self.type_name = "Skill" diff --git a/pyatlan_v9/model/assets/snowflake.py b/pyatlan_v9/model/assets/snowflake.py index 0ec684249..db3cb31bf 100644 --- a/pyatlan_v9/model/assets/snowflake.py +++ b/pyatlan_v9/model/assets/snowflake.py @@ -47,6 +47,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -55,7 +56,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .snowflake_related import RelatedSnowflake, RelatedSnowflakeSemanticLogicalTable +from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .sql_insight_related import ( @@ -119,6 +120,7 @@ class Snowflake(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -140,6 +142,8 @@ class Snowflake(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Snowflake" + query_count: Union[int, None, UnsetType] = UNSET """Number of times this asset has been queried.""" @@ -283,6 +287,11 @@ class Snowflake(Asset): 
dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -358,66 +367,6 @@ class Snowflake(Asset): def __post_init__(self) -> None: self.type_name = "Snowflake" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Snowflake instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Snowflake validation failed: {errors}") - - def minimize(self) -> "Snowflake": - """ - Return a minimal copy of this Snowflake with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Snowflake with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Snowflake instance with only the minimum required fields. - """ - self.validate() - return Snowflake(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSnowflake": - """ - Create a :class:`RelatedSnowflake` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflake reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflake(guid=self.guid) - return RelatedSnowflake(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -620,6 +569,11 @@ class SnowflakeRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -732,6 +686,7 @@ class SnowflakeNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -863,9 +818,6 @@ def _snowflake_to_nested(snowflake: Snowflake) -> SnowflakeNested: is_incomplete=snowflake.is_incomplete, provenance_type=snowflake.provenance_type, home_id=snowflake.home_id, - 
depth=snowflake.depth, - immediate_upstream=snowflake.immediate_upstream, - immediate_downstream=snowflake.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -897,6 +849,7 @@ def _snowflake_from_nested(nested: SnowflakeNested) -> Snowflake: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -905,9 +858,6 @@ def _snowflake_from_nested(nested: SnowflakeNested) -> Snowflake: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1007,6 +957,9 @@ def _snowflake_from_nested_bytes(data: bytes, serde: Serde) -> Snowflake: Snowflake.DBT_SOURCES = RelationField("dbtSources") Snowflake.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Snowflake.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Snowflake.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Snowflake.MEANINGS = RelationField("meanings") Snowflake.MC_MONITORS = RelationField("mcMonitors") Snowflake.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/snowflake_ai_model_context.py b/pyatlan_v9/model/assets/snowflake_ai_model_context.py index bb2eb846e..c07bdf666 100644 --- a/pyatlan_v9/model/assets/snowflake_ai_model_context.py +++ b/pyatlan_v9/model/assets/snowflake_ai_model_context.py @@ -49,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, 
RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -58,7 +59,6 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .snowflake_related import ( - RelatedSnowflakeAIModelContext, RelatedSnowflakeAIModelVersion, RelatedSnowflakeSemanticLogicalTable, ) @@ -138,6 +138,7 @@ class SnowflakeAIModelContext(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -161,6 +162,8 @@ class SnowflakeAIModelContext(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeAIModelContext" + query_count: Union[int, None, UnsetType] = UNSET """Number of times this asset has been queried.""" @@ -356,6 +359,11 @@ class SnowflakeAIModelContext(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -449,80 +457,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeAIModelContext instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeAIModelContext validation failed: {errors}") - - def minimize(self) -> "SnowflakeAIModelContext": - """ - Return a minimal copy of this SnowflakeAIModelContext with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeAIModelContext with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeAIModelContext instance with only the minimum required fields. 
- """ - self.validate() - return SnowflakeAIModelContext( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeAIModelContext": - """ - Create a :class:`RelatedSnowflakeAIModelContext` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeAIModelContext reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeAIModelContext(guid=self.guid) - return RelatedSnowflakeAIModelContext(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -779,6 +713,11 @@ class SnowflakeAIModelContextRelationshipAttributes(AssetRelationshipAttributes) dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -905,6 +844,7 @@ class SnowflakeAIModelContextNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1076,9 +1016,6 @@ def _snowflake_ai_model_context_to_nested( is_incomplete=snowflake_ai_model_context.is_incomplete, provenance_type=snowflake_ai_model_context.provenance_type, home_id=snowflake_ai_model_context.home_id, - depth=snowflake_ai_model_context.depth, - immediate_upstream=snowflake_ai_model_context.immediate_upstream, - 
immediate_downstream=snowflake_ai_model_context.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1114,6 +1051,7 @@ def _snowflake_ai_model_context_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1122,9 +1060,6 @@ def _snowflake_ai_model_context_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_ai_model_context_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1283,6 +1218,9 @@ def _snowflake_ai_model_context_from_nested_bytes( SnowflakeAIModelContext.DBT_SOURCES = RelationField("dbtSources") SnowflakeAIModelContext.SQL_DBT_SOURCES = RelationField("sqlDBTSources") SnowflakeAIModelContext.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +SnowflakeAIModelContext.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SnowflakeAIModelContext.MEANINGS = RelationField("meanings") SnowflakeAIModelContext.MC_MONITORS = RelationField("mcMonitors") SnowflakeAIModelContext.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/snowflake_ai_model_version.py b/pyatlan_v9/model/assets/snowflake_ai_model_version.py index 4337717a6..6d029ad40 100644 --- a/pyatlan_v9/model/assets/snowflake_ai_model_version.py +++ b/pyatlan_v9/model/assets/snowflake_ai_model_version.py @@ -49,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, 
RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -59,7 +60,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .snowflake_related import ( RelatedSnowflakeAIModelContext, - RelatedSnowflakeAIModelVersion, RelatedSnowflakeSemanticLogicalTable, ) from .soda_related import RelatedSodaCheck @@ -80,11 +80,11 @@ class SnowflakeAIModelVersion(Asset): Instance of an ai model version in snowflake. """ - SNOWFLAKE_AI_MODEL_VERSION_NAME: ClassVar[Any] = None - SNOWFLAKE_AI_MODEL_VERSION_TYPE: ClassVar[Any] = None - SNOWFLAKE_AI_MODEL_VERSION_ALIASES: ClassVar[Any] = None - SNOWFLAKE_AI_MODEL_VERSION_METRICS: ClassVar[Any] = None - SNOWFLAKE_AI_MODEL_VERSION_FUNCTIONS: ClassVar[Any] = None + SNOWFLAKE_NAME: ClassVar[Any] = None + SNOWFLAKE_TYPE: ClassVar[Any] = None + SNOWFLAKE_ALIASES: ClassVar[Any] = None + SNOWFLAKE_METRICS: ClassVar[Any] = None + SNOWFLAKE_FUNCTIONS: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -141,6 +141,7 @@ class SnowflakeAIModelVersion(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -163,29 +164,21 @@ class SnowflakeAIModelVersion(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None - snowflake_ai_model_version_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionName" - ) + type_name: Union[str, UnsetType] = "SnowflakeAIModelVersion" + + snowflake_name: Union[str, None, UnsetType] = UNSET """Version part of the model name.""" - snowflake_ai_model_version_type: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionType" - ) + 
snowflake_type: Union[str, None, UnsetType] = UNSET """The type of the model version.""" - snowflake_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionAliases") - ) + snowflake_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases for the model version.""" - snowflake_ai_model_version_metrics: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionMetrics") - ) + snowflake_metrics: Union[Dict[str, str], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - snowflake_ai_model_version_functions: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionFunctions") - ) + snowflake_functions: Union[List[str], None, UnsetType] = UNSET """Functions used in the model version.""" query_count: Union[int, None, UnsetType] = UNSET @@ -378,6 +371,11 @@ class SnowflakeAIModelVersion(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -466,82 +464,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeAIModelVersion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_ai_model_context is UNSET: - errors.append("snowflake_ai_model_context is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeAIModelVersion validation failed: {errors}") - - def minimize(self) -> "SnowflakeAIModelVersion": - """ - Return a minimal copy of this SnowflakeAIModelVersion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeAIModelVersion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeAIModelVersion instance with only the minimum required fields. 
- """ - self.validate() - return SnowflakeAIModelVersion( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeAIModelVersion": - """ - Create a :class:`RelatedSnowflakeAIModelVersion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeAIModelVersion reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeAIModelVersion(guid=self.guid) - return RelatedSnowflakeAIModelVersion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -599,29 +521,19 @@ def from_json( class SnowflakeAIModelVersionAttributes(AssetAttributes): """SnowflakeAIModelVersion-specific attributes for nested API format.""" - snowflake_ai_model_version_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionName" - ) + snowflake_name: Union[str, None, UnsetType] = UNSET """Version part of the model name.""" - snowflake_ai_model_version_type: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionType" - ) + snowflake_type: Union[str, None, UnsetType] = UNSET """The type of the model version.""" - snowflake_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionAliases") - ) + snowflake_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases for the model version.""" - snowflake_ai_model_version_metrics: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionMetrics") - ) + snowflake_metrics: Union[Dict[str, str], None, UnsetType] = UNSET """Metrics for an individual 
experiment.""" - snowflake_ai_model_version_functions: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionFunctions") - ) + snowflake_functions: Union[List[str], None, UnsetType] = UNSET """Functions used in the model version.""" query_count: Union[int, None, UnsetType] = UNSET @@ -818,6 +730,11 @@ class SnowflakeAIModelVersionRelationshipAttributes(AssetRelationshipAttributes) dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -938,6 +855,7 @@ class SnowflakeAIModelVersionNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -967,13 +885,11 @@ def _populate_snowflake_ai_model_version_attrs( ) -> None: """Populate SnowflakeAIModelVersion-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.snowflake_ai_model_version_name = obj.snowflake_ai_model_version_name - attrs.snowflake_ai_model_version_type = obj.snowflake_ai_model_version_type - attrs.snowflake_ai_model_version_aliases = obj.snowflake_ai_model_version_aliases - attrs.snowflake_ai_model_version_metrics = obj.snowflake_ai_model_version_metrics - attrs.snowflake_ai_model_version_functions = ( - obj.snowflake_ai_model_version_functions - ) + attrs.snowflake_name = obj.snowflake_name + attrs.snowflake_type = obj.snowflake_type + attrs.snowflake_aliases = obj.snowflake_aliases + attrs.snowflake_metrics = obj.snowflake_metrics + attrs.snowflake_functions = obj.snowflake_functions attrs.query_count = obj.query_count 
attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -1024,17 +940,11 @@ def _extract_snowflake_ai_model_version_attrs( ) -> dict: """Extract all SnowflakeAIModelVersion attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["snowflake_ai_model_version_name"] = attrs.snowflake_ai_model_version_name - result["snowflake_ai_model_version_type"] = attrs.snowflake_ai_model_version_type - result["snowflake_ai_model_version_aliases"] = ( - attrs.snowflake_ai_model_version_aliases - ) - result["snowflake_ai_model_version_metrics"] = ( - attrs.snowflake_ai_model_version_metrics - ) - result["snowflake_ai_model_version_functions"] = ( - attrs.snowflake_ai_model_version_functions - ) + result["snowflake_name"] = attrs.snowflake_name + result["snowflake_type"] = attrs.snowflake_type + result["snowflake_aliases"] = attrs.snowflake_aliases + result["snowflake_metrics"] = attrs.snowflake_metrics + result["snowflake_functions"] = attrs.snowflake_functions result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -1126,9 +1036,6 @@ def _snowflake_ai_model_version_to_nested( is_incomplete=snowflake_ai_model_version.is_incomplete, provenance_type=snowflake_ai_model_version.provenance_type, home_id=snowflake_ai_model_version.home_id, - depth=snowflake_ai_model_version.depth, - immediate_upstream=snowflake_ai_model_version.immediate_upstream, - immediate_downstream=snowflake_ai_model_version.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1164,6 +1071,7 @@ def _snowflake_ai_model_version_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -1172,9 +1080,6 @@ def _snowflake_ai_model_version_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_ai_model_version_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1208,20 +1113,16 @@ def _snowflake_ai_model_version_from_nested_bytes( RelationField, ) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_NAME = KeywordField( - "snowflakeAIModelVersionName", "snowflakeAIModelVersionName" -) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_TYPE = KeywordField( - "snowflakeAIModelVersionType", "snowflakeAIModelVersionType" +SnowflakeAIModelVersion.SNOWFLAKE_NAME = KeywordField("snowflakeName", "snowflakeName") +SnowflakeAIModelVersion.SNOWFLAKE_TYPE = KeywordField("snowflakeType", "snowflakeType") +SnowflakeAIModelVersion.SNOWFLAKE_ALIASES = KeywordField( + "snowflakeAliases", "snowflakeAliases" ) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_ALIASES = KeywordField( - "snowflakeAIModelVersionAliases", "snowflakeAIModelVersionAliases" +SnowflakeAIModelVersion.SNOWFLAKE_METRICS = KeywordField( + "snowflakeMetrics", "snowflakeMetrics" ) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_METRICS = KeywordField( - "snowflakeAIModelVersionMetrics", "snowflakeAIModelVersionMetrics" -) -SnowflakeAIModelVersion.SNOWFLAKE_AI_MODEL_VERSION_FUNCTIONS = KeywordField( - "snowflakeAIModelVersionFunctions", "snowflakeAIModelVersionFunctions" +SnowflakeAIModelVersion.SNOWFLAKE_FUNCTIONS = KeywordField( + "snowflakeFunctions", "snowflakeFunctions" ) SnowflakeAIModelVersion.QUERY_COUNT = NumericField("queryCount", "queryCount") SnowflakeAIModelVersion.QUERY_USER_COUNT = NumericField( @@ -1349,6 +1250,9 @@ def _snowflake_ai_model_version_from_nested_bytes( SnowflakeAIModelVersion.DBT_SOURCES = 
RelationField("dbtSources") SnowflakeAIModelVersion.SQL_DBT_SOURCES = RelationField("sqlDBTSources") SnowflakeAIModelVersion.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +SnowflakeAIModelVersion.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SnowflakeAIModelVersion.MEANINGS = RelationField("meanings") SnowflakeAIModelVersion.MC_MONITORS = RelationField("mcMonitors") SnowflakeAIModelVersion.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/snowflake_related.py b/pyatlan_v9/model/assets/snowflake_related.py index 7705f0ac7..a7a72aa2a 100644 --- a/pyatlan_v9/model/assets/snowflake_related.py +++ b/pyatlan_v9/model/assets/snowflake_related.py @@ -13,7 +13,6 @@ from typing import Dict, List, Union -import msgspec from msgspec import UNSET, UnsetType from .referenceable_related import RelatedReferenceable @@ -82,7 +81,7 @@ class RelatedSnowflakePipe(RelatedSnowflake): definition: Union[str, None, UnsetType] = UNSET """SQL definition of this pipe.""" - snowflake_pipe_is_auto_ingest_enabled: Union[bool, None, UnsetType] = UNSET + snowflake_is_auto_ingest_enabled: Union[bool, None, UnsetType] = UNSET """Whether auto-ingest is enabled for this pipe (true) or not (false).""" snowflake_pipe_notification_channel_name: Union[str, None, UnsetType] = UNSET @@ -103,16 +102,16 @@ class RelatedSnowflakeStage(RelatedSnowflake): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SnowflakeStage" so it serializes correctly - snowflake_stage_external_location: Union[str, None, UnsetType] = UNSET + snowflake_external_location: Union[str, None, UnsetType] = UNSET """The URL or cloud storage path specifying the external location where the stage data files are stored. 
This is NULL for internal stages.""" - snowflake_stage_external_location_region: Union[str, None, UnsetType] = UNSET + snowflake_external_location_region: Union[str, None, UnsetType] = UNSET """The geographic region identifier where the external stage is located in cloud storage. This is NULL for internal stages.""" - snowflake_stage_storage_integration: Union[str, None, UnsetType] = UNSET + snowflake_storage_integration: Union[str, None, UnsetType] = UNSET """The name of the storage integration associated with the stage; NULL for internal stages or stages that do not use a storage integration.""" - snowflake_stage_type: Union[str, None, UnsetType] = UNSET + snowflake_type: Union[str, None, UnsetType] = UNSET """Categorization of the stage type in Snowflake, which can be 'Internal Named' or 'External Named', indicating whether the stage storage is within Snowflake or in external cloud storage.""" def __post_init__(self) -> None: @@ -130,19 +129,19 @@ class RelatedSnowflakeStream(RelatedSnowflake): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SnowflakeStream" so it serializes correctly - snowflake_stream_type: Union[str, None, UnsetType] = UNSET + snowflake_type: Union[str, None, UnsetType] = UNSET """Type of this stream, for example: standard, append-only, insert-only, etc.""" - snowflake_stream_source_type: Union[str, None, UnsetType] = UNSET + snowflake_source_type: Union[str, None, UnsetType] = UNSET """Type of the source of this stream.""" - snowflake_stream_mode: Union[str, None, UnsetType] = UNSET + snowflake_mode: Union[str, None, UnsetType] = UNSET """Mode of this stream.""" - snowflake_stream_is_stale: Union[bool, None, UnsetType] = UNSET + snowflake_is_stale: Union[bool, None, UnsetType] = UNSET """Whether this stream is stale (true) or not (false).""" - snowflake_stream_stale_after: Union[int, None, UnsetType] = UNSET + snowflake_stale_after: Union[int, None, UnsetType] = UNSET """Time (epoch) after which this stream 
will be stale, in milliseconds.""" def __post_init__(self) -> None: @@ -190,29 +189,19 @@ class RelatedSnowflakeAIModelVersion(RelatedSnowflake): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SnowflakeAIModelVersion" so it serializes correctly - snowflake_ai_model_version_name: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionName" - ) + snowflake_name: Union[str, None, UnsetType] = UNSET """Version part of the model name.""" - snowflake_ai_model_version_type: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="snowflakeAIModelVersionType" - ) + snowflake_type: Union[str, None, UnsetType] = UNSET """The type of the model version.""" - snowflake_ai_model_version_aliases: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionAliases") - ) + snowflake_aliases: Union[List[str], None, UnsetType] = UNSET """The aliases for the model version.""" - snowflake_ai_model_version_metrics: Union[Dict[str, str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionMetrics") - ) + snowflake_metrics: Union[Dict[str, str], None, UnsetType] = UNSET """Metrics for an individual experiment.""" - snowflake_ai_model_version_functions: Union[List[str], None, UnsetType] = ( - msgspec.field(default=UNSET, name="snowflakeAIModelVersionFunctions") - ) + snowflake_functions: Union[List[str], None, UnsetType] = UNSET """Functions used in the model version.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/snowflake_semantic_dimension.py b/pyatlan_v9/model/assets/snowflake_semantic_dimension.py index 750603fc3..90e7cf9a2 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_dimension.py +++ b/pyatlan_v9/model/assets/snowflake_semantic_dimension.py @@ -48,6 +48,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import 
RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -57,10 +58,7 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .semantic_related import RelatedSemanticModel -from .snowflake_related import ( - RelatedSnowflakeSemanticDimension, - RelatedSnowflakeSemanticLogicalTable, -) +from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .sql_insight_related import ( @@ -135,6 +133,7 @@ class SnowflakeSemanticDimension(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -158,6 +157,8 @@ class SnowflakeSemanticDimension(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticDimension" + snowflake_semantic_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the semantic view in which this dimension exists.""" @@ -334,6 +335,11 @@ class SnowflakeSemanticDimension(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -425,90 +431,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def 
validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticDimension instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_semantic_logical_table is UNSET: - errors.append( - "snowflake_semantic_logical_table is required for creation" - ) - if self.snowflake_semantic_view_name is UNSET: - errors.append("snowflake_semantic_view_name is required for creation") - if self.snowflake_semantic_view_qualified_name is UNSET: - errors.append( - "snowflake_semantic_view_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name 
is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeSemanticDimension validation failed: {errors}") - - def minimize(self) -> "SnowflakeSemanticDimension": - """ - Return a minimal copy of this SnowflakeSemanticDimension with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticDimension with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeSemanticDimension instance with only the minimum required fields. - """ - self.validate() - return SnowflakeSemanticDimension( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeSemanticDimension": - """ - Create a :class:`RelatedSnowflakeSemanticDimension` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticDimension reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticDimension(guid=self.guid) - return RelatedSnowflakeSemanticDimension(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -746,6 +668,11 @@ class SnowflakeSemanticDimensionRelationshipAttributes(AssetRelationshipAttribut dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -868,6 +795,7 @@ class SnowflakeSemanticDimensionNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1039,9 +967,6 @@ def _snowflake_semantic_dimension_to_nested( is_incomplete=snowflake_semantic_dimension.is_incomplete, provenance_type=snowflake_semantic_dimension.provenance_type, home_id=snowflake_semantic_dimension.home_id, - depth=snowflake_semantic_dimension.depth, - immediate_upstream=snowflake_semantic_dimension.immediate_upstream, - immediate_downstream=snowflake_semantic_dimension.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1077,6 +1002,7 @@ def _snowflake_semantic_dimension_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -1085,9 +1011,6 @@ def _snowflake_semantic_dimension_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_dimension_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1249,6 +1172,9 @@ def _snowflake_semantic_dimension_from_nested_bytes( SnowflakeSemanticDimension.DBT_SOURCES = RelationField("dbtSources") SnowflakeSemanticDimension.SQL_DBT_SOURCES = RelationField("sqlDBTSources") SnowflakeSemanticDimension.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +SnowflakeSemanticDimension.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SnowflakeSemanticDimension.MEANINGS = RelationField("meanings") SnowflakeSemanticDimension.MC_MONITORS = RelationField("mcMonitors") SnowflakeSemanticDimension.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/snowflake_semantic_fact.py b/pyatlan_v9/model/assets/snowflake_semantic_fact.py index b2f0247f0..b04186c63 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_fact.py +++ b/pyatlan_v9/model/assets/snowflake_semantic_fact.py @@ -48,6 +48,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -57,10 +58,7 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .semantic_related import RelatedSemanticModel -from .snowflake_related import ( - RelatedSnowflakeSemanticFact, - RelatedSnowflakeSemanticLogicalTable, -) +from .snowflake_related import 
RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .sql_insight_related import ( @@ -135,6 +133,7 @@ class SnowflakeSemanticFact(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -158,6 +157,8 @@ class SnowflakeSemanticFact(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticFact" + snowflake_semantic_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the semantic view in which this fact exists.""" @@ -334,6 +335,11 @@ class SnowflakeSemanticFact(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -425,88 +431,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticFact instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_semantic_logical_table is UNSET: - errors.append( - "snowflake_semantic_logical_table is required for creation" - ) - if self.snowflake_semantic_view_name is UNSET: - errors.append("snowflake_semantic_view_name is required for creation") - if self.snowflake_semantic_view_qualified_name is UNSET: - errors.append( - "snowflake_semantic_view_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeSemanticFact validation failed: {errors}") - - def minimize(self) -> "SnowflakeSemanticFact": - """ - Return a minimal copy of this SnowflakeSemanticFact with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticFact with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeSemanticFact instance with only the minimum required fields. - """ - self.validate() - return SnowflakeSemanticFact(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSnowflakeSemanticFact": - """ - Create a :class:`RelatedSnowflakeSemanticFact` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticFact reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticFact(guid=self.guid) - return RelatedSnowflakeSemanticFact(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -744,6 +668,11 @@ class SnowflakeSemanticFactRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -866,6 +795,7 @@ class SnowflakeSemanticFactNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1037,9 +967,6 @@ def _snowflake_semantic_fact_to_nested( 
is_incomplete=snowflake_semantic_fact.is_incomplete, provenance_type=snowflake_semantic_fact.provenance_type, home_id=snowflake_semantic_fact.home_id, - depth=snowflake_semantic_fact.depth, - immediate_upstream=snowflake_semantic_fact.immediate_upstream, - immediate_downstream=snowflake_semantic_fact.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1075,6 +1002,7 @@ def _snowflake_semantic_fact_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1083,9 +1011,6 @@ def _snowflake_semantic_fact_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_fact_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1239,6 +1164,9 @@ def _snowflake_semantic_fact_from_nested_bytes( SnowflakeSemanticFact.DBT_SOURCES = RelationField("dbtSources") SnowflakeSemanticFact.SQL_DBT_SOURCES = RelationField("sqlDBTSources") SnowflakeSemanticFact.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +SnowflakeSemanticFact.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SnowflakeSemanticFact.MEANINGS = RelationField("meanings") SnowflakeSemanticFact.MC_MONITORS = RelationField("mcMonitors") SnowflakeSemanticFact.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/snowflake_semantic_logical_table.py b/pyatlan_v9/model/assets/snowflake_semantic_logical_table.py index 49255972c..5c1786f35 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_logical_table.py +++ 
b/pyatlan_v9/model/assets/snowflake_semantic_logical_table.py @@ -48,6 +48,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -140,6 +141,7 @@ class SnowflakeSemanticLogicalTable(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -167,6 +169,8 @@ class SnowflakeSemanticLogicalTable(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticLogicalTable" + snowflake_semantic_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the semantic view in which this logical table exists.""" @@ -346,6 +350,11 @@ class SnowflakeSemanticLogicalTable(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -455,90 +464,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticLogicalTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_semantic_view is UNSET: - errors.append("snowflake_semantic_view is required for creation") - if self.snowflake_semantic_view_name is UNSET: - errors.append("snowflake_semantic_view_name is required for creation") - if self.snowflake_semantic_view_qualified_name is UNSET: - errors.append( - "snowflake_semantic_view_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError( - f"SnowflakeSemanticLogicalTable validation failed: {errors}" - ) - - def minimize(self) -> 
"SnowflakeSemanticLogicalTable": - """ - Return a minimal copy of this SnowflakeSemanticLogicalTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticLogicalTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeSemanticLogicalTable instance with only the minimum required fields. - """ - self.validate() - return SnowflakeSemanticLogicalTable( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeSemanticLogicalTable": - """ - Create a :class:`RelatedSnowflakeSemanticLogicalTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticLogicalTable reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticLogicalTable(guid=self.guid) - return RelatedSnowflakeSemanticLogicalTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -779,6 +704,11 @@ class SnowflakeSemanticLogicalTableRelationshipAttributes(AssetRelationshipAttri dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -919,6 +849,7 @@ class SnowflakeSemanticLogicalTableNested(AssetNested): 
"dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1106,9 +1037,6 @@ def _snowflake_semantic_logical_table_to_nested( is_incomplete=snowflake_semantic_logical_table.is_incomplete, provenance_type=snowflake_semantic_logical_table.provenance_type, home_id=snowflake_semantic_logical_table.home_id, - depth=snowflake_semantic_logical_table.depth, - immediate_upstream=snowflake_semantic_logical_table.immediate_upstream, - immediate_downstream=snowflake_semantic_logical_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1144,6 +1072,7 @@ def _snowflake_semantic_logical_table_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1152,9 +1081,6 @@ def _snowflake_semantic_logical_table_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_logical_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1329,6 +1255,9 @@ def _snowflake_semantic_logical_table_from_nested_bytes( SnowflakeSemanticLogicalTable.DBT_SOURCES = RelationField("dbtSources") SnowflakeSemanticLogicalTable.SQL_DBT_SOURCES = RelationField("sqlDBTSources") SnowflakeSemanticLogicalTable.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +SnowflakeSemanticLogicalTable.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = ( + RelationField("gcpDataplexAspectTypeMetadataEntities") +) SnowflakeSemanticLogicalTable.MEANINGS = RelationField("meanings") SnowflakeSemanticLogicalTable.MC_MONITORS = 
RelationField("mcMonitors") SnowflakeSemanticLogicalTable.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/snowflake_semantic_metric.py b/pyatlan_v9/model/assets/snowflake_semantic_metric.py index eb725ad70..23de3709c 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_metric.py +++ b/pyatlan_v9/model/assets/snowflake_semantic_metric.py @@ -49,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -57,10 +58,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .snowflake_related import ( - RelatedSnowflakeSemanticLogicalTable, - RelatedSnowflakeSemanticMetric, -) +from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .sql_insight_related import ( @@ -140,6 +138,7 @@ class SnowflakeSemanticMetric(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -162,6 +161,8 @@ class SnowflakeSemanticMetric(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticMetric" + snowflake_semantic_view_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the semantic view in which this metric exists.""" @@ -352,6 +353,11 @@ class SnowflakeSemanticMetric(Asset): dbt_seed_assets: 
Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -440,90 +446,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticMetric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.snowflake_semantic_logical_table is UNSET: - errors.append( - "snowflake_semantic_logical_table is required for creation" - ) - if self.snowflake_semantic_view_name is UNSET: - errors.append("snowflake_semantic_view_name is required for creation") - if self.snowflake_semantic_view_qualified_name is UNSET: - errors.append( - "snowflake_semantic_view_qualified_name is required for creation" - ) - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeSemanticMetric validation failed: {errors}") - - def minimize(self) -> "SnowflakeSemanticMetric": - """ - Return a minimal copy of this SnowflakeSemanticMetric with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticMetric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SnowflakeSemanticMetric instance with only the minimum required fields. 
- """ - self.validate() - return SnowflakeSemanticMetric( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSnowflakeSemanticMetric": - """ - Create a :class:`RelatedSnowflakeSemanticMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticMetric reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticMetric(guid=self.guid) - return RelatedSnowflakeSemanticMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -775,6 +697,11 @@ class SnowflakeSemanticMetricRelationshipAttributes(AssetRelationshipAttributes) dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -897,6 +824,7 @@ class SnowflakeSemanticMetricNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1081,9 +1009,6 @@ def _snowflake_semantic_metric_to_nested( is_incomplete=snowflake_semantic_metric.is_incomplete, provenance_type=snowflake_semantic_metric.provenance_type, home_id=snowflake_semantic_metric.home_id, - depth=snowflake_semantic_metric.depth, - immediate_upstream=snowflake_semantic_metric.immediate_upstream, - 
immediate_downstream=snowflake_semantic_metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1119,6 +1044,7 @@ def _snowflake_semantic_metric_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1127,9 +1053,6 @@ def _snowflake_semantic_metric_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_metric_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1290,6 +1213,9 @@ def _snowflake_semantic_metric_from_nested_bytes( SnowflakeSemanticMetric.DBT_SOURCES = RelationField("dbtSources") SnowflakeSemanticMetric.SQL_DBT_SOURCES = RelationField("sqlDBTSources") SnowflakeSemanticMetric.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +SnowflakeSemanticMetric.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SnowflakeSemanticMetric.MEANINGS = RelationField("meanings") SnowflakeSemanticMetric.MC_MONITORS = RelationField("mcMonitors") SnowflakeSemanticMetric.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/snowflake_semantic_view.py b/pyatlan_v9/model/assets/snowflake_semantic_view.py index 09cbec68e..53001e3d0 100644 --- a/pyatlan_v9/model/assets/snowflake_semantic_view.py +++ b/pyatlan_v9/model/assets/snowflake_semantic_view.py @@ -48,6 +48,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from 
.monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -61,10 +62,7 @@ RelatedSemanticEntity, RelatedSemanticMeasure, ) -from .snowflake_related import ( - RelatedSnowflakeSemanticLogicalTable, - RelatedSnowflakeSemanticView, -) +from .snowflake_related import RelatedSnowflakeSemanticLogicalTable from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .sql_insight_related import ( @@ -130,6 +128,7 @@ class SnowflakeSemanticView(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -155,6 +154,8 @@ class SnowflakeSemanticView(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SnowflakeSemanticView" + snowflake_definition: Union[str, None, UnsetType] = UNSET """DDL definition of the semantic view (via GET_DDL).""" @@ -301,6 +302,11 @@ class SnowflakeSemanticView(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -396,78 +402,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SnowflakeSemanticView instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"SnowflakeSemanticView validation failed: {errors}") - - def minimize(self) -> "SnowflakeSemanticView": - """ - Return a minimal copy of this SnowflakeSemanticView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SnowflakeSemanticView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). 
- - Returns: - A new SnowflakeSemanticView instance with only the minimum required fields. - """ - self.validate() - return SnowflakeSemanticView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSnowflakeSemanticView": - """ - Create a :class:`RelatedSnowflakeSemanticView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSnowflakeSemanticView reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSnowflakeSemanticView(guid=self.guid) - return RelatedSnowflakeSemanticView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -675,6 +609,11 @@ class SnowflakeSemanticViewRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -801,6 +740,7 @@ class SnowflakeSemanticViewNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -946,9 +886,6 @@ def _snowflake_semantic_view_to_nested( is_incomplete=snowflake_semantic_view.is_incomplete, provenance_type=snowflake_semantic_view.provenance_type, home_id=snowflake_semantic_view.home_id, - depth=snowflake_semantic_view.depth, - 
immediate_upstream=snowflake_semantic_view.immediate_upstream, - immediate_downstream=snowflake_semantic_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -984,6 +921,7 @@ def _snowflake_semantic_view_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -992,9 +930,6 @@ def _snowflake_semantic_view_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_snowflake_semantic_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1121,6 +1056,9 @@ def _snowflake_semantic_view_from_nested_bytes( SnowflakeSemanticView.DBT_SOURCES = RelationField("dbtSources") SnowflakeSemanticView.SQL_DBT_SOURCES = RelationField("sqlDBTSources") SnowflakeSemanticView.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +SnowflakeSemanticView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SnowflakeSemanticView.MEANINGS = RelationField("meanings") SnowflakeSemanticView.MC_MONITORS = RelationField("mcMonitors") SnowflakeSemanticView.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/soda.py b/pyatlan_v9/model/assets/soda.py index 20a48b1a8..4ed41dabb 100644 --- a/pyatlan_v9/model/assets/soda.py +++ b/pyatlan_v9/model/assets/soda.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related 
import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -48,7 +49,7 @@ from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject -from .soda_related import RelatedSoda, RelatedSodaCheck +from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob # ============================================================================= @@ -78,6 +79,7 @@ class Soda(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +97,8 @@ class Soda(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Soda" + dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET """Whether this data quality is part of contract (true) or not (false).""" @@ -147,6 +151,11 @@ class Soda(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -202,66 +211,6 @@ class Soda(Asset): def __post_init__(self) -> None: self.type_name = "Soda" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run 
validation of this Soda instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Soda validation failed: {errors}") - - def minimize(self) -> "Soda": - """ - Return a minimal copy of this Soda with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Soda with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Soda instance with only the minimum required fields. - """ - self.validate() - return Soda(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSoda": - """ - Create a :class:`RelatedSoda` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSoda reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSoda(guid=self.guid) - return RelatedSoda(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -373,6 +322,11 @@ class SodaRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -455,6 +409,7 @@ class SodaNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -522,9 +477,6 @@ def _soda_to_nested(soda: Soda) -> SodaNested: is_incomplete=soda.is_incomplete, provenance_type=soda.provenance_type, home_id=soda.home_id, - depth=soda.depth, - immediate_upstream=soda.immediate_upstream, - immediate_downstream=soda.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -554,6 +506,7 @@ def _soda_from_nested(nested: SodaNested) -> Soda: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -562,9 +515,6 @@ def _soda_from_nested(nested: SodaNested) -> Soda: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_soda_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -607,6 +557,9 @@ def _soda_from_nested_bytes(data: bytes, serde: Serde) -> Soda: Soda.METRICS = RelationField("metrics") Soda.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Soda.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Soda.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Soda.MEANINGS = RelationField("meanings") Soda.MC_MONITORS = RelationField("mcMonitors") Soda.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/soda_check.py b/pyatlan_v9/model/assets/soda_check.py index 4546b1aec..9d20e2b12 100644 --- a/pyatlan_v9/model/assets/soda_check.py +++ b/pyatlan_v9/model/assets/soda_check.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -64,12 +65,12 @@ class SodaCheck(Asset): Instance of a Soda check in Atlan. 
""" - SODA_CHECK_ID: ClassVar[Any] = None - SODA_CHECK_EVALUATION_STATUS: ClassVar[Any] = None + SODA_ID: ClassVar[Any] = None + SODA_EVALUATION_STATUS: ClassVar[Any] = None SODA_CHECK_DEFINITION: ClassVar[Any] = None - SODA_CHECK_LAST_SCAN_AT: ClassVar[Any] = None - SODA_CHECK_INCIDENT_COUNT: ClassVar[Any] = None - SODA_CHECK_LINKED_ASSET_QUALIFIED_NAME: ClassVar[Any] = None + SODA_LAST_SCAN_AT: ClassVar[Any] = None + SODA_INCIDENT_COUNT: ClassVar[Any] = None + SODA_LINKED_ASSET_QUALIFIED_NAME: ClassVar[Any] = None DQ_IS_PART_OF_CONTRACT: ClassVar[Any] = None CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None @@ -86,6 +87,7 @@ class SodaCheck(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,22 +107,24 @@ class SodaCheck(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None - soda_check_id: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SodaCheck" + + soda_id: Union[str, None, UnsetType] = UNSET """Identifier of the check in Soda.""" - soda_check_evaluation_status: Union[str, None, UnsetType] = UNSET + soda_evaluation_status: Union[str, None, UnsetType] = UNSET """Status of the check in Soda.""" soda_check_definition: Union[str, None, UnsetType] = UNSET """Definition of the check in Soda.""" - soda_check_last_scan_at: Union[int, None, UnsetType] = UNSET + soda_last_scan_at: Union[int, None, UnsetType] = UNSET """""" - soda_check_incident_count: Union[int, None, UnsetType] = UNSET + soda_incident_count: Union[int, None, UnsetType] = UNSET """""" - soda_check_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET + soda_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET """QualifiedName 
of the asset associated with the check.""" dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET @@ -175,6 +179,11 @@ class SodaCheck(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -236,66 +245,6 @@ class SodaCheck(Asset): def __post_init__(self) -> None: self.type_name = "SodaCheck" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SodaCheck instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SodaCheck validation failed: {errors}") - - def minimize(self) -> "SodaCheck": - """ - Return a minimal copy of this SodaCheck with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SodaCheck with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SodaCheck instance with only the minimum required fields. - """ - self.validate() - return SodaCheck(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSodaCheck": - """ - Create a :class:`RelatedSodaCheck` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSodaCheck reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSodaCheck(guid=self.guid) - return RelatedSodaCheck(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -351,22 +300,22 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SodaCheck: class SodaCheckAttributes(AssetAttributes): """SodaCheck-specific attributes for nested API format.""" - soda_check_id: Union[str, None, UnsetType] = UNSET + soda_id: Union[str, None, UnsetType] = UNSET """Identifier of the check in Soda.""" - soda_check_evaluation_status: Union[str, None, UnsetType] = UNSET + soda_evaluation_status: Union[str, None, UnsetType] = UNSET """Status of the check in Soda.""" soda_check_definition: Union[str, None, UnsetType] = UNSET """Definition of the check in Soda.""" - soda_check_last_scan_at: Union[int, None, UnsetType] = UNSET + soda_last_scan_at: Union[int, None, UnsetType] = UNSET """""" - soda_check_incident_count: Union[int, None, UnsetType] = UNSET + soda_incident_count: Union[int, None, UnsetType] = UNSET """""" - soda_check_linked_asset_qualified_name: Union[str, None, UnsetType] 
= UNSET + soda_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET """QualifiedName of the asset associated with the check.""" dq_is_part_of_contract: Union[bool, None, UnsetType] = UNSET @@ -425,6 +374,11 @@ class SodaCheckRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -517,6 +471,7 @@ class SodaCheckNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -541,14 +496,12 @@ class SodaCheckNested(AssetNested): def _populate_soda_check_attrs(attrs: SodaCheckAttributes, obj: SodaCheck) -> None: """Populate SodaCheck-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.soda_check_id = obj.soda_check_id - attrs.soda_check_evaluation_status = obj.soda_check_evaluation_status + attrs.soda_id = obj.soda_id + attrs.soda_evaluation_status = obj.soda_evaluation_status attrs.soda_check_definition = obj.soda_check_definition - attrs.soda_check_last_scan_at = obj.soda_check_last_scan_at - attrs.soda_check_incident_count = obj.soda_check_incident_count - attrs.soda_check_linked_asset_qualified_name = ( - obj.soda_check_linked_asset_qualified_name - ) + attrs.soda_last_scan_at = obj.soda_last_scan_at + attrs.soda_incident_count = obj.soda_incident_count + attrs.soda_linked_asset_qualified_name = obj.soda_linked_asset_qualified_name attrs.dq_is_part_of_contract = obj.dq_is_part_of_contract attrs.catalog_dataset_guid = obj.catalog_dataset_guid @@ -556,14 +509,12 @@ def _populate_soda_check_attrs(attrs: SodaCheckAttributes, obj: 
SodaCheck) -> No def _extract_soda_check_attrs(attrs: SodaCheckAttributes) -> dict: """Extract all SodaCheck attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["soda_check_id"] = attrs.soda_check_id - result["soda_check_evaluation_status"] = attrs.soda_check_evaluation_status + result["soda_id"] = attrs.soda_id + result["soda_evaluation_status"] = attrs.soda_evaluation_status result["soda_check_definition"] = attrs.soda_check_definition - result["soda_check_last_scan_at"] = attrs.soda_check_last_scan_at - result["soda_check_incident_count"] = attrs.soda_check_incident_count - result["soda_check_linked_asset_qualified_name"] = ( - attrs.soda_check_linked_asset_qualified_name - ) + result["soda_last_scan_at"] = attrs.soda_last_scan_at + result["soda_incident_count"] = attrs.soda_incident_count + result["soda_linked_asset_qualified_name"] = attrs.soda_linked_asset_qualified_name result["dq_is_part_of_contract"] = attrs.dq_is_part_of_contract result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -602,9 +553,6 @@ def _soda_check_to_nested(soda_check: SodaCheck) -> SodaCheckNested: is_incomplete=soda_check.is_incomplete, provenance_type=soda_check.provenance_type, home_id=soda_check.home_id, - depth=soda_check.depth, - immediate_upstream=soda_check.immediate_upstream, - immediate_downstream=soda_check.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -636,6 +584,7 @@ def _soda_check_from_nested(nested: SodaCheckNested) -> SodaCheck: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -644,9 +593,6 @@ def _soda_check_from_nested(nested: SodaCheckNested) -> SodaCheck: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_soda_check_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -674,21 +620,17 @@ def _soda_check_from_nested_bytes(data: bytes, serde: Serde) -> SodaCheck: RelationField, ) -SodaCheck.SODA_CHECK_ID = KeywordField("sodaCheckId", "sodaCheckId") -SodaCheck.SODA_CHECK_EVALUATION_STATUS = KeywordField( - "sodaCheckEvaluationStatus", "sodaCheckEvaluationStatus" +SodaCheck.SODA_ID = KeywordField("sodaId", "sodaId") +SodaCheck.SODA_EVALUATION_STATUS = KeywordField( + "sodaEvaluationStatus", "sodaEvaluationStatus" ) SodaCheck.SODA_CHECK_DEFINITION = KeywordField( "sodaCheckDefinition", "sodaCheckDefinition" ) -SodaCheck.SODA_CHECK_LAST_SCAN_AT = NumericField( - "sodaCheckLastScanAt", "sodaCheckLastScanAt" -) -SodaCheck.SODA_CHECK_INCIDENT_COUNT = NumericField( - "sodaCheckIncidentCount", "sodaCheckIncidentCount" -) -SodaCheck.SODA_CHECK_LINKED_ASSET_QUALIFIED_NAME = KeywordField( - "sodaCheckLinkedAssetQualifiedName", "sodaCheckLinkedAssetQualifiedName" +SodaCheck.SODA_LAST_SCAN_AT = NumericField("sodaLastScanAt", "sodaLastScanAt") +SodaCheck.SODA_INCIDENT_COUNT = NumericField("sodaIncidentCount", "sodaIncidentCount") +SodaCheck.SODA_LINKED_ASSET_QUALIFIED_NAME = KeywordField( + "sodaLinkedAssetQualifiedName", "sodaLinkedAssetQualifiedName" ) SodaCheck.DQ_IS_PART_OF_CONTRACT = BooleanField( "dqIsPartOfContract", "dqIsPartOfContract" @@ -710,6 +652,9 @@ def _soda_check_from_nested_bytes(data: bytes, serde: Serde) -> SodaCheck: SodaCheck.METRICS = RelationField("metrics") SodaCheck.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SodaCheck.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SodaCheck.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SodaCheck.MEANINGS = 
RelationField("meanings") SodaCheck.MC_MONITORS = RelationField("mcMonitors") SodaCheck.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/soda_related.py b/pyatlan_v9/model/assets/soda_related.py index 9c4431e95..43979f158 100644 --- a/pyatlan_v9/model/assets/soda_related.py +++ b/pyatlan_v9/model/assets/soda_related.py @@ -49,22 +49,22 @@ class RelatedSodaCheck(RelatedSoda): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SodaCheck" so it serializes correctly - soda_check_id: Union[str, None, UnsetType] = UNSET + soda_id: Union[str, None, UnsetType] = UNSET """Identifier of the check in Soda.""" - soda_check_evaluation_status: Union[str, None, UnsetType] = UNSET + soda_evaluation_status: Union[str, None, UnsetType] = UNSET """Status of the check in Soda.""" soda_check_definition: Union[str, None, UnsetType] = UNSET """Definition of the check in Soda.""" - soda_check_last_scan_at: Union[int, None, UnsetType] = UNSET + soda_last_scan_at: Union[int, None, UnsetType] = UNSET """""" - soda_check_incident_count: Union[int, None, UnsetType] = UNSET + soda_incident_count: Union[int, None, UnsetType] = UNSET """""" - soda_check_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET + soda_linked_asset_qualified_name: Union[str, None, UnsetType] = UNSET """QualifiedName of the asset associated with the check.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/source_tag.py b/pyatlan_v9/model/assets/source_tag.py index de4754316..e940c95e1 100644 --- a/pyatlan_v9/model/assets/source_tag.py +++ b/pyatlan_v9/model/assets/source_tag.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import 
RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tag_related import RelatedSourceTag # ============================================================================= # FLAT ASSET CLASS @@ -83,6 +83,7 @@ class SourceTag(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +101,8 @@ class SourceTag(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SourceTag" + tag_custom_configuration: Union[str, None, UnsetType] = UNSET """Specifies custom configuration elements based on the system the tag is being imported from.""" @@ -164,6 +167,11 @@ class SourceTag(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -219,73 +227,6 @@ class SourceTag(Asset): def __post_init__(self) -> None: self.type_name = "SourceTag" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SourceTag instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.tag_id is UNSET: - errors.append("tag_id is required for creation") - if self.tag_allowed_values is UNSET: - errors.append("tag_allowed_values is required for creation") - if self.mapped_classification_name is UNSET: - errors.append("mapped_classification_name is required for creation") - if errors: - raise ValueError(f"SourceTag validation failed: {errors}") - - def minimize(self) -> "SourceTag": - """ - Return a minimal copy of this SourceTag with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SourceTag with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SourceTag instance with only the minimum required fields. - """ - self.validate() - return SourceTag(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSourceTag": - """ - Create a :class:`RelatedSourceTag` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSourceTag reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSourceTag(guid=self.guid) - return RelatedSourceTag(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -409,6 +350,11 @@ class SourceTagRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -495,6 +441,7 @@ class SourceTagNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -570,9 +517,6 @@ def _source_tag_to_nested(source_tag: SourceTag) -> SourceTagNested: is_incomplete=source_tag.is_incomplete, provenance_type=source_tag.provenance_type, home_id=source_tag.home_id, - depth=source_tag.depth, - immediate_upstream=source_tag.immediate_upstream, - immediate_downstream=source_tag.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -604,6 +548,7 @@ def _source_tag_from_nested(nested: SourceTagNested) -> SourceTag: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -612,9 +557,6 @@ def _source_tag_from_nested(nested: SourceTagNested) -> SourceTag: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_source_tag_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -669,6 +611,9 @@ def _source_tag_from_nested_bytes(data: bytes, serde: Serde) -> SourceTag: SourceTag.METRICS = RelationField("metrics") SourceTag.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SourceTag.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SourceTag.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SourceTag.MEANINGS = RelationField("meanings") SourceTag.MC_MONITORS = RelationField("mcMonitors") SourceTag.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/spark.py b/pyatlan_v9/model/assets/spark.py index 44cca91b4..bb0856174 100644 --- a/pyatlan_v9/model/assets/spark.py +++ b/pyatlan_v9/model/assets/spark.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -49,7 +50,7 @@ from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck -from .spark_related import RelatedSpark, RelatedSparkJob +from .spark_related import RelatedSparkJob # ============================================================================= # FLAT ASSET CLASS @@ -82,6 +83,7 @@ class Spark(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +102,8 @@ class Spark(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_BY_AIRFLOW_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Spark" + spark_run_version: Union[str, None, UnsetType] = UNSET """Spark Version for the Spark Job run eg. 3.4.1""" @@ -164,6 +168,11 @@ class Spark(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -224,66 +233,6 @@ class Spark(Asset): def __post_init__(self) -> None: self.type_name = "Spark" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Spark instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Spark validation failed: {errors}") - - def minimize(self) -> "Spark": - """ - Return a minimal copy of this Spark with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Spark with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Spark instance with only the minimum required fields. - """ - self.validate() - return Spark(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSpark": - """ - Create a :class:`RelatedSpark` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSpark reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSpark(guid=self.guid) - return RelatedSpark(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -407,6 +356,11 @@ class SparkRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -498,6 +452,7 @@ class SparkNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -574,9 +529,6 @@ def _spark_to_nested(spark: Spark) -> SparkNested: is_incomplete=spark.is_incomplete, provenance_type=spark.provenance_type, home_id=spark.home_id, - depth=spark.depth, - immediate_upstream=spark.immediate_upstream, - immediate_downstream=spark.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -606,6 +558,7 @@ def _spark_from_nested(nested: SparkNested) -> Spark: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -614,9 +567,6 @@ def _spark_from_nested(nested: SparkNested) -> Spark: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_spark_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -667,6 +617,9 @@ def _spark_from_nested_bytes(data: bytes, serde: Serde) -> Spark: Spark.METRICS = RelationField("metrics") Spark.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Spark.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Spark.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Spark.MEANINGS = RelationField("meanings") Spark.MC_MONITORS = RelationField("mcMonitors") Spark.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/spark_job.py b/pyatlan_v9/model/assets/spark_job.py index d356edf02..0a7ad23f2 100644 --- a/pyatlan_v9/model/assets/spark_job.py +++ b/pyatlan_v9/model/assets/spark_job.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,6 +87,7 @@ class SparkJob(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -107,6 +109,8 @@ class SparkJob(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SPARK_ORCHESTRATED_BY_AIRFLOW_ASSETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SparkJob" + spark_app_name: Union[str, None, UnsetType] = UNSET """Name of the Spark app containing this Spark Job For eg. 
extract_raw_data""" @@ -177,6 +181,11 @@ class SparkJob(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -252,72 +261,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SparkJob instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.process is UNSET: - errors.append("process is required for creation") - if errors: - raise ValueError(f"SparkJob validation failed: {errors}") - - def minimize(self) -> "SparkJob": - """ - Return a minimal copy of this SparkJob with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SparkJob with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SparkJob instance with only the minimum required fields. - """ - self.validate() - return SparkJob(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSparkJob": - """ - Create a :class:`RelatedSparkJob` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSparkJob reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSparkJob(guid=self.guid) - return RelatedSparkJob(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -447,6 +390,11 @@ class SparkJobRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -547,6 +495,7 @@ class SparkJobNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -630,9 +579,6 @@ def _spark_job_to_nested(spark_job: SparkJob) -> SparkJobNested: is_incomplete=spark_job.is_incomplete, provenance_type=spark_job.provenance_type, home_id=spark_job.home_id, - depth=spark_job.depth, - immediate_upstream=spark_job.immediate_upstream, - immediate_downstream=spark_job.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -664,6 +610,7 @@ def _spark_job_from_nested(nested: SparkJobNested) -> SparkJob: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -672,9 +619,6 @@ def _spark_job_from_nested(nested: SparkJobNested) -> SparkJob: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_spark_job_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -730,6 +674,9 @@ def _spark_job_from_nested_bytes(data: bytes, serde: Serde) -> SparkJob: SparkJob.METRICS = RelationField("metrics") SparkJob.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SparkJob.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SparkJob.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SparkJob.MEANINGS = RelationField("meanings") SparkJob.MC_MONITORS = RelationField("mcMonitors") SparkJob.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sql.py b/pyatlan_v9/model/assets/sql.py index f06248224..e121664f9 100644 --- a/pyatlan_v9/model/assets/sql.py +++ b/pyatlan_v9/model/assets/sql.py @@ -47,6 +47,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -62,7 +63,6 @@ RelatedSqlInsightBusinessQuestion, RelatedSqlInsightJoin, ) -from .sql_related import RelatedSQL # ============================================================================= # FLAT ASSET CLASS @@ -120,6 +120,7 @@ class SQL(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -141,6 +142,8 @@ class SQL(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SQL" + query_count: Union[int, None, UnsetType] 
= UNSET """Number of times this asset has been queried.""" @@ -284,6 +287,11 @@ class SQL(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -359,66 +367,6 @@ class SQL(Asset): def __post_init__(self) -> None: self.type_name = "SQL" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SQL instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SQL validation failed: {errors}") - - def minimize(self) -> "SQL": - """ - Return a minimal copy of this SQL with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SQL with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SQL instance with only the minimum required fields. - """ - self.validate() - return SQL(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSQL": - """ - Create a :class:`RelatedSQL` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSQL reference to this asset. - """ - if self.guid is not UNSET: - return RelatedSQL(guid=self.guid) - return RelatedSQL(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -621,6 +569,11 @@ class SQLRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -729,6 +682,7 @@ class SQLNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -860,9 +814,6 @@ def _sql_to_nested(sql: SQL) -> SQLNested: is_incomplete=sql.is_incomplete, provenance_type=sql.provenance_type, home_id=sql.home_id, - depth=sql.depth, - immediate_upstream=sql.immediate_upstream, - 
immediate_downstream=sql.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -892,6 +843,7 @@ def _sql_from_nested(nested: SQLNested) -> SQL: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -900,9 +852,6 @@ def _sql_from_nested(nested: SQLNested) -> SQL: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sql_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -992,6 +941,9 @@ def _sql_from_nested_bytes(data: bytes, serde: Serde) -> SQL: SQL.DBT_SOURCES = RelationField("dbtSources") SQL.SQL_DBT_SOURCES = RelationField("sqlDBTSources") SQL.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +SQL.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SQL.MEANINGS = RelationField("meanings") SQL.MC_MONITORS = RelationField("mcMonitors") SQL.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sql_insight.py b/pyatlan_v9/model/assets/sql_insight.py index cf9d907aa..148af0f6e 100644 --- a/pyatlan_v9/model/assets/sql_insight.py +++ b/pyatlan_v9/model/assets/sql_insight.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 
+51,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_insight_related import RelatedSqlInsight # ============================================================================= # FLAT ASSET CLASS @@ -78,6 +78,7 @@ class SqlInsight(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -95,6 +96,8 @@ class SqlInsight(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SqlInsight" + catalog_dataset_guid: Union[str, None, UnsetType] = UNSET """Unique identifier of the dataset this asset belongs to.""" @@ -144,6 +147,11 @@ class SqlInsight(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -199,66 +207,6 @@ class SqlInsight(Asset): def __post_init__(self) -> None: self.type_name = "SqlInsight" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SqlInsight instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SqlInsight validation failed: {errors}") - - def minimize(self) -> "SqlInsight": - """ - Return a minimal copy of this SqlInsight with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SqlInsight with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SqlInsight instance with only the minimum required fields. - """ - self.validate() - return SqlInsight(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSqlInsight": - """ - Create a :class:`RelatedSqlInsight` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSqlInsight reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSqlInsight(guid=self.guid) - return RelatedSqlInsight(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -367,6 +315,11 @@ class SqlInsightRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -453,6 +406,7 @@ class SqlInsightNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -518,9 +472,6 @@ def _sql_insight_to_nested(sql_insight: SqlInsight) -> SqlInsightNested: is_incomplete=sql_insight.is_incomplete, provenance_type=sql_insight.provenance_type, home_id=sql_insight.home_id, - depth=sql_insight.depth, - immediate_upstream=sql_insight.immediate_upstream, - immediate_downstream=sql_insight.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -552,6 +503,7 @@ def _sql_insight_from_nested(nested: SqlInsightNested) -> SqlInsight: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -560,9 +512,6 @@ def _sql_insight_from_nested(nested: SqlInsightNested) -> SqlInsight: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sql_insight_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -602,6 +551,9 @@ def _sql_insight_from_nested_bytes(data: bytes, serde: Serde) -> SqlInsight: SqlInsight.METRICS = RelationField("metrics") SqlInsight.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SqlInsight.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SqlInsight.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SqlInsight.MEANINGS = RelationField("meanings") SqlInsight.MC_MONITORS = RelationField("mcMonitors") SqlInsight.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sql_insight_business_question.py b/pyatlan_v9/model/assets/sql_insight_business_question.py index 1f3515049..ddf178fa1 100644 --- a/pyatlan_v9/model/assets/sql_insight_business_question.py +++ b/pyatlan_v9/model/assets/sql_insight_business_question.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,7 +53,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_insight_related import RelatedSqlInsightBusinessQuestion from .sql_related import RelatedSQL # ============================================================================= @@ -66,11 +66,11 @@ class SqlInsightBusinessQuestion(Asset): A generalized business question pattern observed 
from real query traffic. """ - SQL_INSIGHT_BUSINESS_QUESTION_TEXT: ClassVar[Any] = None - SQL_INSIGHT_BUSINESS_QUESTION_CANONICAL_SQL: ClassVar[Any] = None - SQL_INSIGHT_BUSINESS_QUESTION_QUERY_COUNT: ClassVar[Any] = None - SQL_INSIGHT_BUSINESS_QUESTION_UNIQUE_USERS: ClassVar[Any] = None - SQL_INSIGHT_BUSINESS_QUESTION_LAST_SEEN_AT: ClassVar[Any] = None + SQL_INSIGHT_TEXT: ClassVar[Any] = None + SQL_INSIGHT_CANONICAL_SQL: ClassVar[Any] = None + SQL_INSIGHT_QUERY_COUNT: ClassVar[Any] = None + SQL_INSIGHT_UNIQUE_USERS: ClassVar[Any] = None + SQL_INSIGHT_LAST_SEEN_AT: ClassVar[Any] = None CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None @@ -86,6 +86,7 @@ class SqlInsightBusinessQuestion(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -104,21 +105,23 @@ class SqlInsightBusinessQuestion(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SQL_INSIGHT_DATASET: ClassVar[Any] = None - sql_insight_business_question_text: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SqlInsightBusinessQuestion" + + sql_insight_text: Union[str, None, UnsetType] = UNSET """Natural language text of the business question.""" - sql_insight_business_question_canonical_sql: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="sqlInsightBusinessQuestionCanonicalSQL") + sql_insight_canonical_sql: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlInsightCanonicalSQL" ) """Canonical SQL query that answers this business question.""" - sql_insight_business_question_query_count: Union[int, None, UnsetType] = UNSET + sql_insight_query_count: Union[int, None, UnsetType] = UNSET """Number of queries associated 
with this business question.""" - sql_insight_business_question_unique_users: Union[int, None, UnsetType] = UNSET + sql_insight_unique_users: Union[int, None, UnsetType] = UNSET """Number of unique users who have asked this question.""" - sql_insight_business_question_last_seen_at: Union[int, None, UnsetType] = UNSET + sql_insight_last_seen_at: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this question was last observed, in milliseconds.""" catalog_dataset_guid: Union[str, None, UnsetType] = UNSET @@ -170,6 +173,11 @@ class SqlInsightBusinessQuestion(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -234,72 +242,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SqlInsightBusinessQuestion instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"SqlInsightBusinessQuestion validation failed: {errors}") - - def minimize(self) -> "SqlInsightBusinessQuestion": - """ - Return a minimal copy of this SqlInsightBusinessQuestion with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SqlInsightBusinessQuestion with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SqlInsightBusinessQuestion instance with only the minimum required fields. - """ - self.validate() - return SqlInsightBusinessQuestion( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedSqlInsightBusinessQuestion": - """ - Create a :class:`RelatedSqlInsightBusinessQuestion` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSqlInsightBusinessQuestion reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSqlInsightBusinessQuestion(guid=self.guid) - return RelatedSqlInsightBusinessQuestion(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -357,21 +299,21 @@ def from_json( class SqlInsightBusinessQuestionAttributes(AssetAttributes): """SqlInsightBusinessQuestion-specific attributes for nested API format.""" - sql_insight_business_question_text: Union[str, None, UnsetType] = UNSET + sql_insight_text: Union[str, None, UnsetType] = UNSET """Natural language text of the business question.""" - sql_insight_business_question_canonical_sql: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="sqlInsightBusinessQuestionCanonicalSQL") + sql_insight_canonical_sql: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlInsightCanonicalSQL" ) """Canonical SQL query that answers this business question.""" - sql_insight_business_question_query_count: Union[int, None, UnsetType] = UNSET + sql_insight_query_count: Union[int, None, UnsetType] = UNSET """Number of queries associated with this business question.""" - sql_insight_business_question_unique_users: Union[int, None, UnsetType] = UNSET + sql_insight_unique_users: Union[int, None, UnsetType] = UNSET """Number of unique users who have asked this question.""" - sql_insight_business_question_last_seen_at: Union[int, None, UnsetType] = UNSET + sql_insight_last_seen_at: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this question was last observed, in milliseconds.""" catalog_dataset_guid: Union[str, None, UnsetType] = UNSET @@ -427,6 +369,11 @@ class SqlInsightBusinessQuestionRelationshipAttributes(AssetRelationshipAttribut ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + 
List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -518,6 +465,7 @@ class SqlInsightBusinessQuestionNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -543,19 +491,11 @@ def _populate_sql_insight_business_question_attrs( ) -> None: """Populate SqlInsightBusinessQuestion-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sql_insight_business_question_text = obj.sql_insight_business_question_text - attrs.sql_insight_business_question_canonical_sql = ( - obj.sql_insight_business_question_canonical_sql - ) - attrs.sql_insight_business_question_query_count = ( - obj.sql_insight_business_question_query_count - ) - attrs.sql_insight_business_question_unique_users = ( - obj.sql_insight_business_question_unique_users - ) - attrs.sql_insight_business_question_last_seen_at = ( - obj.sql_insight_business_question_last_seen_at - ) + attrs.sql_insight_text = obj.sql_insight_text + attrs.sql_insight_canonical_sql = obj.sql_insight_canonical_sql + attrs.sql_insight_query_count = obj.sql_insight_query_count + attrs.sql_insight_unique_users = obj.sql_insight_unique_users + attrs.sql_insight_last_seen_at = obj.sql_insight_last_seen_at attrs.catalog_dataset_guid = obj.catalog_dataset_guid @@ -564,21 +504,11 @@ def _extract_sql_insight_business_question_attrs( ) -> dict: """Extract all SqlInsightBusinessQuestion attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sql_insight_business_question_text"] = ( - attrs.sql_insight_business_question_text - ) - result["sql_insight_business_question_canonical_sql"] = ( - 
attrs.sql_insight_business_question_canonical_sql - ) - result["sql_insight_business_question_query_count"] = ( - attrs.sql_insight_business_question_query_count - ) - result["sql_insight_business_question_unique_users"] = ( - attrs.sql_insight_business_question_unique_users - ) - result["sql_insight_business_question_last_seen_at"] = ( - attrs.sql_insight_business_question_last_seen_at - ) + result["sql_insight_text"] = attrs.sql_insight_text + result["sql_insight_canonical_sql"] = attrs.sql_insight_canonical_sql + result["sql_insight_query_count"] = attrs.sql_insight_query_count + result["sql_insight_unique_users"] = attrs.sql_insight_unique_users + result["sql_insight_last_seen_at"] = attrs.sql_insight_last_seen_at result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -620,9 +550,6 @@ def _sql_insight_business_question_to_nested( is_incomplete=sql_insight_business_question.is_incomplete, provenance_type=sql_insight_business_question.provenance_type, home_id=sql_insight_business_question.home_id, - depth=sql_insight_business_question.depth, - immediate_upstream=sql_insight_business_question.immediate_upstream, - immediate_downstream=sql_insight_business_question.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -658,6 +585,7 @@ def _sql_insight_business_question_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -666,9 +594,6 @@ def _sql_insight_business_question_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_sql_insight_business_question_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -701,20 +626,20 @@ def _sql_insight_business_question_from_nested_bytes( RelationField, ) -SqlInsightBusinessQuestion.SQL_INSIGHT_BUSINESS_QUESTION_TEXT = KeywordField( - "sqlInsightBusinessQuestionText", "sqlInsightBusinessQuestionText" +SqlInsightBusinessQuestion.SQL_INSIGHT_TEXT = KeywordField( + "sqlInsightText", "sqlInsightText" ) -SqlInsightBusinessQuestion.SQL_INSIGHT_BUSINESS_QUESTION_CANONICAL_SQL = KeywordField( - "sqlInsightBusinessQuestionCanonicalSQL", "sqlInsightBusinessQuestionCanonicalSQL" +SqlInsightBusinessQuestion.SQL_INSIGHT_CANONICAL_SQL = KeywordField( + "sqlInsightCanonicalSQL", "sqlInsightCanonicalSQL" ) -SqlInsightBusinessQuestion.SQL_INSIGHT_BUSINESS_QUESTION_QUERY_COUNT = NumericField( - "sqlInsightBusinessQuestionQueryCount", "sqlInsightBusinessQuestionQueryCount" +SqlInsightBusinessQuestion.SQL_INSIGHT_QUERY_COUNT = NumericField( + "sqlInsightQueryCount", "sqlInsightQueryCount" ) -SqlInsightBusinessQuestion.SQL_INSIGHT_BUSINESS_QUESTION_UNIQUE_USERS = NumericField( - "sqlInsightBusinessQuestionUniqueUsers", "sqlInsightBusinessQuestionUniqueUsers" +SqlInsightBusinessQuestion.SQL_INSIGHT_UNIQUE_USERS = NumericField( + "sqlInsightUniqueUsers", "sqlInsightUniqueUsers" ) -SqlInsightBusinessQuestion.SQL_INSIGHT_BUSINESS_QUESTION_LAST_SEEN_AT = NumericField( - "sqlInsightBusinessQuestionLastSeenAt", "sqlInsightBusinessQuestionLastSeenAt" +SqlInsightBusinessQuestion.SQL_INSIGHT_LAST_SEEN_AT = NumericField( + "sqlInsightLastSeenAt", "sqlInsightLastSeenAt" ) SqlInsightBusinessQuestion.CATALOG_DATASET_GUID = KeywordField( "catalogDatasetGuid", "catalogDatasetGuid" @@ -747,6 +672,9 @@ def _sql_insight_business_question_from_nested_bytes( SqlInsightBusinessQuestion.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +SqlInsightBusinessQuestion.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + 
"gcpDataplexAspectTypeMetadataEntities" +) SqlInsightBusinessQuestion.MEANINGS = RelationField("meanings") SqlInsightBusinessQuestion.MC_MONITORS = RelationField("mcMonitors") SqlInsightBusinessQuestion.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sql_insight_filter.py b/pyatlan_v9/model/assets/sql_insight_filter.py index 727c8e457..04efd2a5a 100644 --- a/pyatlan_v9/model/assets/sql_insight_filter.py +++ b/pyatlan_v9/model/assets/sql_insight_filter.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,7 +53,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_insight_related import RelatedSqlInsightFilter from .sql_related import RelatedColumn # ============================================================================= @@ -66,16 +66,16 @@ class SqlInsightFilter(Asset): A column-level filtering observation from real query traffic. 
""" - SQL_INSIGHT_FILTER_DATASET_QUALIFIED_NAME: ClassVar[Any] = None - SQL_INSIGHT_FILTER_COLUMN_QUALIFIED_NAME: ClassVar[Any] = None - SQL_INSIGHT_FILTER_COMMON_VALUES: ClassVar[Any] = None - SQL_INSIGHT_FILTER_OPERATOR: ClassVar[Any] = None - SQL_INSIGHT_FILTER_PREDICATE_SQL: ClassVar[Any] = None - SQL_INSIGHT_FILTER_WHEN_TO_USE: ClassVar[Any] = None - SQL_INSIGHT_FILTER_QUERY_COUNT: ClassVar[Any] = None - SQL_INSIGHT_FILTER_UNIQUE_USERS: ClassVar[Any] = None - SQL_INSIGHT_FILTER_LAST_SEEN_AT: ClassVar[Any] = None - SQL_INSIGHT_FILTER_EXAMPLE_QUERIES: ClassVar[Any] = None + SQL_INSIGHT_DATASET_QUALIFIED_NAME: ClassVar[Any] = None + SQL_INSIGHT_COLUMN_QUALIFIED_NAME: ClassVar[Any] = None + SQL_INSIGHT_COMMON_VALUES: ClassVar[Any] = None + SQL_INSIGHT_OPERATOR: ClassVar[Any] = None + SQL_INSIGHT_PREDICATE_SQL: ClassVar[Any] = None + SQL_INSIGHT_WHEN_TO_USE: ClassVar[Any] = None + SQL_INSIGHT_QUERY_COUNT: ClassVar[Any] = None + SQL_INSIGHT_UNIQUE_USERS: ClassVar[Any] = None + SQL_INSIGHT_LAST_SEEN_AT: ClassVar[Any] = None + SQL_INSIGHT_EXAMPLE_QUERIES: ClassVar[Any] = None CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None @@ -91,6 +91,7 @@ class SqlInsightFilter(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -109,38 +110,38 @@ class SqlInsightFilter(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SQL_INSIGHT_COLUMN: ClassVar[Any] = None - sql_insight_filter_dataset_qualified_name: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SqlInsightFilter" + + sql_insight_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dataset containing the filtered column.""" - 
sql_insight_filter_column_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_column_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the filtered column.""" - sql_insight_filter_common_values: Union[List[str], None, UnsetType] = UNSET + sql_insight_common_values: Union[List[str], None, UnsetType] = UNSET """Common values observed for this filter.""" - sql_insight_filter_operator: Union[str, None, UnsetType] = UNSET + sql_insight_operator: Union[str, None, UnsetType] = UNSET """SQL operator observed on this column, such as =, !=, IN, LIKE.""" - sql_insight_filter_predicate_sql: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="sqlInsightFilterPredicateSQL" + sql_insight_predicate_sql: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlInsightPredicateSQL" ) """SQL predicate expression for this filter pattern.""" - sql_insight_filter_when_to_use: Union[str, None, UnsetType] = UNSET + sql_insight_when_to_use: Union[str, None, UnsetType] = UNSET """Guidance on when this filter pattern should be used.""" - sql_insight_filter_query_count: Union[int, None, UnsetType] = UNSET + sql_insight_query_count: Union[int, None, UnsetType] = UNSET """Number of queries that use this filter pattern.""" - sql_insight_filter_unique_users: Union[int, None, UnsetType] = UNSET + sql_insight_unique_users: Union[int, None, UnsetType] = UNSET """Number of unique users who have used this filter pattern.""" - sql_insight_filter_last_seen_at: Union[int, None, UnsetType] = UNSET + sql_insight_last_seen_at: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this filter pattern was last observed, in milliseconds.""" - sql_insight_filter_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = ( - UNSET - ) + sql_insight_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Example SQL queries that demonstrate this filter pattern, with usage details.""" catalog_dataset_guid: Union[str, 
None, UnsetType] = UNSET @@ -192,6 +193,11 @@ class SqlInsightFilter(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,70 +262,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SqlInsightFilter instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"SqlInsightFilter validation failed: {errors}") - - def minimize(self) -> "SqlInsightFilter": - """ - Return a minimal copy of this SqlInsightFilter with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SqlInsightFilter with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SqlInsightFilter instance with only the minimum required fields. - """ - self.validate() - return SqlInsightFilter(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSqlInsightFilter": - """ - Create a :class:`RelatedSqlInsightFilter` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSqlInsightFilter reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSqlInsightFilter(guid=self.guid) - return RelatedSqlInsightFilter(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -377,38 +319,36 @@ def from_json( class SqlInsightFilterAttributes(AssetAttributes): """SqlInsightFilter-specific attributes for nested API format.""" - sql_insight_filter_dataset_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dataset containing the filtered column.""" - sql_insight_filter_column_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_column_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the filtered column.""" - sql_insight_filter_common_values: Union[List[str], None, UnsetType] = UNSET + sql_insight_common_values: Union[List[str], None, UnsetType] = UNSET """Common values observed for this filter.""" - sql_insight_filter_operator: Union[str, None, UnsetType] = UNSET + sql_insight_operator: Union[str, None, UnsetType] = UNSET """SQL operator observed on this column, such as =, !=, IN, LIKE.""" - sql_insight_filter_predicate_sql: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="sqlInsightFilterPredicateSQL" + sql_insight_predicate_sql: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlInsightPredicateSQL" ) """SQL predicate expression for this filter pattern.""" - sql_insight_filter_when_to_use: Union[str, None, UnsetType] = UNSET + sql_insight_when_to_use: Union[str, None, UnsetType] = UNSET """Guidance on when this filter pattern should be used.""" - sql_insight_filter_query_count: Union[int, None, UnsetType] = UNSET + sql_insight_query_count: Union[int, None, UnsetType] = UNSET """Number of queries that 
use this filter pattern.""" - sql_insight_filter_unique_users: Union[int, None, UnsetType] = UNSET + sql_insight_unique_users: Union[int, None, UnsetType] = UNSET """Number of unique users who have used this filter pattern.""" - sql_insight_filter_last_seen_at: Union[int, None, UnsetType] = UNSET + sql_insight_last_seen_at: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this filter pattern was last observed, in milliseconds.""" - sql_insight_filter_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = ( - UNSET - ) + sql_insight_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Example SQL queries that demonstrate this filter pattern, with usage details.""" catalog_dataset_guid: Union[str, None, UnsetType] = UNSET @@ -464,6 +404,11 @@ class SqlInsightFilterRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -555,6 +500,7 @@ class SqlInsightFilterNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -580,42 +526,36 @@ def _populate_sql_insight_filter_attrs( ) -> None: """Populate SqlInsightFilter-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sql_insight_filter_dataset_qualified_name = ( - obj.sql_insight_filter_dataset_qualified_name - ) - attrs.sql_insight_filter_column_qualified_name = ( - obj.sql_insight_filter_column_qualified_name - ) - attrs.sql_insight_filter_common_values = obj.sql_insight_filter_common_values - attrs.sql_insight_filter_operator = 
obj.sql_insight_filter_operator - attrs.sql_insight_filter_predicate_sql = obj.sql_insight_filter_predicate_sql - attrs.sql_insight_filter_when_to_use = obj.sql_insight_filter_when_to_use - attrs.sql_insight_filter_query_count = obj.sql_insight_filter_query_count - attrs.sql_insight_filter_unique_users = obj.sql_insight_filter_unique_users - attrs.sql_insight_filter_last_seen_at = obj.sql_insight_filter_last_seen_at - attrs.sql_insight_filter_example_queries = obj.sql_insight_filter_example_queries + attrs.sql_insight_dataset_qualified_name = obj.sql_insight_dataset_qualified_name + attrs.sql_insight_column_qualified_name = obj.sql_insight_column_qualified_name + attrs.sql_insight_common_values = obj.sql_insight_common_values + attrs.sql_insight_operator = obj.sql_insight_operator + attrs.sql_insight_predicate_sql = obj.sql_insight_predicate_sql + attrs.sql_insight_when_to_use = obj.sql_insight_when_to_use + attrs.sql_insight_query_count = obj.sql_insight_query_count + attrs.sql_insight_unique_users = obj.sql_insight_unique_users + attrs.sql_insight_last_seen_at = obj.sql_insight_last_seen_at + attrs.sql_insight_example_queries = obj.sql_insight_example_queries attrs.catalog_dataset_guid = obj.catalog_dataset_guid def _extract_sql_insight_filter_attrs(attrs: SqlInsightFilterAttributes) -> dict: """Extract all SqlInsightFilter attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sql_insight_filter_dataset_qualified_name"] = ( - attrs.sql_insight_filter_dataset_qualified_name + result["sql_insight_dataset_qualified_name"] = ( + attrs.sql_insight_dataset_qualified_name ) - result["sql_insight_filter_column_qualified_name"] = ( - attrs.sql_insight_filter_column_qualified_name - ) - result["sql_insight_filter_common_values"] = attrs.sql_insight_filter_common_values - result["sql_insight_filter_operator"] = attrs.sql_insight_filter_operator - result["sql_insight_filter_predicate_sql"] = 
attrs.sql_insight_filter_predicate_sql - result["sql_insight_filter_when_to_use"] = attrs.sql_insight_filter_when_to_use - result["sql_insight_filter_query_count"] = attrs.sql_insight_filter_query_count - result["sql_insight_filter_unique_users"] = attrs.sql_insight_filter_unique_users - result["sql_insight_filter_last_seen_at"] = attrs.sql_insight_filter_last_seen_at - result["sql_insight_filter_example_queries"] = ( - attrs.sql_insight_filter_example_queries + result["sql_insight_column_qualified_name"] = ( + attrs.sql_insight_column_qualified_name ) + result["sql_insight_common_values"] = attrs.sql_insight_common_values + result["sql_insight_operator"] = attrs.sql_insight_operator + result["sql_insight_predicate_sql"] = attrs.sql_insight_predicate_sql + result["sql_insight_when_to_use"] = attrs.sql_insight_when_to_use + result["sql_insight_query_count"] = attrs.sql_insight_query_count + result["sql_insight_unique_users"] = attrs.sql_insight_unique_users + result["sql_insight_last_seen_at"] = attrs.sql_insight_last_seen_at + result["sql_insight_example_queries"] = attrs.sql_insight_example_queries result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -657,9 +597,6 @@ def _sql_insight_filter_to_nested( is_incomplete=sql_insight_filter.is_incomplete, provenance_type=sql_insight_filter.provenance_type, home_id=sql_insight_filter.home_id, - depth=sql_insight_filter.depth, - immediate_upstream=sql_insight_filter.immediate_upstream, - immediate_downstream=sql_insight_filter.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -693,6 +630,7 @@ def _sql_insight_filter_from_nested(nested: SqlInsightFilterNested) -> SqlInsigh updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -701,9 +639,6 @@ def _sql_insight_filter_from_nested(nested: SqlInsightFilterNested) -> SqlInsigh is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sql_insight_filter_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -734,35 +669,35 @@ def _sql_insight_filter_from_nested_bytes( RelationField, ) -SqlInsightFilter.SQL_INSIGHT_FILTER_DATASET_QUALIFIED_NAME = KeywordField( - "sqlInsightFilterDatasetQualifiedName", "sqlInsightFilterDatasetQualifiedName" +SqlInsightFilter.SQL_INSIGHT_DATASET_QUALIFIED_NAME = KeywordField( + "sqlInsightDatasetQualifiedName", "sqlInsightDatasetQualifiedName" ) -SqlInsightFilter.SQL_INSIGHT_FILTER_COLUMN_QUALIFIED_NAME = KeywordField( - "sqlInsightFilterColumnQualifiedName", "sqlInsightFilterColumnQualifiedName" +SqlInsightFilter.SQL_INSIGHT_COLUMN_QUALIFIED_NAME = KeywordField( + "sqlInsightColumnQualifiedName", "sqlInsightColumnQualifiedName" ) -SqlInsightFilter.SQL_INSIGHT_FILTER_COMMON_VALUES = KeywordField( - "sqlInsightFilterCommonValues", "sqlInsightFilterCommonValues" +SqlInsightFilter.SQL_INSIGHT_COMMON_VALUES = KeywordField( + "sqlInsightCommonValues", "sqlInsightCommonValues" ) -SqlInsightFilter.SQL_INSIGHT_FILTER_OPERATOR = KeywordField( - "sqlInsightFilterOperator", "sqlInsightFilterOperator" +SqlInsightFilter.SQL_INSIGHT_OPERATOR = KeywordField( + "sqlInsightOperator", "sqlInsightOperator" ) -SqlInsightFilter.SQL_INSIGHT_FILTER_PREDICATE_SQL = KeywordField( - "sqlInsightFilterPredicateSQL", "sqlInsightFilterPredicateSQL" +SqlInsightFilter.SQL_INSIGHT_PREDICATE_SQL = KeywordField( + "sqlInsightPredicateSQL", "sqlInsightPredicateSQL" ) -SqlInsightFilter.SQL_INSIGHT_FILTER_WHEN_TO_USE = KeywordField( - "sqlInsightFilterWhenToUse", "sqlInsightFilterWhenToUse" 
+SqlInsightFilter.SQL_INSIGHT_WHEN_TO_USE = KeywordField( + "sqlInsightWhenToUse", "sqlInsightWhenToUse" ) -SqlInsightFilter.SQL_INSIGHT_FILTER_QUERY_COUNT = NumericField( - "sqlInsightFilterQueryCount", "sqlInsightFilterQueryCount" +SqlInsightFilter.SQL_INSIGHT_QUERY_COUNT = NumericField( + "sqlInsightQueryCount", "sqlInsightQueryCount" ) -SqlInsightFilter.SQL_INSIGHT_FILTER_UNIQUE_USERS = NumericField( - "sqlInsightFilterUniqueUsers", "sqlInsightFilterUniqueUsers" +SqlInsightFilter.SQL_INSIGHT_UNIQUE_USERS = NumericField( + "sqlInsightUniqueUsers", "sqlInsightUniqueUsers" ) -SqlInsightFilter.SQL_INSIGHT_FILTER_LAST_SEEN_AT = NumericField( - "sqlInsightFilterLastSeenAt", "sqlInsightFilterLastSeenAt" +SqlInsightFilter.SQL_INSIGHT_LAST_SEEN_AT = NumericField( + "sqlInsightLastSeenAt", "sqlInsightLastSeenAt" ) -SqlInsightFilter.SQL_INSIGHT_FILTER_EXAMPLE_QUERIES = KeywordField( - "sqlInsightFilterExampleQueries", "sqlInsightFilterExampleQueries" +SqlInsightFilter.SQL_INSIGHT_EXAMPLE_QUERIES = KeywordField( + "sqlInsightExampleQueries", "sqlInsightExampleQueries" ) SqlInsightFilter.CATALOG_DATASET_GUID = KeywordField( "catalogDatasetGuid", "catalogDatasetGuid" @@ -785,6 +720,9 @@ def _sql_insight_filter_from_nested_bytes( SqlInsightFilter.METRICS = RelationField("metrics") SqlInsightFilter.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SqlInsightFilter.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SqlInsightFilter.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SqlInsightFilter.MEANINGS = RelationField("meanings") SqlInsightFilter.MC_MONITORS = RelationField("mcMonitors") SqlInsightFilter.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sql_insight_join.py b/pyatlan_v9/model/assets/sql_insight_join.py index f113735e7..71a18cc0b 100644 --- a/pyatlan_v9/model/assets/sql_insight_join.py +++ b/pyatlan_v9/model/assets/sql_insight_join.py @@ -41,6 
+41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,7 +52,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .sql_insight_related import RelatedSqlInsightJoin from .sql_related import RelatedSQL # ============================================================================= @@ -65,16 +65,16 @@ class SqlInsightJoin(Asset): A directed join pattern observed between two SQL datasets from real query traffic. """ - SQL_INSIGHT_JOIN_SOURCE_DATASET_QUALIFIED_NAME: ClassVar[Any] = None - SQL_INSIGHT_JOIN_JOINED_DATASET_QUALIFIED_NAME: ClassVar[Any] = None - SQL_INSIGHT_JOIN_TYPE: ClassVar[Any] = None - SQL_INSIGHT_JOIN_CARDINALITY: ClassVar[Any] = None - SQL_INSIGHT_JOIN_WHEN_TO_USE: ClassVar[Any] = None - SQL_INSIGHT_JOIN_COLUMN_PAIRS: ClassVar[Any] = None - SQL_INSIGHT_JOIN_QUERY_COUNT: ClassVar[Any] = None - SQL_INSIGHT_JOIN_UNIQUE_USERS: ClassVar[Any] = None - SQL_INSIGHT_JOIN_LAST_SEEN_AT: ClassVar[Any] = None - SQL_INSIGHT_JOIN_EXAMPLE_QUERIES: ClassVar[Any] = None + SQL_INSIGHT_SOURCE_DATASET_QUALIFIED_NAME: ClassVar[Any] = None + SQL_INSIGHT_JOINED_DATASET_QUALIFIED_NAME: ClassVar[Any] = None + SQL_INSIGHT_TYPE: ClassVar[Any] = None + SQL_INSIGHT_CARDINALITY: ClassVar[Any] = None + SQL_INSIGHT_WHEN_TO_USE: ClassVar[Any] = None + SQL_INSIGHT_COLUMN_PAIRS: ClassVar[Any] = None + SQL_INSIGHT_QUERY_COUNT: ClassVar[Any] = None + SQL_INSIGHT_UNIQUE_USERS: ClassVar[Any] = None + SQL_INSIGHT_LAST_SEEN_AT: ClassVar[Any] = None + SQL_INSIGHT_EXAMPLE_QUERIES: ClassVar[Any] = None 
CATALOG_DATASET_GUID: ClassVar[Any] = None INPUT_TO_AIRFLOW_TASKS: ClassVar[Any] = None OUTPUT_FROM_AIRFLOW_TASKS: ClassVar[Any] = None @@ -90,6 +90,7 @@ class SqlInsightJoin(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -109,36 +110,36 @@ class SqlInsightJoin(Asset): SQL_INSIGHT_SOURCE_DATASET: ClassVar[Any] = None SQL_INSIGHT_JOINED_DATASET: ClassVar[Any] = None - sql_insight_join_source_dataset_qualified_name: Union[str, None, UnsetType] = UNSET + type_name: Union[str, UnsetType] = "SqlInsightJoin" + + sql_insight_source_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the source dataset in this join pattern.""" - sql_insight_join_joined_dataset_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_joined_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the joined dataset in this join pattern.""" - sql_insight_join_type: Union[str, None, UnsetType] = UNSET + sql_insight_type: Union[str, None, UnsetType] = UNSET """Type of SQL join observed in this pattern.""" - sql_insight_join_cardinality: Union[str, None, UnsetType] = UNSET + sql_insight_cardinality: Union[str, None, UnsetType] = UNSET """Observed cardinality of the join relationship.""" - sql_insight_join_when_to_use: Union[str, None, UnsetType] = UNSET + sql_insight_when_to_use: Union[str, None, UnsetType] = UNSET """Guidance on when this join pattern should be used.""" - sql_insight_join_column_pairs: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + sql_insight_column_pairs: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Column mappings in this join, pairing source columns to joined columns.""" - sql_insight_join_query_count: Union[int, None, UnsetType] = UNSET + 
sql_insight_query_count: Union[int, None, UnsetType] = UNSET """Number of queries that use this join pattern.""" - sql_insight_join_unique_users: Union[int, None, UnsetType] = UNSET + sql_insight_unique_users: Union[int, None, UnsetType] = UNSET """Number of unique users who have used this join pattern.""" - sql_insight_join_last_seen_at: Union[int, None, UnsetType] = UNSET + sql_insight_last_seen_at: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this join pattern was last observed, in milliseconds.""" - sql_insight_join_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = ( - UNSET - ) + sql_insight_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Example SQL queries that demonstrate this join pattern, with usage details.""" catalog_dataset_guid: Union[str, None, UnsetType] = UNSET @@ -190,6 +191,11 @@ class SqlInsightJoin(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -257,70 +263,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SqlInsightJoin instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"SqlInsightJoin validation failed: {errors}") - - def minimize(self) -> "SqlInsightJoin": - """ - Return a minimal copy of this SqlInsightJoin with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SqlInsightJoin with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SqlInsightJoin instance with only the minimum required fields. - """ - self.validate() - return SqlInsightJoin(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSqlInsightJoin": - """ - Create a :class:`RelatedSqlInsightJoin` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSqlInsightJoin reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSqlInsightJoin(guid=self.guid) - return RelatedSqlInsightJoin(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -376,36 +318,34 @@ def from_json(json_data: str | bytes, serde: Serde | None = None) -> SqlInsightJ class SqlInsightJoinAttributes(AssetAttributes): """SqlInsightJoin-specific attributes for nested API format.""" - sql_insight_join_source_dataset_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_source_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the source dataset in this join pattern.""" - sql_insight_join_joined_dataset_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_joined_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the joined dataset in this join pattern.""" - sql_insight_join_type: Union[str, None, UnsetType] = UNSET + sql_insight_type: Union[str, None, UnsetType] = UNSET """Type of SQL join observed in this pattern.""" - sql_insight_join_cardinality: Union[str, None, UnsetType] = UNSET + sql_insight_cardinality: Union[str, None, UnsetType] = UNSET """Observed cardinality of the join relationship.""" - sql_insight_join_when_to_use: Union[str, None, UnsetType] = UNSET + sql_insight_when_to_use: Union[str, None, UnsetType] = UNSET """Guidance on when this join pattern should be used.""" - sql_insight_join_column_pairs: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + sql_insight_column_pairs: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Column mappings in this join, pairing source columns to joined columns.""" - sql_insight_join_query_count: Union[int, None, UnsetType] = UNSET + sql_insight_query_count: Union[int, None, UnsetType] = UNSET """Number of queries that use this join 
pattern.""" - sql_insight_join_unique_users: Union[int, None, UnsetType] = UNSET + sql_insight_unique_users: Union[int, None, UnsetType] = UNSET """Number of unique users who have used this join pattern.""" - sql_insight_join_last_seen_at: Union[int, None, UnsetType] = UNSET + sql_insight_last_seen_at: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this join pattern was last observed, in milliseconds.""" - sql_insight_join_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = ( - UNSET - ) + sql_insight_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Example SQL queries that demonstrate this join pattern, with usage details.""" catalog_dataset_guid: Union[str, None, UnsetType] = UNSET @@ -461,6 +401,11 @@ class SqlInsightJoinRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -555,6 +500,7 @@ class SqlInsightJoinNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -581,40 +527,40 @@ def _populate_sql_insight_join_attrs( ) -> None: """Populate SqlInsightJoin-specific attributes on the attrs struct.""" _populate_asset_attrs(attrs, obj) - attrs.sql_insight_join_source_dataset_qualified_name = ( - obj.sql_insight_join_source_dataset_qualified_name + attrs.sql_insight_source_dataset_qualified_name = ( + obj.sql_insight_source_dataset_qualified_name ) - attrs.sql_insight_join_joined_dataset_qualified_name = ( - obj.sql_insight_join_joined_dataset_qualified_name + attrs.sql_insight_joined_dataset_qualified_name = ( + 
obj.sql_insight_joined_dataset_qualified_name ) - attrs.sql_insight_join_type = obj.sql_insight_join_type - attrs.sql_insight_join_cardinality = obj.sql_insight_join_cardinality - attrs.sql_insight_join_when_to_use = obj.sql_insight_join_when_to_use - attrs.sql_insight_join_column_pairs = obj.sql_insight_join_column_pairs - attrs.sql_insight_join_query_count = obj.sql_insight_join_query_count - attrs.sql_insight_join_unique_users = obj.sql_insight_join_unique_users - attrs.sql_insight_join_last_seen_at = obj.sql_insight_join_last_seen_at - attrs.sql_insight_join_example_queries = obj.sql_insight_join_example_queries + attrs.sql_insight_type = obj.sql_insight_type + attrs.sql_insight_cardinality = obj.sql_insight_cardinality + attrs.sql_insight_when_to_use = obj.sql_insight_when_to_use + attrs.sql_insight_column_pairs = obj.sql_insight_column_pairs + attrs.sql_insight_query_count = obj.sql_insight_query_count + attrs.sql_insight_unique_users = obj.sql_insight_unique_users + attrs.sql_insight_last_seen_at = obj.sql_insight_last_seen_at + attrs.sql_insight_example_queries = obj.sql_insight_example_queries attrs.catalog_dataset_guid = obj.catalog_dataset_guid def _extract_sql_insight_join_attrs(attrs: SqlInsightJoinAttributes) -> dict: """Extract all SqlInsightJoin attributes from the attrs struct into a flat dict.""" result = _extract_asset_attrs(attrs) - result["sql_insight_join_source_dataset_qualified_name"] = ( - attrs.sql_insight_join_source_dataset_qualified_name + result["sql_insight_source_dataset_qualified_name"] = ( + attrs.sql_insight_source_dataset_qualified_name ) - result["sql_insight_join_joined_dataset_qualified_name"] = ( - attrs.sql_insight_join_joined_dataset_qualified_name + result["sql_insight_joined_dataset_qualified_name"] = ( + attrs.sql_insight_joined_dataset_qualified_name ) - result["sql_insight_join_type"] = attrs.sql_insight_join_type - result["sql_insight_join_cardinality"] = attrs.sql_insight_join_cardinality - 
result["sql_insight_join_when_to_use"] = attrs.sql_insight_join_when_to_use - result["sql_insight_join_column_pairs"] = attrs.sql_insight_join_column_pairs - result["sql_insight_join_query_count"] = attrs.sql_insight_join_query_count - result["sql_insight_join_unique_users"] = attrs.sql_insight_join_unique_users - result["sql_insight_join_last_seen_at"] = attrs.sql_insight_join_last_seen_at - result["sql_insight_join_example_queries"] = attrs.sql_insight_join_example_queries + result["sql_insight_type"] = attrs.sql_insight_type + result["sql_insight_cardinality"] = attrs.sql_insight_cardinality + result["sql_insight_when_to_use"] = attrs.sql_insight_when_to_use + result["sql_insight_column_pairs"] = attrs.sql_insight_column_pairs + result["sql_insight_query_count"] = attrs.sql_insight_query_count + result["sql_insight_unique_users"] = attrs.sql_insight_unique_users + result["sql_insight_last_seen_at"] = attrs.sql_insight_last_seen_at + result["sql_insight_example_queries"] = attrs.sql_insight_example_queries result["catalog_dataset_guid"] = attrs.catalog_dataset_guid return result @@ -656,9 +602,6 @@ def _sql_insight_join_to_nested( is_incomplete=sql_insight_join.is_incomplete, provenance_type=sql_insight_join.provenance_type, home_id=sql_insight_join.home_id, - depth=sql_insight_join.depth, - immediate_upstream=sql_insight_join.immediate_upstream, - immediate_downstream=sql_insight_join.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -692,6 +635,7 @@ def _sql_insight_join_from_nested(nested: SqlInsightJoinNested) -> SqlInsightJoi updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -700,9 +644,6 @@ def _sql_insight_join_from_nested(nested: SqlInsightJoinNested) -> SqlInsightJoi 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_sql_insight_join_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -731,37 +672,33 @@ def _sql_insight_join_from_nested_bytes(data: bytes, serde: Serde) -> SqlInsight RelationField, ) -SqlInsightJoin.SQL_INSIGHT_JOIN_SOURCE_DATASET_QUALIFIED_NAME = KeywordField( - "sqlInsightJoinSourceDatasetQualifiedName", - "sqlInsightJoinSourceDatasetQualifiedName", -) -SqlInsightJoin.SQL_INSIGHT_JOIN_JOINED_DATASET_QUALIFIED_NAME = KeywordField( - "sqlInsightJoinJoinedDatasetQualifiedName", - "sqlInsightJoinJoinedDatasetQualifiedName", +SqlInsightJoin.SQL_INSIGHT_SOURCE_DATASET_QUALIFIED_NAME = KeywordField( + "sqlInsightSourceDatasetQualifiedName", "sqlInsightSourceDatasetQualifiedName" ) -SqlInsightJoin.SQL_INSIGHT_JOIN_TYPE = KeywordField( - "sqlInsightJoinType", "sqlInsightJoinType" +SqlInsightJoin.SQL_INSIGHT_JOINED_DATASET_QUALIFIED_NAME = KeywordField( + "sqlInsightJoinedDatasetQualifiedName", "sqlInsightJoinedDatasetQualifiedName" ) -SqlInsightJoin.SQL_INSIGHT_JOIN_CARDINALITY = KeywordField( - "sqlInsightJoinCardinality", "sqlInsightJoinCardinality" +SqlInsightJoin.SQL_INSIGHT_TYPE = KeywordField("sqlInsightType", "sqlInsightType") +SqlInsightJoin.SQL_INSIGHT_CARDINALITY = KeywordField( + "sqlInsightCardinality", "sqlInsightCardinality" ) -SqlInsightJoin.SQL_INSIGHT_JOIN_WHEN_TO_USE = KeywordField( - "sqlInsightJoinWhenToUse", "sqlInsightJoinWhenToUse" +SqlInsightJoin.SQL_INSIGHT_WHEN_TO_USE = KeywordField( + "sqlInsightWhenToUse", "sqlInsightWhenToUse" ) -SqlInsightJoin.SQL_INSIGHT_JOIN_COLUMN_PAIRS = KeywordField( - "sqlInsightJoinColumnPairs", "sqlInsightJoinColumnPairs" +SqlInsightJoin.SQL_INSIGHT_COLUMN_PAIRS = KeywordField( + "sqlInsightColumnPairs", "sqlInsightColumnPairs" ) -SqlInsightJoin.SQL_INSIGHT_JOIN_QUERY_COUNT 
= NumericField( - "sqlInsightJoinQueryCount", "sqlInsightJoinQueryCount" +SqlInsightJoin.SQL_INSIGHT_QUERY_COUNT = NumericField( + "sqlInsightQueryCount", "sqlInsightQueryCount" ) -SqlInsightJoin.SQL_INSIGHT_JOIN_UNIQUE_USERS = NumericField( - "sqlInsightJoinUniqueUsers", "sqlInsightJoinUniqueUsers" +SqlInsightJoin.SQL_INSIGHT_UNIQUE_USERS = NumericField( + "sqlInsightUniqueUsers", "sqlInsightUniqueUsers" ) -SqlInsightJoin.SQL_INSIGHT_JOIN_LAST_SEEN_AT = NumericField( - "sqlInsightJoinLastSeenAt", "sqlInsightJoinLastSeenAt" +SqlInsightJoin.SQL_INSIGHT_LAST_SEEN_AT = NumericField( + "sqlInsightLastSeenAt", "sqlInsightLastSeenAt" ) -SqlInsightJoin.SQL_INSIGHT_JOIN_EXAMPLE_QUERIES = KeywordField( - "sqlInsightJoinExampleQueries", "sqlInsightJoinExampleQueries" +SqlInsightJoin.SQL_INSIGHT_EXAMPLE_QUERIES = KeywordField( + "sqlInsightExampleQueries", "sqlInsightExampleQueries" ) SqlInsightJoin.CATALOG_DATASET_GUID = KeywordField( "catalogDatasetGuid", "catalogDatasetGuid" @@ -784,6 +721,9 @@ def _sql_insight_join_from_nested_bytes(data: bytes, serde: Serde) -> SqlInsight SqlInsightJoin.METRICS = RelationField("metrics") SqlInsightJoin.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SqlInsightJoin.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SqlInsightJoin.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SqlInsightJoin.MEANINGS = RelationField("meanings") SqlInsightJoin.MC_MONITORS = RelationField("mcMonitors") SqlInsightJoin.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/sql_insight_related.py b/pyatlan_v9/model/assets/sql_insight_related.py index fb8fb587e..818613185 100644 --- a/pyatlan_v9/model/assets/sql_insight_related.py +++ b/pyatlan_v9/model/assets/sql_insight_related.py @@ -52,36 +52,34 @@ class RelatedSqlInsightJoin(RelatedSqlInsight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SqlInsightJoin" so 
it serializes correctly - sql_insight_join_source_dataset_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_source_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the source dataset in this join pattern.""" - sql_insight_join_joined_dataset_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_joined_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the joined dataset in this join pattern.""" - sql_insight_join_type: Union[str, None, UnsetType] = UNSET + sql_insight_type: Union[str, None, UnsetType] = UNSET """Type of SQL join observed in this pattern.""" - sql_insight_join_cardinality: Union[str, None, UnsetType] = UNSET + sql_insight_cardinality: Union[str, None, UnsetType] = UNSET """Observed cardinality of the join relationship.""" - sql_insight_join_when_to_use: Union[str, None, UnsetType] = UNSET + sql_insight_when_to_use: Union[str, None, UnsetType] = UNSET """Guidance on when this join pattern should be used.""" - sql_insight_join_column_pairs: Union[List[Dict[str, Any]], None, UnsetType] = UNSET + sql_insight_column_pairs: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Column mappings in this join, pairing source columns to joined columns.""" - sql_insight_join_query_count: Union[int, None, UnsetType] = UNSET + sql_insight_query_count: Union[int, None, UnsetType] = UNSET """Number of queries that use this join pattern.""" - sql_insight_join_unique_users: Union[int, None, UnsetType] = UNSET + sql_insight_unique_users: Union[int, None, UnsetType] = UNSET """Number of unique users who have used this join pattern.""" - sql_insight_join_last_seen_at: Union[int, None, UnsetType] = UNSET + sql_insight_last_seen_at: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this join pattern was last observed, in milliseconds.""" - sql_insight_join_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = ( - UNSET - ) + sql_insight_example_queries: 
Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Example SQL queries that demonstrate this join pattern, with usage details.""" def __post_init__(self) -> None: @@ -99,38 +97,36 @@ class RelatedSqlInsightFilter(RelatedSqlInsight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SqlInsightFilter" so it serializes correctly - sql_insight_filter_dataset_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_dataset_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the dataset containing the filtered column.""" - sql_insight_filter_column_qualified_name: Union[str, None, UnsetType] = UNSET + sql_insight_column_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the filtered column.""" - sql_insight_filter_common_values: Union[List[str], None, UnsetType] = UNSET + sql_insight_common_values: Union[List[str], None, UnsetType] = UNSET """Common values observed for this filter.""" - sql_insight_filter_operator: Union[str, None, UnsetType] = UNSET + sql_insight_operator: Union[str, None, UnsetType] = UNSET """SQL operator observed on this column, such as =, !=, IN, LIKE.""" - sql_insight_filter_predicate_sql: Union[str, None, UnsetType] = msgspec.field( - default=UNSET, name="sqlInsightFilterPredicateSQL" + sql_insight_predicate_sql: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlInsightPredicateSQL" ) """SQL predicate expression for this filter pattern.""" - sql_insight_filter_when_to_use: Union[str, None, UnsetType] = UNSET + sql_insight_when_to_use: Union[str, None, UnsetType] = UNSET """Guidance on when this filter pattern should be used.""" - sql_insight_filter_query_count: Union[int, None, UnsetType] = UNSET + sql_insight_query_count: Union[int, None, UnsetType] = UNSET """Number of queries that use this filter pattern.""" - sql_insight_filter_unique_users: Union[int, None, UnsetType] = UNSET + sql_insight_unique_users: Union[int, None, UnsetType] = 
UNSET """Number of unique users who have used this filter pattern.""" - sql_insight_filter_last_seen_at: Union[int, None, UnsetType] = UNSET + sql_insight_last_seen_at: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this filter pattern was last observed, in milliseconds.""" - sql_insight_filter_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = ( - UNSET - ) + sql_insight_example_queries: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """Example SQL queries that demonstrate this filter pattern, with usage details.""" def __post_init__(self) -> None: @@ -148,21 +144,21 @@ class RelatedSqlInsightBusinessQuestion(RelatedSqlInsight): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "SqlInsightBusinessQuestion" so it serializes correctly - sql_insight_business_question_text: Union[str, None, UnsetType] = UNSET + sql_insight_text: Union[str, None, UnsetType] = UNSET """Natural language text of the business question.""" - sql_insight_business_question_canonical_sql: Union[str, None, UnsetType] = ( - msgspec.field(default=UNSET, name="sqlInsightBusinessQuestionCanonicalSQL") + sql_insight_canonical_sql: Union[str, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlInsightCanonicalSQL" ) """Canonical SQL query that answers this business question.""" - sql_insight_business_question_query_count: Union[int, None, UnsetType] = UNSET + sql_insight_query_count: Union[int, None, UnsetType] = UNSET """Number of queries associated with this business question.""" - sql_insight_business_question_unique_users: Union[int, None, UnsetType] = UNSET + sql_insight_unique_users: Union[int, None, UnsetType] = UNSET """Number of unique users who have asked this question.""" - sql_insight_business_question_last_seen_at: Union[int, None, UnsetType] = UNSET + sql_insight_last_seen_at: Union[int, None, UnsetType] = UNSET """Time (epoch) at which this question was last observed, in milliseconds.""" def __post_init__(self) -> 
None: diff --git a/pyatlan_v9/model/assets/sql_related.py b/pyatlan_v9/model/assets/sql_related.py index 0f5884b48..8d839cd69 100644 --- a/pyatlan_v9/model/assets/sql_related.py +++ b/pyatlan_v9/model/assets/sql_related.py @@ -137,16 +137,16 @@ class RelatedCalculationView(RelatedSQL): column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this calculation view.""" - calculation_view_version_id: Union[int, None, UnsetType] = UNSET + sql_version_id: Union[int, None, UnsetType] = UNSET """The version ID of this calculation view.""" - calculation_view_activated_by: Union[str, None, UnsetType] = UNSET + sql_activated_by: Union[str, None, UnsetType] = UNSET """The owner who activated the calculation view""" - calculation_view_activated_at: Union[int, None, UnsetType] = UNSET + sql_activated_at: Union[int, None, UnsetType] = UNSET """Time at which this calculation view was activated at""" - calculation_view_package_id: Union[str, None, UnsetType] = UNSET + sql_package_id: Union[str, None, UnsetType] = UNSET """The full package id path to which a calculation view belongs/resides in the repository.""" def __post_init__(self) -> None: @@ -170,10 +170,10 @@ class RelatedColumn(RelatedSQL): sub_data_type: Union[str, None, UnsetType] = UNSET """Sub-data type of this column.""" - column_compression: Union[str, None, UnsetType] = UNSET + sql_compression: Union[str, None, UnsetType] = UNSET """Compression type of this column.""" - column_encoding: Union[str, None, UnsetType] = UNSET + sql_encoding: Union[str, None, UnsetType] = UNSET """Encoding type of this column.""" raw_data_type_definition: Union[str, None, UnsetType] = UNSET @@ -248,112 +248,115 @@ class RelatedColumn(RelatedSQL): parent_column_name: Union[str, None, UnsetType] = UNSET """Simple name of the column this column is nested within, for STRUCT and NESTED columns.""" - column_distinct_values_count: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count: Union[int, None, UnsetType] = 
UNSET """Number of rows that contain distinct values.""" - column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET + sql_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" - column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + + sql_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" - column_max: Union[float, None, UnsetType] = UNSET + sql_max: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min: Union[float, None, UnsetType] = UNSET + sql_min: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean: Union[float, None, UnsetType] = UNSET + sql_mean: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum: Union[float, None, UnsetType] = UNSET + sql_sum: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median: Union[float, None, UnsetType] = UNSET + sql_median: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation: Union[float, None, UnsetType] = UNSET + sql_standard_deviation: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_unique_values_count: Union[int, None, UnsetType] = UNSET + sql_unique_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - column_unique_values_count_long: Union[int, None, UnsetType] = UNSET + sql_unique_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in which a value in this column appears only once.""" - 
column_average: Union[float, None, UnsetType] = UNSET + sql_average: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_average_length: Union[float, None, UnsetType] = UNSET + sql_average_length: Union[float, None, UnsetType] = UNSET """Average length of values in a string column.""" - column_duplicate_values_count: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET + sql_duplicate_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain duplicate values.""" - column_maximum_string_length: Union[int, None, UnsetType] = UNSET + sql_maximum_string_length: Union[int, None, UnsetType] = UNSET """Length of the longest value in a string column.""" column_maxs: Union[List[str], None, UnsetType] = UNSET """List of the greatest values in a column.""" - column_minimum_string_length: Union[int, None, UnsetType] = UNSET + sql_minimum_string_length: Union[int, None, UnsetType] = UNSET """Length of the shortest value in a string column.""" column_mins: Union[List[str], None, UnsetType] = UNSET """List of the least values in a column.""" - column_missing_values_count: Union[int, None, UnsetType] = UNSET + sql_missing_values_count: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_count_long: Union[int, None, UnsetType] = UNSET + sql_missing_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows in a column that do not contain content.""" - column_missing_values_percentage: Union[float, None, UnsetType] = UNSET + sql_missing_values_percentage: Union[float, None, UnsetType] = UNSET """Percentage of rows in a column that do not contain content.""" - column_uniqueness_percentage: Union[float, None, UnsetType] = UNSET + sql_uniqueness_percentage: Union[float, 
None, UnsetType] = UNSET """Ratio indicating how unique data in this column is: 0 indicates that all values are the same, 100 indicates that all values in this column are unique.""" - column_variance: Union[float, None, UnsetType] = UNSET + sql_variance: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" column_top_values: Union[List[Dict[str, Any]], None, UnsetType] = UNSET """List of top values in this column.""" - column_max_value: Union[float, None, UnsetType] = UNSET + sql_max_value: Union[float, None, UnsetType] = UNSET """Greatest value in a numeric column.""" - column_min_value: Union[float, None, UnsetType] = UNSET + sql_min_value: Union[float, None, UnsetType] = UNSET """Least value in a numeric column.""" - column_mean_value: Union[float, None, UnsetType] = UNSET + sql_mean_value: Union[float, None, UnsetType] = UNSET """Arithmetic mean of the values in a numeric column.""" - column_sum_value: Union[float, None, UnsetType] = UNSET + sql_sum_value: Union[float, None, UnsetType] = UNSET """Calculated sum of the values in a numeric column.""" - column_median_value: Union[float, None, UnsetType] = UNSET + sql_median_value: Union[float, None, UnsetType] = UNSET """Calculated median of the values in a numeric column.""" - column_standard_deviation_value: Union[float, None, UnsetType] = UNSET + sql_standard_deviation_value: Union[float, None, UnsetType] = UNSET """Calculated standard deviation of the values in a numeric column.""" - column_average_value: Union[float, None, UnsetType] = UNSET + sql_average_value: Union[float, None, UnsetType] = UNSET """Average value in this column.""" - column_variance_value: Union[float, None, UnsetType] = UNSET + sql_variance_value: Union[float, None, UnsetType] = UNSET """Calculated variance of the values in a numeric column.""" - column_average_length_value: Union[float, None, UnsetType] = UNSET + sql_average_length_value: Union[float, None, UnsetType] = UNSET """Average 
length of values in a string column.""" - column_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET + sql_distribution_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """Detailed information representing a histogram of values for a column.""" - column_depth_level: Union[int, None, UnsetType] = UNSET + sql_depth_level: Union[int, None, UnsetType] = UNSET """Level of nesting of this column, used for STRUCT and NESTED columns.""" nosql_collection_name: Union[str, None, UnsetType] = UNSET @@ -362,27 +365,27 @@ class RelatedColumn(RelatedSQL): nosql_collection_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the cosmos/mongo collection in which this SQL asset (column) exists, or empty if it does not exist within a cosmos/mongo collection.""" - column_is_measure: Union[bool, None, UnsetType] = UNSET + sql_is_measure: Union[bool, None, UnsetType] = UNSET """When true, this column is of type measure/calculated.""" - column_measure_type: Union[str, None, UnsetType] = UNSET + sql_measure_type: Union[str, None, UnsetType] = UNSET """The type of measure/calculated column this is, eg: base, calculated, derived.""" - column_ai_insights_is_measure: Union[bool, None, UnsetType] = UNSET + sql_ai_insights_is_measure: Union[bool, None, UnsetType] = UNSET """When true, this column is identified as a measure/calculated column by AI analysis of query patterns.""" - column_ai_insights_measure_type: Union[str, None, UnsetType] = UNSET + sql_ai_insights_measure_type: Union[str, None, UnsetType] = UNSET """Type of measure/calculated column as classified by AI analysis, for example: base, calculated, derived.""" - column_ai_insights_is_dimension: Union[bool, None, UnsetType] = UNSET + sql_ai_insights_is_dimension: Union[bool, None, UnsetType] = UNSET """When true, this column is identified as a dimension by AI analysis of query patterns.""" - column_ai_insights_dimension_type: Union[str, None, UnsetType] = UNSET + 
sql_ai_insights_dimension_type: Union[str, None, UnsetType] = UNSET """Type of dimension as classified by AI analysis, for example: time, categorical, geographic.""" - column_ai_insights_foreign_key_column_qualified_name: Union[ - str, None, UnsetType - ] = UNSET + sql_ai_insights_foreign_key_column_qualified_name: Union[str, None, UnsetType] = ( + UNSET + ) """Qualified name of the column in another table that this column likely references as a foreign key, inferred by AI analysis of query patterns.""" def __post_init__(self) -> None: @@ -421,45 +424,45 @@ class RelatedFunction(RelatedSQL): function_definition: Union[str, None, UnsetType] = UNSET """Code or set of statements that determine the output of the function.""" - function_return_type: Union[str, None, UnsetType] = UNSET + sql_return_type: Union[str, None, UnsetType] = UNSET """Data type of the value returned by the function.""" - function_arguments: Union[List[str], None, UnsetType] = UNSET + sql_arguments: Union[List[str], None, UnsetType] = UNSET """Arguments that are passed in to the function.""" - function_language: Union[str, None, UnsetType] = UNSET + sql_language: Union[str, None, UnsetType] = UNSET """Programming language in which the function is written.""" - function_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of function.""" - function_is_external: Union[bool, None, UnsetType] = UNSET + sql_is_external: Union[bool, None, UnsetType] = UNSET """Whether the function is stored or executed externally (true) or internally (false).""" - function_is_dmf: Union[bool, None, UnsetType] = msgspec.field( - default=UNSET, name="functionIsDMF" + sql_is_dmf: Union[bool, None, UnsetType] = msgspec.field( + default=UNSET, name="sqlIsDMF" ) """Whether the function is a data metric function.""" - function_is_secure: Union[bool, None, UnsetType] = UNSET + sql_is_secure: Union[bool, None, UnsetType] = UNSET """Whether sensitive information of the function is 
omitted for unauthorized users (true) or not (false).""" - function_is_memoizable: Union[bool, None, UnsetType] = UNSET + sql_is_memoizable: Union[bool, None, UnsetType] = UNSET """Whether the function must re-compute if there are no underlying changes in the values (false) or not (true).""" - function_runtime_version: Union[str, None, UnsetType] = UNSET + sql_runtime_version: Union[str, None, UnsetType] = UNSET """Version of the language runtime used by the function.""" - function_external_access_integrations: Union[str, None, UnsetType] = UNSET + sql_external_access_integrations: Union[str, None, UnsetType] = UNSET """Names of external access integrations used by the function.""" - function_secrets: Union[str, None, UnsetType] = UNSET + sql_secrets: Union[str, None, UnsetType] = UNSET """Secret variables used by the function.""" - function_packages: Union[str, None, UnsetType] = UNSET + sql_packages: Union[str, None, UnsetType] = UNSET """Packages requested by the function.""" - function_installed_packages: Union[str, None, UnsetType] = UNSET + sql_installed_packages: Union[str, None, UnsetType] = UNSET """Packages actually installed for the function.""" def __post_init__(self) -> None: @@ -633,7 +636,7 @@ class RelatedSchema(RelatedSQL): table_count: Union[int, None, UnsetType] = UNSET """Number of tables in this schema.""" - schema_external_location: Union[str, None, UnsetType] = UNSET + sql_external_location: Union[str, None, UnsetType] = UNSET """External location of this schema, for example: an S3 object location.""" views_count: Union[int, None, UnsetType] = UNSET @@ -666,7 +669,7 @@ class RelatedTable(RelatedSQL): size_bytes: Union[int, None, UnsetType] = UNSET """Size of this table, in bytes.""" - table_object_count: Union[int, None, UnsetType] = UNSET + sql_object_count: Union[int, None, UnsetType] = UNSET """Number of objects in this table.""" alias: Union[str, None, UnsetType] = UNSET @@ -708,7 +711,7 @@ class RelatedTable(RelatedSQL): is_sharded: 
Union[bool, None, UnsetType] = UNSET """Whether this table is a sharded table (true) or not (false).""" - table_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of the table.""" iceberg_catalog_name: Union[str, None, UnsetType] = UNSET @@ -723,19 +726,19 @@ class RelatedTable(RelatedSQL): iceberg_catalog_table_name: Union[str, None, UnsetType] = UNSET """Catalog table name (actual table name on the catalog side).""" - table_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET + sql_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET """Extra attributes for Impala""" iceberg_catalog_table_namespace: Union[str, None, UnsetType] = UNSET """Catalog table namespace (actual database name on the catalog side).""" - table_external_volume_name: Union[str, None, UnsetType] = UNSET + sql_external_volume_name: Union[str, None, UnsetType] = UNSET """External volume name for the table.""" iceberg_table_base_location: Union[str, None, UnsetType] = UNSET """Iceberg table base location inside the external volume.""" - table_retention_time: Union[int, None, UnsetType] = UNSET + sql_retention_time: Union[int, None, UnsetType] = UNSET """Data retention time in days.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/starburst.py b/pyatlan_v9/model/assets/starburst.py index 3d9b1aa79..8bea2f6c9 100644 --- a/pyatlan_v9/model/assets/starburst.py +++ b/pyatlan_v9/model/assets/starburst.py @@ -47,6 +47,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -62,7 +63,6 @@ RelatedSqlInsightBusinessQuestion, RelatedSqlInsightJoin, ) -from .starburst_related import RelatedStarburst # ============================================================================= 
# FLAT ASSET CLASS @@ -123,6 +123,7 @@ class Starburst(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -144,6 +145,8 @@ class Starburst(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Starburst" + starburst_data_product_name: Union[str, None, UnsetType] = UNSET """Name of the Starburst Data Product that contains this asset.""" @@ -296,6 +299,11 @@ class Starburst(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -371,66 +379,6 @@ class Starburst(Asset): def __post_init__(self) -> None: self.type_name = "Starburst" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Starburst instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Starburst validation failed: {errors}") - - def minimize(self) -> "Starburst": - """ - Return a minimal copy of this Starburst with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Starburst with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Starburst instance with only the minimum required fields. - """ - self.validate() - return Starburst(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedStarburst": - """ - Create a :class:`RelatedStarburst` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedStarburst reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedStarburst(guid=self.guid) - return RelatedStarburst(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -642,6 +590,11 @@ class StarburstRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -754,6 +707,7 @@ class StarburstNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -891,9 +845,6 @@ def _starburst_to_nested(starburst: Starburst) -> StarburstNested: is_incomplete=starburst.is_incomplete, provenance_type=starburst.provenance_type, home_id=starburst.home_id, - depth=starburst.depth, - immediate_upstream=starburst.immediate_upstream, - immediate_downstream=starburst.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -925,6 +876,7 @@ def _starburst_from_nested(nested: StarburstNested) -> Starburst: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -933,9 +885,6 @@ def _starburst_from_nested(nested: StarburstNested) -> Starburst: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_starburst_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1044,6 +993,9 @@ def _starburst_from_nested_bytes(data: bytes, serde: Serde) -> Starburst: Starburst.DBT_SOURCES = RelationField("dbtSources") Starburst.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Starburst.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Starburst.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Starburst.MEANINGS = RelationField("meanings") Starburst.MC_MONITORS = RelationField("mcMonitors") Starburst.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/starburst_dataset.py b/pyatlan_v9/model/assets/starburst_dataset.py index b29c35925..d97a8fc4a 100644 --- a/pyatlan_v9/model/assets/starburst_dataset.py +++ b/pyatlan_v9/model/assets/starburst_dataset.py @@ -48,6 +48,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -70,7 +71,7 @@ RelatedTable, RelatedTablePartition, ) -from .starburst_related import RelatedStarburstDataset, RelatedStarburstDatasetColumn +from .starburst_related import RelatedStarburstDatasetColumn # ============================================================================= # FLAT ASSET CLASS @@ -161,6 +162,7 @@ class StarburstDataset(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -190,6 +192,8 
@@ class StarburstDataset(Asset): STARBURST_DATA_PRODUCT: ClassVar[Any] = None STARBURST_DATASET_COLUMNS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "StarburstDataset" + starburst_is_materialized: Union[bool, None, UnsetType] = UNSET """Whether this dataset is a materialized view.""" @@ -432,6 +436,11 @@ class StarburstDataset(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -539,70 +548,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this StarburstDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if errors: - raise ValueError(f"StarburstDataset validation failed: {errors}") - - def minimize(self) -> "StarburstDataset": - """ - Return a minimal copy of this StarburstDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new StarburstDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new StarburstDataset instance with only the minimum required fields. - """ - self.validate() - return StarburstDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedStarburstDataset": - """ - Create a :class:`RelatedStarburstDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedStarburstDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedStarburstDataset(guid=self.guid) - return RelatedStarburstDataset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -906,6 +851,11 @@ class StarburstDatasetRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1046,6 +996,7 @@ class StarburstDatasetNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1257,9 +1208,6 @@ def _starburst_dataset_to_nested( is_incomplete=starburst_dataset.is_incomplete, provenance_type=starburst_dataset.provenance_type, home_id=starburst_dataset.home_id, - depth=starburst_dataset.depth, - immediate_upstream=starburst_dataset.immediate_upstream, - immediate_downstream=starburst_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1293,6 +1241,7 @@ def _starburst_dataset_from_nested(nested: StarburstDatasetNested) -> StarburstD updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1301,9 +1250,6 @@ def _starburst_dataset_from_nested(nested: 
StarburstDatasetNested) -> StarburstD is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_starburst_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1488,6 +1434,9 @@ def _starburst_dataset_from_nested_bytes(data: bytes, serde: Serde) -> Starburst StarburstDataset.DBT_SOURCES = RelationField("dbtSources") StarburstDataset.SQL_DBT_SOURCES = RelationField("sqlDBTSources") StarburstDataset.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +StarburstDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) StarburstDataset.MEANINGS = RelationField("meanings") StarburstDataset.MC_MONITORS = RelationField("mcMonitors") StarburstDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/starburst_dataset_column.py b/pyatlan_v9/model/assets/starburst_dataset_column.py index fcf98022e..14c92126e 100644 --- a/pyatlan_v9/model/assets/starburst_dataset_column.py +++ b/pyatlan_v9/model/assets/starburst_dataset_column.py @@ -51,6 +51,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .mongo_db_related import RelatedMongoDBCollection @@ -80,7 +81,7 @@ RelatedTablePartition, RelatedView, ) -from .starburst_related import RelatedStarburstDataset, RelatedStarburstDatasetColumn +from .starburst_related import RelatedStarburstDataset # ============================================================================= # FLAT ASSET CLASS @@ -152,6 +153,7 @@ class StarburstDatasetColumn(Asset): PARENT_COLUMN_NAME: ClassVar[Any] = None COLUMN_DISTINCT_VALUES_COUNT: ClassVar[Any] = None COLUMN_DISTINCT_VALUES_COUNT_LONG: ClassVar[Any] 
= None + COLUMN_DISTINCT_VALUES_PERCENTAGE: ClassVar[Any] = None COLUMN_HISTOGRAM: ClassVar[Any] = None COLUMN_MAX: ClassVar[Any] = None COLUMN_MIN: ClassVar[Any] = None @@ -223,6 +225,7 @@ class StarburstDatasetColumn(Asset): DBT_MODEL_COLUMNS: ClassVar[Any] = None COLUMN_DBT_MODEL_COLUMNS: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MONGO_DB_COLLECTION: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None @@ -258,6 +261,8 @@ class StarburstDatasetColumn(Asset): SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None STARBURST_DATASET: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "StarburstDatasetColumn" + starburst_sql_column_qualified_name: Union[str, None, UnsetType] = UNSET """Qualified name of the corresponding SQL Column. Enables cross-stream lookup between the Data Product perspective and the SQL perspective of the same underlying column.""" @@ -437,6 +442,9 @@ class StarburstDatasetColumn(Asset): column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" + column_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" @@ -664,6 +672,11 @@ class StarburstDatasetColumn(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -793,82 +806,6 @@ def 
__post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this StarburstDatasetColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.starburst_dataset is UNSET: - errors.append("starburst_dataset is required for creation") - if self.starburst_dataset_name is UNSET: - errors.append("starburst_dataset_name is required for creation") - if self.starburst_dataset_qualified_name is UNSET: - errors.append( - "starburst_dataset_qualified_name is required for creation" - ) - if self.order is UNSET: - errors.append("order is required for creation") - if errors: - raise ValueError(f"StarburstDatasetColumn validation failed: {errors}") - - def minimize(self) -> "StarburstDatasetColumn": - """ - Return a minimal copy of this StarburstDatasetColumn with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new StarburstDatasetColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new StarburstDatasetColumn instance with only the minimum required fields. - """ - self.validate() - return StarburstDatasetColumn( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedStarburstDatasetColumn": - """ - Create a :class:`RelatedStarburstDatasetColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedStarburstDatasetColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedStarburstDatasetColumn(guid=self.guid) - return RelatedStarburstDatasetColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -1105,6 +1042,9 @@ class StarburstDatasetColumnAttributes(AssetAttributes): column_distinct_values_count_long: Union[int, None, UnsetType] = UNSET """Number of rows that contain distinct values.""" + column_distinct_values_percentage: Union[float, None, UnsetType] = UNSET + """Percentage of rows in a column that contain distinct values.""" + column_histogram: Union[Dict[str, Any], None, UnsetType] = UNSET """List of values in a histogram that represents the contents of this column.""" @@ -1336,6 +1276,11 @@ class StarburstDatasetColumnRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + 
] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1504,6 +1449,7 @@ class StarburstDatasetColumnNested(AssetNested): "dbt_model_columns", "column_dbt_model_columns", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mongo_db_collection", "mc_monitors", @@ -1609,6 +1555,7 @@ def _populate_starburst_dataset_column_attrs( attrs.parent_column_name = obj.parent_column_name attrs.column_distinct_values_count = obj.column_distinct_values_count attrs.column_distinct_values_count_long = obj.column_distinct_values_count_long + attrs.column_distinct_values_percentage = obj.column_distinct_values_percentage attrs.column_histogram = obj.column_histogram attrs.column_max = obj.column_max attrs.column_min = obj.column_min @@ -1734,6 +1681,9 @@ def _extract_starburst_dataset_column_attrs( result["column_distinct_values_count_long"] = ( attrs.column_distinct_values_count_long ) + result["column_distinct_values_percentage"] = ( + attrs.column_distinct_values_percentage + ) result["column_histogram"] = attrs.column_histogram result["column_max"] = attrs.column_max result["column_min"] = attrs.column_min @@ -1823,9 +1773,6 @@ def _starburst_dataset_column_to_nested( is_incomplete=starburst_dataset_column.is_incomplete, provenance_type=starburst_dataset_column.provenance_type, home_id=starburst_dataset_column.home_id, - depth=starburst_dataset_column.depth, - immediate_upstream=starburst_dataset_column.immediate_upstream, - immediate_downstream=starburst_dataset_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1861,6 +1808,7 @@ def _starburst_dataset_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + 
meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1869,9 +1817,6 @@ def _starburst_dataset_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_starburst_dataset_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -2032,6 +1977,9 @@ def _starburst_dataset_column_from_nested_bytes( StarburstDatasetColumn.COLUMN_DISTINCT_VALUES_COUNT_LONG = NumericField( "columnDistinctValuesCountLong", "columnDistinctValuesCountLong" ) +StarburstDatasetColumn.COLUMN_DISTINCT_VALUES_PERCENTAGE = NumericField( + "columnDistinctValuesPercentage", "columnDistinctValuesPercentage" +) StarburstDatasetColumn.COLUMN_HISTOGRAM = KeywordField( "columnHistogram", "columnHistogram" ) @@ -2194,6 +2142,9 @@ def _starburst_dataset_column_from_nested_bytes( StarburstDatasetColumn.DBT_MODEL_COLUMNS = RelationField("dbtModelColumns") StarburstDatasetColumn.COLUMN_DBT_MODEL_COLUMNS = RelationField("columnDbtModelColumns") StarburstDatasetColumn.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +StarburstDatasetColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) StarburstDatasetColumn.MEANINGS = RelationField("meanings") StarburstDatasetColumn.MONGO_DB_COLLECTION = RelationField("mongoDBCollection") StarburstDatasetColumn.MC_MONITORS = RelationField("mcMonitors") diff --git a/pyatlan_v9/model/assets/superset.py b/pyatlan_v9/model/assets/superset.py index 7491c1d50..dd930d659 100644 --- a/pyatlan_v9/model/assets/superset.py +++ b/pyatlan_v9/model/assets/superset.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import 
RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .superset_related import RelatedSuperset # ============================================================================= # FLAT ASSET CLASS @@ -80,6 +80,7 @@ class Superset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +98,8 @@ class Superset(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Superset" + superset_dashboard_id: Union[int, None, UnsetType] = UNSET """Identifier of the dashboard in which this asset exists, in Superset.""" @@ -152,6 +155,11 @@ class Superset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -207,66 +215,6 @@ class Superset(Asset): def __post_init__(self) -> None: self.type_name = "Superset" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> 
None: - """ - Dry-run validation of this Superset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Superset validation failed: {errors}") - - def minimize(self) -> "Superset": - """ - Return a minimal copy of this Superset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Superset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Superset instance with only the minimum required fields. - """ - self.validate() - return Superset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSuperset": - """ - Create a :class:`RelatedSuperset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSuperset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSuperset(guid=self.guid) - return RelatedSuperset(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -381,6 +329,11 @@ class SupersetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -467,6 +420,7 @@ class SupersetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -538,9 +492,6 @@ def _superset_to_nested(superset: Superset) -> SupersetNested: is_incomplete=superset.is_incomplete, provenance_type=superset.provenance_type, home_id=superset.home_id, - depth=superset.depth, - immediate_upstream=superset.immediate_upstream, - immediate_downstream=superset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -572,6 +523,7 @@ def _superset_from_nested(nested: SupersetNested) -> Superset: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -580,9 +532,6 @@ def _superset_from_nested(nested: SupersetNested) -> Superset: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_superset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -633,6 +582,9 @@ def _superset_from_nested_bytes(data: bytes, serde: Serde) -> Superset: Superset.METRICS = RelationField("metrics") Superset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Superset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Superset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Superset.MEANINGS = RelationField("meanings") Superset.MC_MONITORS = RelationField("mcMonitors") Superset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/superset_chart.py b/pyatlan_v9/model/assets/superset_chart.py index 1c7d2939f..31936e22c 100644 --- a/pyatlan_v9/model/assets/superset_chart.py +++ b/pyatlan_v9/model/assets/superset_chart.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,7 +53,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .superset_related import RelatedSupersetChart, RelatedSupersetDashboard +from .superset_related import RelatedSupersetDashboard # ============================================================================= # FLAT ASSET CLASS @@ -84,6 +85,7 @@ class SupersetChart(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + 
GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class SupersetChart(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SUPERSET_DASHBOARD: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SupersetChart" + superset_chart_description_markdown: Union[str, None, UnsetType] = UNSET """Description markdown of the chart.""" @@ -163,6 +167,11 @@ class SupersetChart(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -227,76 +236,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SupersetChart instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.superset_dashboard is UNSET: - errors.append("superset_dashboard is required for creation") - if self.superset_dashboard_qualified_name is UNSET: - errors.append( - "superset_dashboard_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"SupersetChart validation failed: {errors}") - - def minimize(self) -> "SupersetChart": - """ - Return a minimal copy of this SupersetChart with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SupersetChart with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SupersetChart instance with only the minimum required fields. - """ - self.validate() - return SupersetChart(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSupersetChart": - """ - Create a :class:`RelatedSupersetChart` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSupersetChart reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSupersetChart(guid=self.guid) - return RelatedSupersetChart(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -463,6 +402,11 @@ class SupersetChartRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -554,6 +498,7 @@ class SupersetChartNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -634,9 +579,6 @@ def _superset_chart_to_nested(superset_chart: SupersetChart) -> SupersetChartNes is_incomplete=superset_chart.is_incomplete, provenance_type=superset_chart.provenance_type, home_id=superset_chart.home_id, - depth=superset_chart.depth, - immediate_upstream=superset_chart.immediate_upstream, - immediate_downstream=superset_chart.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -670,6 +612,7 @@ def _superset_chart_from_nested(nested: SupersetChartNested) -> SupersetChart: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -678,9 +621,6 @@ def _superset_chart_from_nested(nested: SupersetChartNested) -> SupersetChart: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_superset_chart_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -743,6 +683,9 @@ def _superset_chart_from_nested_bytes(data: bytes, serde: Serde) -> SupersetChar SupersetChart.METRICS = RelationField("metrics") SupersetChart.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SupersetChart.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SupersetChart.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SupersetChart.MEANINGS = RelationField("meanings") SupersetChart.MC_MONITORS = RelationField("mcMonitors") SupersetChart.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/superset_dashboard.py b/pyatlan_v9/model/assets/superset_dashboard.py index 25f3bf4c4..6e675083c 100644 --- a/pyatlan_v9/model/assets/superset_dashboard.py +++ b/pyatlan_v9/model/assets/superset_dashboard.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,11 +53,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .superset_related import ( - RelatedSupersetChart, - RelatedSupersetDashboard, - RelatedSupersetDataset, -) +from .superset_related import RelatedSupersetChart, RelatedSupersetDataset # ============================================================================= # FLAT ASSET CLASS @@ -92,6 +89,7 @@ class SupersetDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] 
= None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -111,6 +109,8 @@ class SupersetDashboard(Asset): SUPERSET_CHARTS: ClassVar[Any] = None SUPERSET_DATASETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SupersetDashboard" + superset_dashboard_changed_by_name: Union[str, None, UnsetType] = UNSET """Name of the user who changed the dashboard.""" @@ -188,6 +188,11 @@ class SupersetDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -249,66 +254,6 @@ class SupersetDashboard(Asset): def __post_init__(self) -> None: self.type_name = "SupersetDashboard" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SupersetDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"SupersetDashboard validation failed: {errors}") - - def minimize(self) -> "SupersetDashboard": - """ - Return a minimal copy of this SupersetDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SupersetDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SupersetDashboard instance with only the minimum required fields. - """ - self.validate() - return SupersetDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSupersetDashboard": - """ - Create a :class:`RelatedSupersetDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSupersetDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSupersetDashboard(guid=self.guid) - return RelatedSupersetDashboard(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -477,6 +422,11 @@ class SupersetDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -571,6 +521,7 @@ class SupersetDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -670,9 +621,6 @@ def _superset_dashboard_to_nested( is_incomplete=superset_dashboard.is_incomplete, provenance_type=superset_dashboard.provenance_type, home_id=superset_dashboard.home_id, - depth=superset_dashboard.depth, - immediate_upstream=superset_dashboard.immediate_upstream, - immediate_downstream=superset_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -708,6 +656,7 @@ def _superset_dashboard_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -716,9 +665,6 @@ def _superset_dashboard_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_superset_dashboard_attrs(attrs), # Merged 
relationship attributes **merged_rels, @@ -798,6 +744,9 @@ def _superset_dashboard_from_nested_bytes( SupersetDashboard.METRICS = RelationField("metrics") SupersetDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SupersetDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SupersetDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SupersetDashboard.MEANINGS = RelationField("meanings") SupersetDashboard.MC_MONITORS = RelationField("mcMonitors") SupersetDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/superset_dataset.py b/pyatlan_v9/model/assets/superset_dataset.py index c2f9cb252..044b26134 100644 --- a/pyatlan_v9/model/assets/superset_dataset.py +++ b/pyatlan_v9/model/assets/superset_dataset.py @@ -42,6 +42,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,7 +53,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .superset_related import RelatedSupersetDashboard, RelatedSupersetDataset +from .superset_related import RelatedSupersetDashboard # ============================================================================= # FLAT ASSET CLASS @@ -85,6 +86,7 @@ class SupersetDataset(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: 
ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class SupersetDataset(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None SUPERSET_DASHBOARD: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "SupersetDataset" + superset_dataset_datasource_name: Union[str, None, UnsetType] = UNSET """Name of the datasource for the dataset.""" @@ -167,6 +171,11 @@ class SupersetDataset(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -231,76 +240,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this SupersetDataset instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.superset_dashboard is UNSET: - errors.append("superset_dashboard is required for creation") - if self.superset_dashboard_qualified_name is UNSET: - errors.append( - "superset_dashboard_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"SupersetDataset validation failed: {errors}") - - def minimize(self) -> "SupersetDataset": - """ - Return a minimal copy of this SupersetDataset with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new SupersetDataset with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new SupersetDataset instance with only the minimum required fields. - """ - self.validate() - return SupersetDataset(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedSupersetDataset": - """ - Create a :class:`RelatedSupersetDataset` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedSupersetDataset reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedSupersetDataset(guid=self.guid) - return RelatedSupersetDataset(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -474,6 +413,11 @@ class SupersetDatasetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -565,6 +509,7 @@ class SupersetDatasetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -649,9 +594,6 @@ def _superset_dataset_to_nested( is_incomplete=superset_dataset.is_incomplete, provenance_type=superset_dataset.provenance_type, home_id=superset_dataset.home_id, - depth=superset_dataset.depth, - immediate_upstream=superset_dataset.immediate_upstream, - immediate_downstream=superset_dataset.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -685,6 +627,7 @@ def _superset_dataset_from_nested(nested: SupersetDatasetNested) -> SupersetData updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -693,9 +636,6 @@ def _superset_dataset_from_nested(nested: SupersetDatasetNested) -> SupersetData is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - 
immediate_downstream=nested.immediate_downstream, **_extract_superset_dataset_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -763,6 +703,9 @@ def _superset_dataset_from_nested_bytes(data: bytes, serde: Serde) -> SupersetDa SupersetDataset.METRICS = RelationField("metrics") SupersetDataset.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") SupersetDataset.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +SupersetDataset.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) SupersetDataset.MEANINGS = RelationField("meanings") SupersetDataset.MC_MONITORS = RelationField("mcMonitors") SupersetDataset.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/table.py b/pyatlan_v9/model/assets/table.py index 06e34d45f..b38041b3e 100644 --- a/pyatlan_v9/model/assets/table.py +++ b/pyatlan_v9/model/assets/table.py @@ -49,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -86,7 +87,7 @@ class Table(Asset): COLUMN_COUNT: ClassVar[Any] = None ROW_COUNT: ClassVar[Any] = None SIZE_BYTES: ClassVar[Any] = None - TABLE_OBJECT_COUNT: ClassVar[Any] = None + SQL_OBJECT_COUNT: ClassVar[Any] = None ALIAS: ClassVar[Any] = None IS_TEMPORARY: ClassVar[Any] = None IS_QUERY_PREVIEW: ClassVar[Any] = None @@ -100,16 +101,16 @@ class Table(Asset): TABLE_DEFINITION: ClassVar[Any] = None PARTITION_LIST: ClassVar[Any] = None IS_SHARDED: ClassVar[Any] = None - TABLE_TYPE: ClassVar[Any] = None + SQL_TYPE: ClassVar[Any] = None ICEBERG_CATALOG_NAME: ClassVar[Any] = None ICEBERG_TABLE_TYPE: ClassVar[Any] = None ICEBERG_CATALOG_SOURCE: ClassVar[Any] = None ICEBERG_CATALOG_TABLE_NAME: ClassVar[Any] = None - TABLE_IMPALA_PARAMETERS: 
ClassVar[Any] = None + SQL_IMPALA_PARAMETERS: ClassVar[Any] = None ICEBERG_CATALOG_TABLE_NAMESPACE: ClassVar[Any] = None - TABLE_EXTERNAL_VOLUME_NAME: ClassVar[Any] = None + SQL_EXTERNAL_VOLUME_NAME: ClassVar[Any] = None ICEBERG_TABLE_BASE_LOCATION: ClassVar[Any] = None - TABLE_RETENTION_TIME: ClassVar[Any] = None + SQL_RETENTION_TIME: ClassVar[Any] = None QUERY_COUNT: ClassVar[Any] = None QUERY_USER_COUNT: ClassVar[Any] = None QUERY_USER_MAP: ClassVar[Any] = None @@ -155,6 +156,7 @@ class Table(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -182,6 +184,8 @@ class Table(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Table" + column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this table.""" @@ -191,7 +195,7 @@ class Table(Asset): size_bytes: Union[int, None, UnsetType] = UNSET """Size of this table, in bytes.""" - table_object_count: Union[int, None, UnsetType] = UNSET + sql_object_count: Union[int, None, UnsetType] = UNSET """Number of objects in this table.""" alias: Union[str, None, UnsetType] = UNSET @@ -233,7 +237,7 @@ class Table(Asset): is_sharded: Union[bool, None, UnsetType] = UNSET """Whether this table is a sharded table (true) or not (false).""" - table_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of the table.""" iceberg_catalog_name: Union[str, None, UnsetType] = UNSET @@ -248,19 +252,19 @@ class Table(Asset): iceberg_catalog_table_name: Union[str, None, UnsetType] = UNSET """Catalog table name (actual table name on the catalog side).""" - table_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET + sql_impala_parameters: 
Union[Dict[str, str], None, UnsetType] = UNSET """Extra attributes for Impala""" iceberg_catalog_table_namespace: Union[str, None, UnsetType] = UNSET """Catalog table namespace (actual database name on the catalog side).""" - table_external_volume_name: Union[str, None, UnsetType] = UNSET + sql_external_volume_name: Union[str, None, UnsetType] = UNSET """External volume name for the table.""" iceberg_table_base_location: Union[str, None, UnsetType] = UNSET """Iceberg table base location inside the external volume.""" - table_retention_time: Union[int, None, UnsetType] = UNSET + sql_retention_time: Union[int, None, UnsetType] = UNSET """Data retention time in days.""" query_count: Union[int, None, UnsetType] = UNSET @@ -406,6 +410,11 @@ class Table(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -507,80 +516,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Table instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"Table validation failed: {errors}") - - def minimize(self) -> "Table": - """ - Return a minimal copy of this Table with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Table with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Table instance with only the minimum required fields. - """ - self.validate() - return Table(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTable": - """ - Create a :class:`RelatedTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTable reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTable(guid=self.guid) - return RelatedTable(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -720,7 +655,7 @@ class TableAttributes(AssetAttributes): size_bytes: Union[int, None, UnsetType] = UNSET """Size of this table, in bytes.""" - table_object_count: Union[int, None, UnsetType] = UNSET + sql_object_count: Union[int, None, UnsetType] = UNSET """Number of objects in this table.""" alias: Union[str, None, UnsetType] = UNSET @@ -762,7 +697,7 @@ class TableAttributes(AssetAttributes): is_sharded: Union[bool, None, UnsetType] = UNSET """Whether this table is a sharded table (true) or not (false).""" - table_type: Union[str, None, UnsetType] = UNSET + sql_type: Union[str, None, UnsetType] = UNSET """Type of the table.""" iceberg_catalog_name: Union[str, None, UnsetType] = UNSET @@ -777,19 +712,19 @@ class TableAttributes(AssetAttributes): iceberg_catalog_table_name: Union[str, None, UnsetType] = UNSET """Catalog table name (actual table name on the catalog side).""" - table_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET + sql_impala_parameters: Union[Dict[str, str], None, UnsetType] = UNSET """Extra attributes for Impala""" iceberg_catalog_table_namespace: Union[str, None, UnsetType] = UNSET """Catalog table namespace (actual database name on the catalog side).""" - table_external_volume_name: Union[str, None, UnsetType] = UNSET + sql_external_volume_name: Union[str, None, UnsetType] = UNSET """External volume name for the table.""" iceberg_table_base_location: Union[str, None, UnsetType] = UNSET """Iceberg table base location inside the external volume.""" - table_retention_time: Union[int, None, UnsetType] = UNSET + sql_retention_time: Union[int, None, UnsetType] = UNSET """Data retention time in days.""" query_count: Union[int, None, UnsetType] = UNSET @@ -939,6 +874,11 @@ class TableRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: 
Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1069,6 +1009,7 @@ class TableNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1104,7 +1045,7 @@ def _populate_table_attrs(attrs: TableAttributes, obj: Table) -> None: attrs.column_count = obj.column_count attrs.row_count = obj.row_count attrs.size_bytes = obj.size_bytes - attrs.table_object_count = obj.table_object_count + attrs.sql_object_count = obj.sql_object_count attrs.alias = obj.alias attrs.is_temporary = obj.is_temporary attrs.is_query_preview = obj.is_query_preview @@ -1118,16 +1059,16 @@ def _populate_table_attrs(attrs: TableAttributes, obj: Table) -> None: attrs.table_definition = obj.table_definition attrs.partition_list = obj.partition_list attrs.is_sharded = obj.is_sharded - attrs.table_type = obj.table_type + attrs.sql_type = obj.sql_type attrs.iceberg_catalog_name = obj.iceberg_catalog_name attrs.iceberg_table_type = obj.iceberg_table_type attrs.iceberg_catalog_source = obj.iceberg_catalog_source attrs.iceberg_catalog_table_name = obj.iceberg_catalog_table_name - attrs.table_impala_parameters = obj.table_impala_parameters + attrs.sql_impala_parameters = obj.sql_impala_parameters attrs.iceberg_catalog_table_namespace = obj.iceberg_catalog_table_namespace - attrs.table_external_volume_name = obj.table_external_volume_name + attrs.sql_external_volume_name = obj.sql_external_volume_name attrs.iceberg_table_base_location = obj.iceberg_table_base_location - attrs.table_retention_time = obj.table_retention_time + 
attrs.sql_retention_time = obj.sql_retention_time attrs.query_count = obj.query_count attrs.query_user_count = obj.query_user_count attrs.query_user_map = obj.query_user_map @@ -1165,7 +1106,7 @@ def _extract_table_attrs(attrs: TableAttributes) -> dict: result["column_count"] = attrs.column_count result["row_count"] = attrs.row_count result["size_bytes"] = attrs.size_bytes - result["table_object_count"] = attrs.table_object_count + result["sql_object_count"] = attrs.sql_object_count result["alias"] = attrs.alias result["is_temporary"] = attrs.is_temporary result["is_query_preview"] = attrs.is_query_preview @@ -1179,16 +1120,16 @@ def _extract_table_attrs(attrs: TableAttributes) -> dict: result["table_definition"] = attrs.table_definition result["partition_list"] = attrs.partition_list result["is_sharded"] = attrs.is_sharded - result["table_type"] = attrs.table_type + result["sql_type"] = attrs.sql_type result["iceberg_catalog_name"] = attrs.iceberg_catalog_name result["iceberg_table_type"] = attrs.iceberg_table_type result["iceberg_catalog_source"] = attrs.iceberg_catalog_source result["iceberg_catalog_table_name"] = attrs.iceberg_catalog_table_name - result["table_impala_parameters"] = attrs.table_impala_parameters + result["sql_impala_parameters"] = attrs.sql_impala_parameters result["iceberg_catalog_table_namespace"] = attrs.iceberg_catalog_table_namespace - result["table_external_volume_name"] = attrs.table_external_volume_name + result["sql_external_volume_name"] = attrs.sql_external_volume_name result["iceberg_table_base_location"] = attrs.iceberg_table_base_location - result["table_retention_time"] = attrs.table_retention_time + result["sql_retention_time"] = attrs.sql_retention_time result["query_count"] = attrs.query_count result["query_user_count"] = attrs.query_user_count result["query_user_map"] = attrs.query_user_map @@ -1260,9 +1201,6 @@ def _table_to_nested(table: Table) -> TableNested: is_incomplete=table.is_incomplete, 
provenance_type=table.provenance_type, home_id=table.home_id, - depth=table.depth, - immediate_upstream=table.immediate_upstream, - immediate_downstream=table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1292,6 +1230,7 @@ def _table_from_nested(nested: TableNested) -> Table: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1300,9 +1239,6 @@ def _table_from_nested(nested: TableNested) -> Table: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1333,7 +1269,7 @@ def _table_from_nested_bytes(data: bytes, serde: Serde) -> Table: Table.COLUMN_COUNT = NumericField("columnCount", "columnCount") Table.ROW_COUNT = NumericField("rowCount", "rowCount") Table.SIZE_BYTES = NumericField("sizeBytes", "sizeBytes") -Table.TABLE_OBJECT_COUNT = NumericField("tableObjectCount", "tableObjectCount") +Table.SQL_OBJECT_COUNT = NumericField("sqlObjectCount", "sqlObjectCount") Table.ALIAS = KeywordField("alias", "alias") Table.IS_TEMPORARY = BooleanField("isTemporary", "isTemporary") Table.IS_QUERY_PREVIEW = BooleanField("isQueryPreview", "isQueryPreview") @@ -1351,7 +1287,7 @@ def _table_from_nested_bytes(data: bytes, serde: Serde) -> Table: Table.TABLE_DEFINITION = KeywordField("tableDefinition", "tableDefinition") Table.PARTITION_LIST = KeywordField("partitionList", "partitionList") Table.IS_SHARDED = BooleanField("isSharded", "isSharded") -Table.TABLE_TYPE = KeywordField("tableType", "tableType") +Table.SQL_TYPE = KeywordField("sqlType", 
"sqlType") Table.ICEBERG_CATALOG_NAME = KeywordField("icebergCatalogName", "icebergCatalogName") Table.ICEBERG_TABLE_TYPE = KeywordField("icebergTableType", "icebergTableType") Table.ICEBERG_CATALOG_SOURCE = KeywordField( @@ -1360,19 +1296,17 @@ def _table_from_nested_bytes(data: bytes, serde: Serde) -> Table: Table.ICEBERG_CATALOG_TABLE_NAME = KeywordField( "icebergCatalogTableName", "icebergCatalogTableName" ) -Table.TABLE_IMPALA_PARAMETERS = KeywordField( - "tableImpalaParameters", "tableImpalaParameters" -) +Table.SQL_IMPALA_PARAMETERS = KeywordField("sqlImpalaParameters", "sqlImpalaParameters") Table.ICEBERG_CATALOG_TABLE_NAMESPACE = KeywordField( "icebergCatalogTableNamespace", "icebergCatalogTableNamespace" ) -Table.TABLE_EXTERNAL_VOLUME_NAME = KeywordField( - "tableExternalVolumeName", "tableExternalVolumeName" +Table.SQL_EXTERNAL_VOLUME_NAME = KeywordField( + "sqlExternalVolumeName", "sqlExternalVolumeName" ) Table.ICEBERG_TABLE_BASE_LOCATION = KeywordField( "icebergTableBaseLocation", "icebergTableBaseLocation" ) -Table.TABLE_RETENTION_TIME = NumericField("tableRetentionTime", "tableRetentionTime") +Table.SQL_RETENTION_TIME = NumericField("sqlRetentionTime", "sqlRetentionTime") Table.QUERY_COUNT = NumericField("queryCount", "queryCount") Table.QUERY_USER_COUNT = NumericField("queryUserCount", "queryUserCount") Table.QUERY_USER_MAP = KeywordField("queryUserMap", "queryUserMap") @@ -1437,6 +1371,9 @@ def _table_from_nested_bytes(data: bytes, serde: Serde) -> Table: Table.DBT_SOURCES = RelationField("dbtSources") Table.SQL_DBT_SOURCES = RelationField("sqlDBTSources") Table.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +Table.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Table.MEANINGS = RelationField("meanings") Table.MC_MONITORS = RelationField("mcMonitors") Table.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/table_partition.py 
b/pyatlan_v9/model/assets/table_partition.py index 6b89f9a86..10838a731 100644 --- a/pyatlan_v9/model/assets/table_partition.py +++ b/pyatlan_v9/model/assets/table_partition.py @@ -49,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -137,6 +138,7 @@ class TablePartition(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -162,6 +164,8 @@ class TablePartition(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TablePartition" + constraint: Union[str, None, UnsetType] = UNSET """Constraint that defines this table partition.""" @@ -350,6 +354,11 @@ class TablePartition(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -445,84 +454,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TablePartition instance. - - Checks that required fields (type_name, name, qualified_name) are set. 
- When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.parent_table is UNSET: - errors.append("parent_table is required for creation") - if self.table_name is UNSET: - errors.append("table_name is required for creation") - if self.table_qualified_name is UNSET: - errors.append("table_qualified_name is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"TablePartition validation failed: {errors}") - - def minimize(self) -> "TablePartition": - """ - Return a minimal copy of this TablePartition with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TablePartition with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TablePartition instance with only the minimum required fields. - """ - self.validate() - return TablePartition(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTablePartition": - """ - Create a :class:`RelatedTablePartition` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTablePartition reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTablePartition(guid=self.guid) - return RelatedTablePartition(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -876,6 +807,11 @@ class TablePartitionRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -1002,6 +938,7 @@ class TablePartitionNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1171,9 +1108,6 @@ def _table_partition_to_nested(table_partition: TablePartition) -> TablePartitio is_incomplete=table_partition.is_incomplete, provenance_type=table_partition.provenance_type, home_id=table_partition.home_id, - depth=table_partition.depth, - immediate_upstream=table_partition.immediate_upstream, - 
immediate_downstream=table_partition.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1207,6 +1141,7 @@ def _table_partition_from_nested(nested: TablePartitionNested) -> TablePartition updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1215,9 +1150,6 @@ def _table_partition_from_nested(nested: TablePartitionNested) -> TablePartition is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_table_partition_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1350,6 +1282,9 @@ def _table_partition_from_nested_bytes(data: bytes, serde: Serde) -> TablePartit TablePartition.DBT_SOURCES = RelationField("dbtSources") TablePartition.SQL_DBT_SOURCES = RelationField("sqlDBTSources") TablePartition.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +TablePartition.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TablePartition.MEANINGS = RelationField("meanings") TablePartition.MC_MONITORS = RelationField("mcMonitors") TablePartition.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau.py b/pyatlan_v9/model/assets/tableau.py index a7b728b8c..0c404bf99 100644 --- a/pyatlan_v9/model/assets/tableau.py +++ b/pyatlan_v9/model/assets/tableau.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import 
RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tableau_related import RelatedTableau # ============================================================================= # FLAT ASSET CLASS @@ -79,6 +79,7 @@ class Tableau(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -96,6 +97,8 @@ class Tableau(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Tableau" + tableau_project_hierarchy_qualified_names: Union[List[str], None, UnsetType] = UNSET """Array of qualified names representing the project hierarchy for this Tableau asset.""" @@ -148,6 +151,11 @@ class Tableau(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -203,66 +211,6 @@ class Tableau(Asset): def __post_init__(self) -> None: self.type_name = "Tableau" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Tableau instance. 
- - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Tableau validation failed: {errors}") - - def minimize(self) -> "Tableau": - """ - Return a minimal copy of this Tableau with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Tableau with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Tableau instance with only the minimum required fields. - """ - self.validate() - return Tableau(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableau": - """ - Create a :class:`RelatedTableau` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableau reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableau(guid=self.guid) - return RelatedTableau(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -374,6 +322,11 @@ class TableauRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -460,6 +413,7 @@ class TableauNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -531,9 +485,6 @@ def _tableau_to_nested(tableau: Tableau) -> TableauNested: is_incomplete=tableau.is_incomplete, provenance_type=tableau.provenance_type, home_id=tableau.home_id, - depth=tableau.depth, - immediate_upstream=tableau.immediate_upstream, - immediate_downstream=tableau.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -563,6 +514,7 @@ def _tableau_from_nested(nested: TableauNested) -> Tableau: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -571,9 +523,6 @@ def _tableau_from_nested(nested: TableauNested) -> Tableau: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - 
immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -614,6 +563,9 @@ def _tableau_from_nested_bytes(data: bytes, serde: Serde) -> Tableau: Tableau.METRICS = RelationField("metrics") Tableau.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Tableau.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Tableau.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Tableau.MEANINGS = RelationField("meanings") Tableau.MC_MONITORS = RelationField("mcMonitors") Tableau.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_calculated_field.py b/pyatlan_v9/model/assets/tableau_calculated_field.py index e036da6e7..4a8fc0e8d 100644 --- a/pyatlan_v9/model/assets/tableau_calculated_field.py +++ b/pyatlan_v9/model/assets/tableau_calculated_field.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,7 +53,6 @@ from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .tableau_related import ( - RelatedTableauCalculatedField, RelatedTableauDatasource, RelatedTableauWorksheet, RelatedTableauWorksheetField, @@ -96,6 +96,7 @@ class TableauCalculatedField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None 
MC_INCIDENTS: ClassVar[Any] = None @@ -116,6 +117,8 @@ class TableauCalculatedField(Asset): DATASOURCE: ClassVar[Any] = None WORKSHEETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauCalculatedField" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this calculated field exists.""" @@ -201,6 +204,11 @@ class TableauCalculatedField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -275,80 +283,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauCalculatedField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.datasource is UNSET: - errors.append("datasource is required for creation") - if self.datasource_qualified_name is UNSET: - errors.append("datasource_qualified_name is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauCalculatedField validation failed: {errors}") - - def minimize(self) -> "TableauCalculatedField": - """ - Return a minimal copy of this TableauCalculatedField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauCalculatedField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauCalculatedField instance with only the minimum required fields. - """ - self.validate() - return TableauCalculatedField( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedTableauCalculatedField": - """ - Create a :class:`RelatedTableauCalculatedField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedTableauCalculatedField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauCalculatedField(guid=self.guid) - return RelatedTableauCalculatedField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -495,6 +429,11 @@ class TableauCalculatedFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -594,6 +533,7 @@ class TableauCalculatedFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -698,9 +638,6 @@ def _tableau_calculated_field_to_nested( is_incomplete=tableau_calculated_field.is_incomplete, provenance_type=tableau_calculated_field.provenance_type, home_id=tableau_calculated_field.home_id, - depth=tableau_calculated_field.depth, - immediate_upstream=tableau_calculated_field.immediate_upstream, - immediate_downstream=tableau_calculated_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -736,6 +673,7 @@ def _tableau_calculated_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -744,9 +682,6 @@ def 
_tableau_calculated_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_calculated_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -836,6 +771,9 @@ def _tableau_calculated_field_from_nested_bytes( TableauCalculatedField.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +TableauCalculatedField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauCalculatedField.MEANINGS = RelationField("meanings") TableauCalculatedField.MC_MONITORS = RelationField("mcMonitors") TableauCalculatedField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_dashboard.py b/pyatlan_v9/model/assets/tableau_dashboard.py index 5b97989ba..6863d65e8 100644 --- a/pyatlan_v9/model/assets/tableau_dashboard.py +++ b/pyatlan_v9/model/assets/tableau_dashboard.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -90,6 +91,7 @@ class TableauDashboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -112,6 +114,8 @@ class TableauDashboard(Asset): TABLEAU_PARENT_DASHBOARDS: ClassVar[Any] = None TABLEAU_DASHBOARD_FIELDS: 
ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauDashboard" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this dashboard exists.""" @@ -179,6 +183,11 @@ class TableauDashboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -263,78 +272,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauDashboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workbook is UNSET: - errors.append("workbook is required for creation") - if self.workbook_qualified_name is UNSET: - errors.append("workbook_qualified_name is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauDashboard validation failed: {errors}") - - def minimize(self) -> "TableauDashboard": - """ - Return a minimal copy of this TableauDashboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauDashboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauDashboard instance with only the minimum required fields. - """ - self.validate() - return TableauDashboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauDashboard": - """ - Create a :class:`RelatedTableauDashboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauDashboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauDashboard(guid=self.guid) - return RelatedTableauDashboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -463,6 +400,11 @@ class TableauDashboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -572,6 +514,7 @@ class TableauDashboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -664,9 +607,6 @@ def _tableau_dashboard_to_nested( is_incomplete=tableau_dashboard.is_incomplete, provenance_type=tableau_dashboard.provenance_type, home_id=tableau_dashboard.home_id, - depth=tableau_dashboard.depth, - immediate_upstream=tableau_dashboard.immediate_upstream, - immediate_downstream=tableau_dashboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -700,6 +640,7 @@ def _tableau_dashboard_from_nested(nested: TableauDashboardNested) -> TableauDas updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -708,9 +649,6 @@ def _tableau_dashboard_from_nested(nested: TableauDashboardNested) -> TableauDas 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_dashboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -774,6 +712,9 @@ def _tableau_dashboard_from_nested_bytes(data: bytes, serde: Serde) -> TableauDa TableauDashboard.METRICS = RelationField("metrics") TableauDashboard.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") TableauDashboard.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +TableauDashboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauDashboard.MEANINGS = RelationField("meanings") TableauDashboard.MC_MONITORS = RelationField("mcMonitors") TableauDashboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_dashboard_field.py b/pyatlan_v9/model/assets/tableau_dashboard_field.py index d62bcb373..7378834d6 100644 --- a/pyatlan_v9/model/assets/tableau_dashboard_field.py +++ b/pyatlan_v9/model/assets/tableau_dashboard_field.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,11 +52,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tableau_related import ( - RelatedTableauDashboard, - RelatedTableauDashboardField, - RelatedTableauWorksheetField, -) +from .tableau_related import RelatedTableauDashboard, 
RelatedTableauWorksheetField # ============================================================================= # FLAT ASSET CLASS @@ -99,6 +96,7 @@ class TableauDashboardField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -118,6 +116,8 @@ class TableauDashboardField(Asset): TABLEAU_WORKSHEET_FIELD: ClassVar[Any] = None TABLEAU_DASHBOARD: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauDashboardField" + tableau_site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this dashboard field exists.""" @@ -219,6 +219,11 @@ class TableauDashboardField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -290,80 +295,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauDashboardField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.tableau_dashboard is UNSET: - errors.append("tableau_dashboard is required for creation") - if self.tableau_dashboard_qualified_name is UNSET: - errors.append( - "tableau_dashboard_qualified_name is required for creation" - ) - if self.tableau_project_qualified_name is UNSET: - errors.append("tableau_project_qualified_name is required for creation") - if self.tableau_site_qualified_name is UNSET: - errors.append("tableau_site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauDashboardField validation failed: {errors}") - - def minimize(self) -> "TableauDashboardField": - """ - Return a minimal copy of this TableauDashboardField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauDashboardField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauDashboardField instance with only the minimum required fields. - """ - self.validate() - return TableauDashboardField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauDashboardField": - """ - Create a :class:`RelatedTableauDashboardField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. 
Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauDashboardField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauDashboardField(guid=self.guid) - return RelatedTableauDashboardField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -526,6 +457,11 @@ class TableauDashboardFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -622,6 +558,7 @@ class TableauDashboardFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -751,9 +688,6 @@ def _tableau_dashboard_field_to_nested( is_incomplete=tableau_dashboard_field.is_incomplete, provenance_type=tableau_dashboard_field.provenance_type, home_id=tableau_dashboard_field.home_id, - depth=tableau_dashboard_field.depth, - immediate_upstream=tableau_dashboard_field.immediate_upstream, - immediate_downstream=tableau_dashboard_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -789,6 +723,7 @@ def _tableau_dashboard_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, 
custom_attributes=nested.custom_attributes, @@ -797,9 +732,6 @@ def _tableau_dashboard_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_dashboard_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -909,6 +841,9 @@ def _tableau_dashboard_field_from_nested_bytes( TableauDashboardField.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +TableauDashboardField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauDashboardField.MEANINGS = RelationField("meanings") TableauDashboardField.MC_MONITORS = RelationField("mcMonitors") TableauDashboardField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_datasource.py b/pyatlan_v9/model/assets/tableau_datasource.py index dab9c9fd6..3686306c9 100644 --- a/pyatlan_v9/model/assets/tableau_datasource.py +++ b/pyatlan_v9/model/assets/tableau_datasource.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,7 +53,6 @@ from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .tableau_related import ( - RelatedTableauDatasource, RelatedTableauDatasourceField, RelatedTableauProject, RelatedTableauWorkbook, @@ -98,6 +98,7 @@ class TableauDatasource(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: 
ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -118,6 +119,8 @@ class TableauDatasource(Asset): WORKBOOK: ClassVar[Any] = None FIELDS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauDatasource" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this datasource exists.""" @@ -209,6 +212,11 @@ class TableauDatasource(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -281,76 +289,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauDatasource instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauDatasource validation failed: {errors}") - - def minimize(self) -> "TableauDatasource": - """ - Return a minimal copy of this TableauDatasource with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauDatasource with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauDatasource instance with only the minimum required fields. - """ - self.validate() - return TableauDatasource(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauDatasource": - """ - Create a :class:`RelatedTableauDatasource` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauDatasource reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauDatasource(guid=self.guid) - return RelatedTableauDatasource(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -503,6 +441,11 @@ class TableauDatasourceRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -600,6 +543,7 @@ class TableauDatasourceNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -706,9 +650,6 @@ def _tableau_datasource_to_nested( is_incomplete=tableau_datasource.is_incomplete, provenance_type=tableau_datasource.provenance_type, home_id=tableau_datasource.home_id, - depth=tableau_datasource.depth, - immediate_upstream=tableau_datasource.immediate_upstream, - immediate_downstream=tableau_datasource.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -744,6 +685,7 @@ def _tableau_datasource_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -752,9 +694,6 @@ def _tableau_datasource_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_datasource_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -838,6 +777,9 @@ def _tableau_datasource_from_nested_bytes( TableauDatasource.METRICS = RelationField("metrics") TableauDatasource.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") TableauDatasource.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +TableauDatasource.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauDatasource.MEANINGS = RelationField("meanings") TableauDatasource.MC_MONITORS = RelationField("mcMonitors") TableauDatasource.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_datasource_field.py b/pyatlan_v9/model/assets/tableau_datasource_field.py index 90f6b2607..442ff39df 100644 --- a/pyatlan_v9/model/assets/tableau_datasource_field.py +++ b/pyatlan_v9/model/assets/tableau_datasource_field.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -53,7 +54,6 @@ from .spark_related import RelatedSparkJob from .tableau_related import ( RelatedTableauDatasource, - RelatedTableauDatasourceField, RelatedTableauWorksheet, RelatedTableauWorksheetField, ) @@ -101,6 +101,7 @@ class TableauDatasourceField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None 
MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -121,6 +122,8 @@ class TableauDatasourceField(Asset): WORKSHEETS: ClassVar[Any] = None TABLEAU_WORKSHEET_FIELD: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauDatasourceField" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this datasource field exists.""" @@ -221,6 +224,11 @@ class TableauDatasourceField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -295,80 +303,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauDatasourceField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.datasource is UNSET: - errors.append("datasource is required for creation") - if self.datasource_qualified_name is UNSET: - errors.append("datasource_qualified_name is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauDatasourceField validation failed: {errors}") - - def minimize(self) -> "TableauDatasourceField": - """ - Return a minimal copy of this TableauDatasourceField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauDatasourceField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauDatasourceField instance with only the minimum required fields. - """ - self.validate() - return TableauDatasourceField( - qualified_name=self.qualified_name, name=self.name - ) - - def relate(self) -> "RelatedTableauDatasourceField": - """ - Create a :class:`RelatedTableauDatasourceField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedTableauDatasourceField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauDatasourceField(guid=self.guid) - return RelatedTableauDatasourceField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -530,6 +464,11 @@ class TableauDatasourceFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -629,6 +568,7 @@ class TableauDatasourceFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -751,9 +691,6 @@ def _tableau_datasource_field_to_nested( is_incomplete=tableau_datasource_field.is_incomplete, provenance_type=tableau_datasource_field.provenance_type, home_id=tableau_datasource_field.home_id, - depth=tableau_datasource_field.depth, - immediate_upstream=tableau_datasource_field.immediate_upstream, - immediate_downstream=tableau_datasource_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -789,6 +726,7 @@ def _tableau_datasource_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -797,9 +735,6 @@ def 
_tableau_datasource_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_datasource_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -912,6 +847,9 @@ def _tableau_datasource_field_from_nested_bytes( TableauDatasourceField.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +TableauDatasourceField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauDatasourceField.MEANINGS = RelationField("meanings") TableauDatasourceField.MC_MONITORS = RelationField("mcMonitors") TableauDatasourceField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_flow.py b/pyatlan_v9/model/assets/tableau_flow.py index ebdafbe97..d5fdaa9c1 100644 --- a/pyatlan_v9/model/assets/tableau_flow.py +++ b/pyatlan_v9/model/assets/tableau_flow.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,7 +52,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tableau_related import RelatedTableauFlow, RelatedTableauProject +from .tableau_related import RelatedTableauProject # ============================================================================= # FLAT ASSET CLASS @@ -87,6 +88,7 @@ class TableauFlow(Asset): METRICS: ClassVar[Any] = None 
DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -105,6 +107,8 @@ class TableauFlow(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None PROJECT: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauFlow" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this flow exists.""" @@ -178,6 +182,11 @@ class TableauFlow(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -244,76 +253,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauFlow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauFlow validation failed: {errors}") - - def minimize(self) -> "TableauFlow": - """ - Return a minimal copy of this TableauFlow with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauFlow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauFlow instance with only the minimum required fields. - """ - self.validate() - return TableauFlow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauFlow": - """ - Create a :class:`RelatedTableauFlow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauFlow reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauFlow(guid=self.guid) - return RelatedTableauFlow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -446,6 +385,11 @@ class TableauFlowRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -535,6 +479,7 @@ class TableauFlowNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -623,9 +568,6 @@ def _tableau_flow_to_nested(tableau_flow: TableauFlow) -> TableauFlowNested: is_incomplete=tableau_flow.is_incomplete, provenance_type=tableau_flow.provenance_type, home_id=tableau_flow.home_id, - depth=tableau_flow.depth, - immediate_upstream=tableau_flow.immediate_upstream, - immediate_downstream=tableau_flow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -657,6 +599,7 @@ def _tableau_flow_from_nested(nested: TableauFlowNested) -> TableauFlow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -665,9 +608,6 @@ def _tableau_flow_from_nested(nested: TableauFlowNested) -> TableauFlow: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_flow_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -723,6 +663,9 @@ def _tableau_flow_from_nested_bytes(data: bytes, serde: Serde) -> TableauFlow: TableauFlow.METRICS = RelationField("metrics") TableauFlow.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") TableauFlow.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +TableauFlow.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauFlow.MEANINGS = RelationField("meanings") TableauFlow.MC_MONITORS = RelationField("mcMonitors") TableauFlow.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_metric.py b/pyatlan_v9/model/assets/tableau_metric.py index ccb6f4784..5bd82b998 100644 --- a/pyatlan_v9/model/assets/tableau_metric.py +++ b/pyatlan_v9/model/assets/tableau_metric.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,7 +52,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tableau_related import RelatedTableauMetric, RelatedTableauProject +from .tableau_related import RelatedTableauProject # ============================================================================= # FLAT ASSET CLASS @@ -84,6 +85,7 @@ class TableauMetric(Asset): METRICS: ClassVar[Any] = None 
DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -102,6 +104,8 @@ class TableauMetric(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None PROJECT: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauMetric" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this metric exists.""" @@ -166,6 +170,11 @@ class TableauMetric(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -232,76 +241,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauMetric instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauMetric validation failed: {errors}") - - def minimize(self) -> "TableauMetric": - """ - Return a minimal copy of this TableauMetric with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauMetric with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauMetric instance with only the minimum required fields. - """ - self.validate() - return TableauMetric(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauMetric": - """ - Create a :class:`RelatedTableauMetric` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauMetric reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauMetric(guid=self.guid) - return RelatedTableauMetric(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -425,6 +364,11 @@ class TableauMetricRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -516,6 +460,7 @@ class TableauMetricNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -598,9 +543,6 @@ def _tableau_metric_to_nested(tableau_metric: TableauMetric) -> TableauMetricNes is_incomplete=tableau_metric.is_incomplete, provenance_type=tableau_metric.provenance_type, home_id=tableau_metric.home_id, - depth=tableau_metric.depth, - immediate_upstream=tableau_metric.immediate_upstream, - immediate_downstream=tableau_metric.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -634,6 +576,7 @@ def _tableau_metric_from_nested(nested: TableauMetricNested) -> TableauMetric: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -642,9 +585,6 @@ def _tableau_metric_from_nested(nested: TableauMetricNested) -> TableauMetric: 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_metric_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -701,6 +641,9 @@ def _tableau_metric_from_nested_bytes(data: bytes, serde: Serde) -> TableauMetri TableauMetric.METRICS = RelationField("metrics") TableauMetric.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") TableauMetric.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +TableauMetric.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauMetric.MEANINGS = RelationField("meanings") TableauMetric.MC_MONITORS = RelationField("mcMonitors") TableauMetric.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_project.py b/pyatlan_v9/model/assets/tableau_project.py index bcd1bba97..80c9c9381 100644 --- a/pyatlan_v9/model/assets/tableau_project.py +++ b/pyatlan_v9/model/assets/tableau_project.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -90,6 +91,7 @@ class TableauProject(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -113,6 +115,8 @@ class TableauProject(Asset): FLOWS: ClassVar[Any] = None 
WORKBOOKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauProject" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this project exists.""" @@ -177,6 +181,11 @@ class TableauProject(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -256,74 +265,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauProject instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.site is UNSET: - errors.append("site is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauProject validation failed: {errors}") - - def minimize(self) -> "TableauProject": - """ - Return a minimal copy of this TableauProject with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauProject with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauProject instance with only the minimum required fields. - """ - self.validate() - return TableauProject(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauProject": - """ - Create a :class:`RelatedTableauProject` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauProject reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauProject(guid=self.guid) - return RelatedTableauProject(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -447,6 +388,11 @@ class TableauProjectRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -553,6 +499,7 @@ class TableauProjectNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -642,9 +589,6 @@ def _tableau_project_to_nested(tableau_project: TableauProject) -> TableauProjec is_incomplete=tableau_project.is_incomplete, provenance_type=tableau_project.provenance_type, home_id=tableau_project.home_id, - depth=tableau_project.depth, - immediate_upstream=tableau_project.immediate_upstream, - immediate_downstream=tableau_project.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -678,6 +622,7 @@ def _tableau_project_from_nested(nested: TableauProjectNested) -> TableauProject updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -686,9 +631,6 @@ def _tableau_project_from_nested(nested: TableauProjectNested) -> TableauProject 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_project_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -751,6 +693,9 @@ def _tableau_project_from_nested_bytes(data: bytes, serde: Serde) -> TableauProj TableauProject.METRICS = RelationField("metrics") TableauProject.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") TableauProject.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +TableauProject.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauProject.MEANINGS = RelationField("meanings") TableauProject.MC_MONITORS = RelationField("mcMonitors") TableauProject.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_site.py b/pyatlan_v9/model/assets/tableau_site.py index 2ac605740..b8100ed3e 100644 --- a/pyatlan_v9/model/assets/tableau_site.py +++ b/pyatlan_v9/model/assets/tableau_site.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .tableau_related import RelatedTableauProject, RelatedTableauSite +from .tableau_related import RelatedTableauProject # ============================================================================= # FLAT ASSET CLASS @@ -79,6 +80,7 @@ class 
TableauSite(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -97,6 +99,8 @@ class TableauSite(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None PROJECTS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauSite" + tableau_project_hierarchy_qualified_names: Union[List[str], None, UnsetType] = UNSET """Array of qualified names representing the project hierarchy for this Tableau asset.""" @@ -149,6 +153,11 @@ class TableauSite(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -207,66 +216,6 @@ class TableauSite(Asset): def __post_init__(self) -> None: self.type_name = "TableauSite" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauSite instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"TableauSite validation failed: {errors}") - - def minimize(self) -> "TableauSite": - """ - Return a minimal copy of this TableauSite with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauSite with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauSite instance with only the minimum required fields. - """ - self.validate() - return TableauSite(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauSite": - """ - Create a :class:`RelatedTableauSite` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauSite reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauSite(guid=self.guid) - return RelatedTableauSite(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -378,6 +327,11 @@ class TableauSiteRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -467,6 +421,7 @@ class TableauSiteNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -541,9 +496,6 @@ def _tableau_site_to_nested(tableau_site: TableauSite) -> TableauSiteNested: is_incomplete=tableau_site.is_incomplete, provenance_type=tableau_site.provenance_type, home_id=tableau_site.home_id, - depth=tableau_site.depth, - immediate_upstream=tableau_site.immediate_upstream, - immediate_downstream=tableau_site.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -575,6 +527,7 @@ def _tableau_site_from_nested(nested: TableauSiteNested) -> TableauSite: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -583,9 +536,6 @@ def _tableau_site_from_nested(nested: TableauSiteNested) -> TableauSite: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_site_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -630,6 +580,9 @@ def _tableau_site_from_nested_bytes(data: bytes, serde: Serde) -> TableauSite: TableauSite.METRICS = RelationField("metrics") TableauSite.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") TableauSite.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +TableauSite.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauSite.MEANINGS = RelationField("meanings") TableauSite.MC_MONITORS = RelationField("mcMonitors") TableauSite.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_workbook.py b/pyatlan_v9/model/assets/tableau_workbook.py index 59ffdc75d..0b11c1f72 100644 --- a/pyatlan_v9/model/assets/tableau_workbook.py +++ b/pyatlan_v9/model/assets/tableau_workbook.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -55,7 +56,6 @@ RelatedTableauDashboard, RelatedTableauDatasource, RelatedTableauProject, - RelatedTableauWorkbook, RelatedTableauWorksheet, ) @@ -91,6 +91,7 @@ class TableauWorkbook(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: 
ClassVar[Any] = None @@ -112,6 +113,8 @@ class TableauWorkbook(Asset): WORKSHEETS: ClassVar[Any] = None PROJECT: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauWorkbook" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this workbook exists.""" @@ -179,6 +182,11 @@ class TableauWorkbook(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -254,76 +262,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauWorkbook instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.project is UNSET: - errors.append("project is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauWorkbook validation failed: {errors}") - - def minimize(self) -> "TableauWorkbook": - """ - Return a minimal copy of this TableauWorkbook with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauWorkbook with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauWorkbook instance with only the minimum required fields. - """ - self.validate() - return TableauWorkbook(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauWorkbook": - """ - Create a :class:`RelatedTableauWorkbook` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauWorkbook reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauWorkbook(guid=self.guid) - return RelatedTableauWorkbook(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -452,6 +390,11 @@ class TableauWorkbookRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -552,6 +495,7 @@ class TableauWorkbookNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -643,9 +587,6 @@ def _tableau_workbook_to_nested( is_incomplete=tableau_workbook.is_incomplete, provenance_type=tableau_workbook.provenance_type, home_id=tableau_workbook.home_id, - depth=tableau_workbook.depth, - immediate_upstream=tableau_workbook.immediate_upstream, - immediate_downstream=tableau_workbook.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -679,6 +620,7 @@ def _tableau_workbook_from_nested(nested: TableauWorkbookNested) -> TableauWorkb updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -687,9 +629,6 @@ def _tableau_workbook_from_nested(nested: TableauWorkbookNested) -> TableauWorkb is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_workbook_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -751,6 +690,9 @@ def _tableau_workbook_from_nested_bytes(data: bytes, serde: Serde) -> TableauWor TableauWorkbook.METRICS = RelationField("metrics") TableauWorkbook.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") TableauWorkbook.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +TableauWorkbook.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauWorkbook.MEANINGS = RelationField("meanings") TableauWorkbook.MC_MONITORS = RelationField("mcMonitors") TableauWorkbook.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_worksheet.py b/pyatlan_v9/model/assets/tableau_worksheet.py index 6fc7bb666..89675af52 100644 --- a/pyatlan_v9/model/assets/tableau_worksheet.py +++ b/pyatlan_v9/model/assets/tableau_worksheet.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -56,7 +57,6 @@ RelatedTableauDashboard, RelatedTableauDatasourceField, RelatedTableauWorkbook, - RelatedTableauWorksheet, RelatedTableauWorksheetField, ) @@ -92,6 +92,7 @@ class TableauWorksheet(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None 
MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -114,6 +115,8 @@ class TableauWorksheet(Asset): CALCULATED_FIELDS: ClassVar[Any] = None WORKBOOK: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauWorksheet" + site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this worksheet exists.""" @@ -181,6 +184,11 @@ class TableauWorksheet(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -265,78 +273,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauWorksheet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.workbook is UNSET: - errors.append("workbook is required for creation") - if self.workbook_qualified_name is UNSET: - errors.append("workbook_qualified_name is required for creation") - if self.project_qualified_name is UNSET: - errors.append("project_qualified_name is required for creation") - if self.site_qualified_name is UNSET: - errors.append("site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauWorksheet validation failed: {errors}") - - def minimize(self) -> "TableauWorksheet": - """ - Return a minimal copy of this TableauWorksheet with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauWorksheet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauWorksheet instance with only the minimum required fields. - """ - self.validate() - return TableauWorksheet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauWorksheet": - """ - Create a :class:`RelatedTableauWorksheet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauWorksheet reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTableauWorksheet(guid=self.guid) - return RelatedTableauWorksheet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -465,6 +401,11 @@ class TableauWorksheetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -574,6 +515,7 @@ class TableauWorksheetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -666,9 +608,6 @@ def _tableau_worksheet_to_nested( is_incomplete=tableau_worksheet.is_incomplete, provenance_type=tableau_worksheet.provenance_type, home_id=tableau_worksheet.home_id, - depth=tableau_worksheet.depth, - immediate_upstream=tableau_worksheet.immediate_upstream, - immediate_downstream=tableau_worksheet.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -702,6 +641,7 @@ def _tableau_worksheet_from_nested(nested: TableauWorksheetNested) -> TableauWor updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -710,9 +650,6 @@ def _tableau_worksheet_from_nested(nested: TableauWorksheetNested) -> TableauWor 
is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_worksheet_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -776,6 +713,9 @@ def _tableau_worksheet_from_nested_bytes(data: bytes, serde: Serde) -> TableauWo TableauWorksheet.METRICS = RelationField("metrics") TableauWorksheet.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") TableauWorksheet.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +TableauWorksheet.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauWorksheet.MEANINGS = RelationField("meanings") TableauWorksheet.MC_MONITORS = RelationField("mcMonitors") TableauWorksheet.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tableau_worksheet_field.py b/pyatlan_v9/model/assets/tableau_worksheet_field.py index 26afc0efe..774483ebe 100644 --- a/pyatlan_v9/model/assets/tableau_worksheet_field.py +++ b/pyatlan_v9/model/assets/tableau_worksheet_field.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -56,7 +57,6 @@ RelatedTableauDashboardField, RelatedTableauDatasourceField, RelatedTableauWorksheet, - RelatedTableauWorksheetField, ) # ============================================================================= @@ -102,6 +102,7 @@ class TableauWorksheetField(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None 
DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -123,6 +124,8 @@ class TableauWorksheetField(Asset): TABLEAU_CALCULATED_FIELD: ClassVar[Any] = None TABLEAU_WORKSHEET: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "TableauWorksheetField" + tableau_site_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the site in which this worksheet field exists.""" @@ -229,6 +232,11 @@ class TableauWorksheetField(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -310,84 +318,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this TableauWorksheetField instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.tableau_worksheet is UNSET: - errors.append("tableau_worksheet is required for creation") - if self.tableau_worksheet_qualified_name is UNSET: - errors.append( - "tableau_worksheet_qualified_name is required for creation" - ) - if self.tableau_workbook_qualified_name is UNSET: - errors.append( - "tableau_workbook_qualified_name is required for creation" - ) - if self.tableau_project_qualified_name is UNSET: - errors.append("tableau_project_qualified_name is required for creation") - if self.tableau_site_qualified_name is UNSET: - errors.append("tableau_site_qualified_name is required for creation") - if errors: - raise ValueError(f"TableauWorksheetField validation failed: {errors}") - - def minimize(self) -> "TableauWorksheetField": - """ - Return a minimal copy of this TableauWorksheetField with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new TableauWorksheetField with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new TableauWorksheetField instance with only the minimum required fields. 
- """ - self.validate() - return TableauWorksheetField(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTableauWorksheetField": - """ - Create a :class:`RelatedTableauWorksheetField` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTableauWorksheetField reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTableauWorksheetField(guid=self.guid) - return RelatedTableauWorksheetField(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -555,6 +485,11 @@ class TableauWorksheetFieldRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -661,6 +596,7 @@ class TableauWorksheetFieldNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -798,9 +734,6 @@ def _tableau_worksheet_field_to_nested( is_incomplete=tableau_worksheet_field.is_incomplete, provenance_type=tableau_worksheet_field.provenance_type, home_id=tableau_worksheet_field.home_id, - depth=tableau_worksheet_field.depth, - immediate_upstream=tableau_worksheet_field.immediate_upstream, - immediate_downstream=tableau_worksheet_field.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, 
append_relationship_attributes=append_rels, @@ -836,6 +769,7 @@ def _tableau_worksheet_field_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -844,9 +778,6 @@ def _tableau_worksheet_field_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_tableau_worksheet_field_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -959,6 +890,9 @@ def _tableau_worksheet_field_from_nested_bytes( TableauWorksheetField.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +TableauWorksheetField.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) TableauWorksheetField.MEANINGS = RelationField("meanings") TableauWorksheetField.MC_MONITORS = RelationField("mcMonitors") TableauWorksheetField.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/tag.py b/pyatlan_v9/model/assets/tag.py index e75968664..bb823f8e8 100644 --- a/pyatlan_v9/model/assets/tag.py +++ b/pyatlan_v9/model/assets/tag.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from 
.spark_related import RelatedSparkJob -from .tag_related import RelatedTag # ============================================================================= # FLAT ASSET CLASS @@ -82,6 +82,7 @@ class Tag(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -99,6 +100,8 @@ class Tag(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Tag" + tag_id: Union[str, None, UnsetType] = UNSET """Unique identifier of the tag in the source system.""" @@ -160,6 +163,11 @@ class Tag(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -215,73 +223,6 @@ class Tag(Asset): def __post_init__(self) -> None: self.type_name = "Tag" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Tag instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). 
- - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if for_creation: - if self.tag_id is UNSET: - errors.append("tag_id is required for creation") - if self.tag_allowed_values is UNSET: - errors.append("tag_allowed_values is required for creation") - if self.mapped_classification_name is UNSET: - errors.append("mapped_classification_name is required for creation") - if errors: - raise ValueError(f"Tag validation failed: {errors}") - - def minimize(self) -> "Tag": - """ - Return a minimal copy of this Tag with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Tag with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Tag instance with only the minimum required fields. - """ - self.validate() - return Tag(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTag": - """ - Create a :class:`RelatedTag` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTag reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedTag(guid=self.guid) - return RelatedTag(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -402,6 +343,11 @@ class TagRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -484,6 +430,7 @@ class TagNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -557,9 +504,6 @@ def _tag_to_nested(tag: Tag) -> TagNested: is_incomplete=tag.is_incomplete, provenance_type=tag.provenance_type, home_id=tag.home_id, - depth=tag.depth, - immediate_upstream=tag.immediate_upstream, - immediate_downstream=tag.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -589,6 +533,7 @@ def _tag_from_nested(nested: TagNested) -> Tag: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -597,9 +542,6 @@ def _tag_from_nested(nested: TagNested) -> Tag: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, 
**_extract_tag_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -649,6 +591,9 @@ def _tag_from_nested_bytes(data: bytes, serde: Serde) -> Tag: Tag.METRICS = RelationField("metrics") Tag.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Tag.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Tag.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Tag.MEANINGS = RelationField("meanings") Tag.MC_MONITORS = RelationField("mcMonitors") Tag.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/task.py b/pyatlan_v9/model/assets/task.py index 875291046..e2a17b9f5 100644 --- a/pyatlan_v9/model/assets/task.py +++ b/pyatlan_v9/model/assets/task.py @@ -39,13 +39,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck -from .task_related import RelatedTask # ============================================================================= # FLAT ASSET CLASS @@ -82,6 +82,7 @@ class Task(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -93,6 +94,8 @@ class Task(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, 
UnsetType] = "Task" + task_recipient: Union[str, None, UnsetType] = UNSET """Recipient of the task.""" @@ -167,6 +170,11 @@ class Task(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -204,66 +212,6 @@ class Task(Asset): def __post_init__(self) -> None: self.type_name = "Task" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Task instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Task validation failed: {errors}") - - def minimize(self) -> "Task": - """ - Return a minimal copy of this Task with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Task with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Task instance with only the minimum required fields. - """ - self.validate() - return Task(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedTask": - """ - Create a :class:`RelatedTask` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedTask reference to this asset. - """ - if self.guid is not UNSET: - return RelatedTask(guid=self.guid) - return RelatedTask(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -397,6 +345,11 @@ class TaskRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -457,6 +410,7 @@ class TaskNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -542,9 +496,6 @@ def _task_to_nested(task: Task) -> TaskNested: is_incomplete=task.is_incomplete, provenance_type=task.provenance_type, home_id=task.home_id, - depth=task.depth, - immediate_upstream=task.immediate_upstream, - immediate_downstream=task.immediate_downstream, attributes=attrs, 
relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -574,6 +525,7 @@ def _task_from_nested(nested: TaskNested) -> Task: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -582,9 +534,6 @@ def _task_from_nested(nested: TaskNested) -> Task: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_task_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -644,6 +593,9 @@ def _task_from_nested_bytes(data: bytes, serde: Serde) -> Task: Task.METRICS = RelationField("metrics") Task.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Task.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Task.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Task.MEANINGS = RelationField("meanings") Task.MC_MONITORS = RelationField("mcMonitors") Task.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/thoughtspot.py b/pyatlan_v9/model/assets/thoughtspot.py index 504ef6f99..4a14b238d 100644 --- a/pyatlan_v9/model/assets/thoughtspot.py +++ b/pyatlan_v9/model/assets/thoughtspot.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ from 
.schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspot # ============================================================================= # FLAT ASSET CLASS @@ -82,6 +82,7 @@ class Thoughtspot(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -99,6 +100,8 @@ class Thoughtspot(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Thoughtspot" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -160,6 +163,11 @@ class Thoughtspot(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -215,66 +223,6 @@ class Thoughtspot(Asset): def __post_init__(self) -> None: self.type_name = "Thoughtspot" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Thoughtspot instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. 
- - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Thoughtspot validation failed: {errors}") - - def minimize(self) -> "Thoughtspot": - """ - Return a minimal copy of this Thoughtspot with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Thoughtspot with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Thoughtspot instance with only the minimum required fields. - """ - self.validate() - return Thoughtspot(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspot": - """ - Create a :class:`RelatedThoughtspot` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspot reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspot(guid=self.guid) - return RelatedThoughtspot(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -395,6 +343,11 @@ class ThoughtspotRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -481,6 +434,7 @@ class ThoughtspotNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -554,9 +508,6 @@ def _thoughtspot_to_nested(thoughtspot: Thoughtspot) -> ThoughtspotNested: is_incomplete=thoughtspot.is_incomplete, provenance_type=thoughtspot.provenance_type, home_id=thoughtspot.home_id, - depth=thoughtspot.depth, - immediate_upstream=thoughtspot.immediate_upstream, - immediate_downstream=thoughtspot.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -588,6 +539,7 @@ def _thoughtspot_from_nested(nested: ThoughtspotNested) -> Thoughtspot: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -596,9 +548,6 @@ def _thoughtspot_from_nested(nested: ThoughtspotNested) -> Thoughtspot: is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -656,6 +605,9 @@ def _thoughtspot_from_nested_bytes(data: bytes, serde: Serde) -> Thoughtspot: Thoughtspot.METRICS = RelationField("metrics") Thoughtspot.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Thoughtspot.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Thoughtspot.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Thoughtspot.MEANINGS = RelationField("meanings") Thoughtspot.MC_MONITORS = RelationField("mcMonitors") Thoughtspot.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/thoughtspot_answer.py b/pyatlan_v9/model/assets/thoughtspot_answer.py index 18dac0d1a..ae6666b7f 100644 --- a/pyatlan_v9/model/assets/thoughtspot_answer.py +++ b/pyatlan_v9/model/assets/thoughtspot_answer.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,6 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotAnswer # ============================================================================= # FLAT ASSET CLASS @@ -82,6 +82,7 @@ class ThoughtspotAnswer(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None 
DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -99,6 +100,8 @@ class ThoughtspotAnswer(Asset): INPUT_TO_SPARK_JOBS: ClassVar[Any] = None OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotAnswer" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -160,6 +163,11 @@ class ThoughtspotAnswer(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -215,66 +223,6 @@ class ThoughtspotAnswer(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotAnswer" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotAnswer instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotAnswer validation failed: {errors}") - - def minimize(self) -> "ThoughtspotAnswer": - """ - Return a minimal copy of this ThoughtspotAnswer with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotAnswer with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotAnswer instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotAnswer(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotAnswer": - """ - Create a :class:`RelatedThoughtspotAnswer` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotAnswer reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspotAnswer(guid=self.guid) - return RelatedThoughtspotAnswer(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -397,6 +345,11 @@ class ThoughtspotAnswerRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -485,6 +438,7 @@ class ThoughtspotAnswerNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -564,9 +518,6 @@ def _thoughtspot_answer_to_nested( is_incomplete=thoughtspot_answer.is_incomplete, provenance_type=thoughtspot_answer.provenance_type, home_id=thoughtspot_answer.home_id, - depth=thoughtspot_answer.depth, - immediate_upstream=thoughtspot_answer.immediate_upstream, - immediate_downstream=thoughtspot_answer.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -602,6 +553,7 @@ def _thoughtspot_answer_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -610,9 +562,6 @@ def _thoughtspot_answer_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - 
depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_answer_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -676,6 +625,9 @@ def _thoughtspot_answer_from_nested_bytes( ThoughtspotAnswer.METRICS = RelationField("metrics") ThoughtspotAnswer.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ThoughtspotAnswer.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ThoughtspotAnswer.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ThoughtspotAnswer.MEANINGS = RelationField("meanings") ThoughtspotAnswer.MC_MONITORS = RelationField("mcMonitors") ThoughtspotAnswer.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/thoughtspot_column.py b/pyatlan_v9/model/assets/thoughtspot_column.py index 7d4b67a6c..cfed766cb 100644 --- a/pyatlan_v9/model/assets/thoughtspot_column.py +++ b/pyatlan_v9/model/assets/thoughtspot_column.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -52,7 +53,6 @@ from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob from .thoughtspot_related import ( - RelatedThoughtspotColumn, RelatedThoughtspotTable, RelatedThoughtspotView, RelatedThoughtspotWorksheet, @@ -72,8 +72,8 @@ class ThoughtspotColumn(Asset): THOUGHTSPOT_TABLE_QUALIFIED_NAME: ClassVar[Any] = None THOUGHTSPOT_VIEW_QUALIFIED_NAME: ClassVar[Any] = None THOUGHTSPOT_WORKSHEET_QUALIFIED_NAME: ClassVar[Any] = None - THOUGHTSPOT_COLUMN_DATA_TYPE: ClassVar[Any] = 
None - THOUGHTSPOT_COLUMN_TYPE: ClassVar[Any] = None + THOUGHTSPOT_DATA_TYPE: ClassVar[Any] = None + THOUGHTSPOT_TYPE: ClassVar[Any] = None THOUGHTSPOT_CHART_TYPE: ClassVar[Any] = None THOUGHTSPOT_QUESTION_TEXT: ClassVar[Any] = None THOUGHTSPOT_JOIN_COUNT: ClassVar[Any] = None @@ -93,6 +93,7 @@ class ThoughtspotColumn(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -113,6 +114,8 @@ class ThoughtspotColumn(Asset): THOUGHTSPOT_VIEW: ClassVar[Any] = None THOUGHTSPOT_WORKSHEET: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotColumn" + thoughtspot_table_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the table in which this column exists.""" @@ -122,10 +125,10 @@ class ThoughtspotColumn(Asset): thoughtspot_worksheet_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the worksheet in which this column exists.""" - thoughtspot_column_data_type: Union[str, None, UnsetType] = UNSET + thoughtspot_data_type: Union[str, None, UnsetType] = UNSET """Specifies the technical format of data stored in a column such as integer, float, string, date, boolean etc.""" - thoughtspot_column_type: Union[str, None, UnsetType] = UNSET + thoughtspot_type: Union[str, None, UnsetType] = UNSET """Defines the analytical role of a column in data analysis categorizing it as a dimension, measure, or attribute.""" thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET @@ -189,6 +192,11 @@ class ThoughtspotColumn(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: 
Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -259,76 +267,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotColumn instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.thoughtspot_table is UNSET: - errors.append("thoughtspot_table is required for creation") - if self.thoughtspot_table_qualified_name is UNSET: - errors.append( - "thoughtspot_table_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"ThoughtspotColumn validation failed: {errors}") - - def minimize(self) -> "ThoughtspotColumn": - """ - Return a minimal copy of this ThoughtspotColumn with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotColumn with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotColumn instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotColumn(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotColumn": - """ - Create a :class:`RelatedThoughtspotColumn` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotColumn reference to this asset. - """ - if self.guid is not UNSET: - return RelatedThoughtspotColumn(guid=self.guid) - return RelatedThoughtspotColumn(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -395,10 +333,10 @@ class ThoughtspotColumnAttributes(AssetAttributes): thoughtspot_worksheet_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the worksheet in which this column exists.""" - thoughtspot_column_data_type: Union[str, None, UnsetType] = UNSET + thoughtspot_data_type: Union[str, None, UnsetType] = UNSET """Specifies the technical format of data stored in a column such as integer, float, string, date, boolean etc.""" - thoughtspot_column_type: Union[str, None, UnsetType] = UNSET + thoughtspot_type: Union[str, None, UnsetType] = UNSET """Defines the analytical role of a column in data analysis categorizing it as a dimension, measure, or attribute.""" thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET @@ -466,6 +404,11 @@ class ThoughtspotColumnRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where 
this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -563,6 +506,7 @@ class ThoughtspotColumnNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -595,8 +539,8 @@ def _populate_thoughtspot_column_attrs( attrs.thoughtspot_worksheet_qualified_name = ( obj.thoughtspot_worksheet_qualified_name ) - attrs.thoughtspot_column_data_type = obj.thoughtspot_column_data_type - attrs.thoughtspot_column_type = obj.thoughtspot_column_type + attrs.thoughtspot_data_type = obj.thoughtspot_data_type + attrs.thoughtspot_type = obj.thoughtspot_type attrs.thoughtspot_chart_type = obj.thoughtspot_chart_type attrs.thoughtspot_question_text = obj.thoughtspot_question_text attrs.thoughtspot_join_count = obj.thoughtspot_join_count @@ -612,8 +556,8 @@ def _extract_thoughtspot_column_attrs(attrs: ThoughtspotColumnAttributes) -> dic result["thoughtspot_worksheet_qualified_name"] = ( attrs.thoughtspot_worksheet_qualified_name ) - result["thoughtspot_column_data_type"] = attrs.thoughtspot_column_data_type - result["thoughtspot_column_type"] = attrs.thoughtspot_column_type + result["thoughtspot_data_type"] = attrs.thoughtspot_data_type + result["thoughtspot_type"] = attrs.thoughtspot_type result["thoughtspot_chart_type"] = attrs.thoughtspot_chart_type result["thoughtspot_question_text"] = attrs.thoughtspot_question_text result["thoughtspot_join_count"] = attrs.thoughtspot_join_count @@ -659,9 +603,6 @@ def _thoughtspot_column_to_nested( is_incomplete=thoughtspot_column.is_incomplete, provenance_type=thoughtspot_column.provenance_type, home_id=thoughtspot_column.home_id, 
- depth=thoughtspot_column.depth, - immediate_upstream=thoughtspot_column.immediate_upstream, - immediate_downstream=thoughtspot_column.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -697,6 +638,7 @@ def _thoughtspot_column_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -705,9 +647,6 @@ def _thoughtspot_column_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_column_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -754,12 +693,10 @@ def _thoughtspot_column_from_nested_bytes( "thoughtspotWorksheetQualifiedName", "thoughtspotWorksheetQualifiedName.text", ) -ThoughtspotColumn.THOUGHTSPOT_COLUMN_DATA_TYPE = KeywordField( - "thoughtspotColumnDataType", "thoughtspotColumnDataType" -) -ThoughtspotColumn.THOUGHTSPOT_COLUMN_TYPE = KeywordField( - "thoughtspotColumnType", "thoughtspotColumnType" +ThoughtspotColumn.THOUGHTSPOT_DATA_TYPE = KeywordField( + "thoughtspotDataType", "thoughtspotDataType" ) +ThoughtspotColumn.THOUGHTSPOT_TYPE = KeywordField("thoughtspotType", "thoughtspotType") ThoughtspotColumn.THOUGHTSPOT_CHART_TYPE = KeywordField( "thoughtspotChartType", "thoughtspotChartType" ) @@ -793,6 +730,9 @@ def _thoughtspot_column_from_nested_bytes( ThoughtspotColumn.METRICS = RelationField("metrics") ThoughtspotColumn.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ThoughtspotColumn.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ThoughtspotColumn.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + 
"gcpDataplexAspectTypeMetadataEntities" +) ThoughtspotColumn.MEANINGS = RelationField("meanings") ThoughtspotColumn.MC_MONITORS = RelationField("mcMonitors") ThoughtspotColumn.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/thoughtspot_dashlet.py b/pyatlan_v9/model/assets/thoughtspot_dashlet.py index 8fd065d28..20bf2cc4c 100644 --- a/pyatlan_v9/model/assets/thoughtspot_dashlet.py +++ b/pyatlan_v9/model/assets/thoughtspot_dashlet.py @@ -41,6 +41,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -51,7 +52,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotDashlet, RelatedThoughtspotLiveboard +from .thoughtspot_related import RelatedThoughtspotLiveboard # ============================================================================= # FLAT ASSET CLASS @@ -85,6 +86,7 @@ class ThoughtspotDashlet(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -103,6 +105,8 @@ class ThoughtspotDashlet(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_LIVEBOARD: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotDashlet" + thoughtspot_liveboard_name: Union[str, None, UnsetType] = UNSET """Simple name of the liveboard in which this dashlet 
exists.""" @@ -170,6 +174,11 @@ class ThoughtspotDashlet(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -234,78 +243,6 @@ def __post_init__(self) -> None: _QUALIFIED_NAME_PATTERN: ClassVar[re.Pattern] = re.compile(r"^.+/[^/]+/[^/]+$") - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotDashlet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.thoughtspot_liveboard is UNSET: - errors.append("thoughtspot_liveboard is required for creation") - if self.thoughtspot_liveboard_name is UNSET: - errors.append("thoughtspot_liveboard_name is required for creation") - if self.thoughtspot_liveboard_qualified_name is UNSET: - errors.append( - "thoughtspot_liveboard_qualified_name is required for creation" - ) - if errors: - raise ValueError(f"ThoughtspotDashlet validation failed: {errors}") - - def minimize(self) -> "ThoughtspotDashlet": - """ - Return a minimal copy of this ThoughtspotDashlet with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotDashlet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotDashlet instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotDashlet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotDashlet": - """ - Create a :class:`RelatedThoughtspotDashlet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotDashlet reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspotDashlet(guid=self.guid) - return RelatedThoughtspotDashlet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -434,6 +371,11 @@ class ThoughtspotDashletRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -525,6 +467,7 @@ class ThoughtspotDashletNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -613,9 +556,6 @@ def _thoughtspot_dashlet_to_nested( is_incomplete=thoughtspot_dashlet.is_incomplete, provenance_type=thoughtspot_dashlet.provenance_type, home_id=thoughtspot_dashlet.home_id, - depth=thoughtspot_dashlet.depth, - immediate_upstream=thoughtspot_dashlet.immediate_upstream, - immediate_downstream=thoughtspot_dashlet.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -651,6 +591,7 @@ def _thoughtspot_dashlet_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -659,9 +600,6 @@ def _thoughtspot_dashlet_from_nested( is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, 
home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_dashlet_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -738,6 +676,9 @@ def _thoughtspot_dashlet_from_nested_bytes( ThoughtspotDashlet.METRICS = RelationField("metrics") ThoughtspotDashlet.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ThoughtspotDashlet.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ThoughtspotDashlet.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ThoughtspotDashlet.MEANINGS = RelationField("meanings") ThoughtspotDashlet.MC_MONITORS = RelationField("mcMonitors") ThoughtspotDashlet.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/thoughtspot_liveboard.py b/pyatlan_v9/model/assets/thoughtspot_liveboard.py index 11f595d8c..d87e5ad65 100644 --- a/pyatlan_v9/model/assets/thoughtspot_liveboard.py +++ b/pyatlan_v9/model/assets/thoughtspot_liveboard.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotDashlet, RelatedThoughtspotLiveboard +from .thoughtspot_related import RelatedThoughtspotDashlet # ============================================================================= # FLAT ASSET CLASS @@ -82,6 +83,7 @@ class 
ThoughtspotLiveboard(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +102,8 @@ class ThoughtspotLiveboard(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_DASHLETS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotLiveboard" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -161,6 +165,11 @@ class ThoughtspotLiveboard(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -221,66 +230,6 @@ class ThoughtspotLiveboard(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotLiveboard" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotLiveboard instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotLiveboard validation failed: {errors}") - - def minimize(self) -> "ThoughtspotLiveboard": - """ - Return a minimal copy of this ThoughtspotLiveboard with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotLiveboard with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotLiveboard instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotLiveboard(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotLiveboard": - """ - Create a :class:`RelatedThoughtspotLiveboard` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotLiveboard reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspotLiveboard(guid=self.guid) - return RelatedThoughtspotLiveboard(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -403,6 +352,11 @@ class ThoughtspotLiveboardRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -496,6 +450,7 @@ class ThoughtspotLiveboardNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -576,9 +531,6 @@ def _thoughtspot_liveboard_to_nested( is_incomplete=thoughtspot_liveboard.is_incomplete, provenance_type=thoughtspot_liveboard.provenance_type, home_id=thoughtspot_liveboard.home_id, - depth=thoughtspot_liveboard.depth, - immediate_upstream=thoughtspot_liveboard.immediate_upstream, - immediate_downstream=thoughtspot_liveboard.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -614,6 +566,7 @@ def _thoughtspot_liveboard_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -622,9 +575,6 @@ def _thoughtspot_liveboard_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_liveboard_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -692,6 +642,9 @@ def _thoughtspot_liveboard_from_nested_bytes( ThoughtspotLiveboard.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +ThoughtspotLiveboard.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ThoughtspotLiveboard.MEANINGS = RelationField("meanings") ThoughtspotLiveboard.MC_MONITORS = RelationField("mcMonitors") ThoughtspotLiveboard.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/thoughtspot_related.py b/pyatlan_v9/model/assets/thoughtspot_related.py index aacb3b26e..c48b6705e 100644 --- a/pyatlan_v9/model/assets/thoughtspot_related.py +++ b/pyatlan_v9/model/assets/thoughtspot_related.py @@ -172,10 +172,10 @@ class RelatedThoughtspotColumn(RelatedThoughtspot): thoughtspot_worksheet_qualified_name: Union[str, None, UnsetType] = UNSET """Unique name of the worksheet in which this column exists.""" - thoughtspot_column_data_type: Union[str, None, UnsetType] = UNSET + thoughtspot_data_type: Union[str, None, UnsetType] = UNSET """Specifies the technical format of data stored in a column such as integer, float, string, date, boolean etc.""" - thoughtspot_column_type: Union[str, None, UnsetType] = UNSET + thoughtspot_type: Union[str, None, UnsetType] = UNSET """Defines the analytical role of a column in data analysis categorizing it as a dimension, measure, or attribute.""" def __post_init__(self) -> None: diff --git a/pyatlan_v9/model/assets/thoughtspot_table.py b/pyatlan_v9/model/assets/thoughtspot_table.py index a3b02f0ed..4e920806d 100644 --- a/pyatlan_v9/model/assets/thoughtspot_table.py +++ b/pyatlan_v9/model/assets/thoughtspot_table.py @@ -40,6 +40,7 @@ from 
.data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotColumn, RelatedThoughtspotTable +from .thoughtspot_related import RelatedThoughtspotColumn # ============================================================================= # FLAT ASSET CLASS @@ -82,6 +83,7 @@ class ThoughtspotTable(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +102,8 @@ class ThoughtspotTable(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_COLUMNS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotTable" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -161,6 +165,11 @@ class ThoughtspotTable(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -219,66 +228,6 @@ class ThoughtspotTable(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotTable" - # 
========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotTable instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotTable validation failed: {errors}") - - def minimize(self) -> "ThoughtspotTable": - """ - Return a minimal copy of this ThoughtspotTable with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotTable with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotTable instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotTable(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotTable": - """ - Create a :class:`RelatedThoughtspotTable` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. 
- - Returns: - A RelatedThoughtspotTable reference to this asset. - """ - if self.guid is not UNSET: - return RelatedThoughtspotTable(guid=self.guid) - return RelatedThoughtspotTable(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -401,6 +350,11 @@ class ThoughtspotTableRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -492,6 +446,7 @@ class ThoughtspotTableNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -572,9 +527,6 @@ def _thoughtspot_table_to_nested( is_incomplete=thoughtspot_table.is_incomplete, provenance_type=thoughtspot_table.provenance_type, home_id=thoughtspot_table.home_id, - depth=thoughtspot_table.depth, - immediate_upstream=thoughtspot_table.immediate_upstream, - immediate_downstream=thoughtspot_table.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -608,6 +560,7 @@ def _thoughtspot_table_from_nested(nested: ThoughtspotTableNested) -> Thoughtspo updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -616,9 +569,6 @@ def _thoughtspot_table_from_nested(nested: 
ThoughtspotTableNested) -> Thoughtspo is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_table_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -680,6 +630,9 @@ def _thoughtspot_table_from_nested_bytes(data: bytes, serde: Serde) -> Thoughtsp ThoughtspotTable.METRICS = RelationField("metrics") ThoughtspotTable.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ThoughtspotTable.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ThoughtspotTable.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ThoughtspotTable.MEANINGS = RelationField("meanings") ThoughtspotTable.MC_MONITORS = RelationField("mcMonitors") ThoughtspotTable.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/thoughtspot_view.py b/pyatlan_v9/model/assets/thoughtspot_view.py index 34dc15dd4..45a135b7b 100644 --- a/pyatlan_v9/model/assets/thoughtspot_view.py +++ b/pyatlan_v9/model/assets/thoughtspot_view.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotColumn, RelatedThoughtspotView +from .thoughtspot_related import RelatedThoughtspotColumn # 
============================================================================= # FLAT ASSET CLASS @@ -82,6 +83,7 @@ class ThoughtspotView(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +102,8 @@ class ThoughtspotView(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_COLUMNS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotView" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -161,6 +165,11 @@ class ThoughtspotView(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -219,66 +228,6 @@ class ThoughtspotView(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotView" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotView instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotView validation failed: {errors}") - - def minimize(self) -> "ThoughtspotView": - """ - Return a minimal copy of this ThoughtspotView with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotView with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotView instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotView(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotView": - """ - Create a :class:`RelatedThoughtspotView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspotView(guid=self.guid) - return RelatedThoughtspotView(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -401,6 +350,11 @@ class ThoughtspotViewRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -492,6 +446,7 @@ class ThoughtspotViewNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -572,9 +527,6 @@ def _thoughtspot_view_to_nested( is_incomplete=thoughtspot_view.is_incomplete, provenance_type=thoughtspot_view.provenance_type, home_id=thoughtspot_view.home_id, - depth=thoughtspot_view.depth, - immediate_upstream=thoughtspot_view.immediate_upstream, - immediate_downstream=thoughtspot_view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -608,6 +560,7 @@ def _thoughtspot_view_from_nested(nested: ThoughtspotViewNested) -> ThoughtspotV updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -616,9 +569,6 @@ def _thoughtspot_view_from_nested(nested: ThoughtspotViewNested) -> ThoughtspotV is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -680,6 +630,9 @@ def _thoughtspot_view_from_nested_bytes(data: bytes, serde: Serde) -> Thoughtspo ThoughtspotView.METRICS = RelationField("metrics") ThoughtspotView.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") ThoughtspotView.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +ThoughtspotView.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ThoughtspotView.MEANINGS = RelationField("meanings") ThoughtspotView.MC_MONITORS = RelationField("mcMonitors") ThoughtspotView.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/thoughtspot_worksheet.py b/pyatlan_v9/model/assets/thoughtspot_worksheet.py index 4a859132f..ad837e90e 100644 --- a/pyatlan_v9/model/assets/thoughtspot_worksheet.py +++ b/pyatlan_v9/model/assets/thoughtspot_worksheet.py @@ -40,6 +40,7 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -50,7 +51,7 @@ from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck from .spark_related import RelatedSparkJob -from .thoughtspot_related import RelatedThoughtspotColumn, RelatedThoughtspotWorksheet +from .thoughtspot_related import RelatedThoughtspotColumn # ============================================================================= # FLAT ASSET 
CLASS @@ -82,6 +83,7 @@ class ThoughtspotWorksheet(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -100,6 +102,8 @@ class ThoughtspotWorksheet(Asset): OUTPUT_FROM_SPARK_JOBS: ClassVar[Any] = None THOUGHTSPOT_COLUMNS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "ThoughtspotWorksheet" + thoughtspot_chart_type: Union[str, None, UnsetType] = UNSET """""" @@ -161,6 +165,11 @@ class ThoughtspotWorksheet(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -219,66 +228,6 @@ class ThoughtspotWorksheet(Asset): def __post_init__(self) -> None: self.type_name = "ThoughtspotWorksheet" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this ThoughtspotWorksheet instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. 
- - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"ThoughtspotWorksheet validation failed: {errors}") - - def minimize(self) -> "ThoughtspotWorksheet": - """ - Return a minimal copy of this ThoughtspotWorksheet with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new ThoughtspotWorksheet with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new ThoughtspotWorksheet instance with only the minimum required fields. - """ - self.validate() - return ThoughtspotWorksheet(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedThoughtspotWorksheet": - """ - Create a :class:`RelatedThoughtspotWorksheet` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedThoughtspotWorksheet reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedThoughtspotWorksheet(guid=self.guid) - return RelatedThoughtspotWorksheet(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -401,6 +350,11 @@ class ThoughtspotWorksheetRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -492,6 +446,7 @@ class ThoughtspotWorksheetNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -572,9 +527,6 @@ def _thoughtspot_worksheet_to_nested( is_incomplete=thoughtspot_worksheet.is_incomplete, provenance_type=thoughtspot_worksheet.provenance_type, home_id=thoughtspot_worksheet.home_id, - depth=thoughtspot_worksheet.depth, - immediate_upstream=thoughtspot_worksheet.immediate_upstream, - immediate_downstream=thoughtspot_worksheet.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -610,6 +562,7 @@ def _thoughtspot_worksheet_from_nested( updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -618,9 +571,6 @@ def _thoughtspot_worksheet_from_nested( is_incomplete=nested.is_incomplete, 
provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_thoughtspot_worksheet_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -688,6 +638,9 @@ def _thoughtspot_worksheet_from_nested_bytes( ThoughtspotWorksheet.DQ_REFERENCE_DATASET_RULES = RelationField( "dqReferenceDatasetRules" ) +ThoughtspotWorksheet.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) ThoughtspotWorksheet.MEANINGS = RelationField("meanings") ThoughtspotWorksheet.MC_MONITORS = RelationField("mcMonitors") ThoughtspotWorksheet.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/view.py b/pyatlan_v9/model/assets/view.py index ea9a84048..a8fcfb2c7 100644 --- a/pyatlan_v9/model/assets/view.py +++ b/pyatlan_v9/model/assets/view.py @@ -49,6 +49,7 @@ RelatedDbtSource, RelatedDbtTest, ) +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .model_related import RelatedModelAttribute, RelatedModelEntity from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor @@ -64,7 +65,7 @@ RelatedSqlInsightBusinessQuestion, RelatedSqlInsightJoin, ) -from .sql_related import RelatedColumn, RelatedQuery, RelatedSchema, RelatedView +from .sql_related import RelatedColumn, RelatedQuery, RelatedSchema # ============================================================================= # FLAT ASSET CLASS @@ -130,6 +131,7 @@ class View(Asset): DBT_SOURCES: ClassVar[Any] = None SQL_DBT_SOURCES: ClassVar[Any] = None DBT_SEED_ASSETS: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -154,6 +156,8 @@ class View(Asset): SQL_INSIGHT_INCOMING_JOINS: ClassVar[Any] = None 
SQL_INSIGHT_BUSINESS_QUESTIONS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "View" + column_count: Union[int, None, UnsetType] = UNSET """Number of columns in this view.""" @@ -321,6 +325,11 @@ class View(Asset): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -413,80 +422,6 @@ def __post_init__(self) -> None: r"^.+/[^/]+/[^/]+/[^/]+$" ) - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this View instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. 
- """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - elif not self._QUALIFIED_NAME_PATTERN.match(self.qualified_name): - errors.append( - f"qualified_name '{self.qualified_name}' does not match expected " - f"pattern: {self._QUALIFIED_NAME_PATTERN.pattern}" - ) - if for_creation: - if self.connection_qualified_name is UNSET: - errors.append("connection_qualified_name is required for creation") - if self.atlan_schema is UNSET: - errors.append("atlan_schema is required for creation") - if self.schema_name is UNSET: - errors.append("schema_name is required for creation") - if self.schema_qualified_name is UNSET: - errors.append("schema_qualified_name is required for creation") - if self.database_name is UNSET: - errors.append("database_name is required for creation") - if self.database_qualified_name is UNSET: - errors.append("database_qualified_name is required for creation") - if errors: - raise ValueError(f"View validation failed: {errors}") - - def minimize(self) -> "View": - """ - Return a minimal copy of this View with only updater-required fields. - - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new View with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new View instance with only the minimum required fields. - """ - self.validate() - return View(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedView": - """ - Create a :class:`RelatedView` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedView reference to this asset. 
- """ - if self.guid is not UNSET: - return RelatedView(guid=self.guid) - return RelatedView(qualified_name=self.qualified_name) - @classmethod @init_guid def creator( @@ -813,6 +748,11 @@ class ViewRelationshipAttributes(AssetRelationshipAttributes): dbt_seed_assets: Union[List[RelatedDbtSeed], None, UnsetType] = UNSET """DBT seeds that materialize the SQL asset.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -930,6 +870,7 @@ class ViewNested(AssetNested): "dbt_sources", "sql_dbt_sources", "dbt_seed_assets", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -1080,9 +1021,6 @@ def _view_to_nested(view: View) -> ViewNested: is_incomplete=view.is_incomplete, provenance_type=view.provenance_type, home_id=view.home_id, - depth=view.depth, - immediate_upstream=view.immediate_upstream, - immediate_downstream=view.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -1112,6 +1050,7 @@ def _view_from_nested(nested: ViewNested) -> View: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -1120,9 +1059,6 @@ def _view_from_nested(nested: ViewNested) -> View: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_view_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -1220,6 +1156,9 
@@ def _view_from_nested_bytes(data: bytes, serde: Serde) -> View: View.DBT_SOURCES = RelationField("dbtSources") View.SQL_DBT_SOURCES = RelationField("sqlDBTSources") View.DBT_SEED_ASSETS = RelationField("dbtSeedAssets") +View.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) View.MEANINGS = RelationField("meanings") View.MC_MONITORS = RelationField("mcMonitors") View.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/workflow.py b/pyatlan_v9/model/assets/workflow.py index 74abddde4..138af129a 100644 --- a/pyatlan_v9/model/assets/workflow.py +++ b/pyatlan_v9/model/assets/workflow.py @@ -39,13 +39,13 @@ from .data_contract_related import RelatedDataContract from .data_mesh_related import RelatedDataProduct from .data_quality_related import RelatedDataQualityRule, RelatedMetric +from .gcp_dataplex_related import RelatedGCPDataplexAspectType from .gtc_related import RelatedAtlasGlossaryTerm from .monte_carlo_related import RelatedMCIncident, RelatedMCMonitor from .referenceable_related import RelatedReferenceable from .resource_related import RelatedFile, RelatedLink, RelatedReadme from .schema_registry_related import RelatedSchemaRegistrySubject from .soda_related import RelatedSodaCheck -from .workflow_related import RelatedWorkflow # ============================================================================= # FLAT ASSET CLASS @@ -77,6 +77,7 @@ class Workflow(Asset): METRICS: ClassVar[Any] = None DQ_BASE_DATASET_RULES: ClassVar[Any] = None DQ_REFERENCE_DATASET_RULES: ClassVar[Any] = None + GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES: ClassVar[Any] = None MEANINGS: ClassVar[Any] = None MC_MONITORS: ClassVar[Any] = None MC_INCIDENTS: ClassVar[Any] = None @@ -88,6 +89,8 @@ class Workflow(Asset): SCHEMA_REGISTRY_SUBJECTS: ClassVar[Any] = None SODA_CHECKS: ClassVar[Any] = None + type_name: Union[str, UnsetType] = "Workflow" + workflow_template_guid: Union[str, None, UnsetType] = UNSET 
"""GUID of the workflow template from which this workflow was created.""" @@ -147,6 +150,11 @@ class Workflow(Asset): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -184,66 +192,6 @@ class Workflow(Asset): def __post_init__(self) -> None: self.type_name = "Workflow" - # ========================================================================= - # SDK Methods - # ========================================================================= - - def validate(self, for_creation: bool = False) -> None: - """ - Dry-run validation of this Workflow instance. - - Checks that required fields (type_name, name, qualified_name) are set. - When ``for_creation=True``, also checks hierarchy-specific fields - (parent references, denormalized attributes) needed to create this asset. - - This is purely opt-in and is NOT called by any serde path — only by - explicit user invocation (e.g., validating JSONL before sending to Atlan). - - Args: - for_creation: If True, also validate fields required for asset creation. - - Raises: - ValueError: If any required fields are missing or invalid. - """ - errors: list[str] = [] - if self.type_name is UNSET: - errors.append("type_name is required") - if self.name is UNSET: - errors.append("name is required") - if self.qualified_name is UNSET or self.qualified_name is None: - errors.append("qualified_name is required") - if errors: - raise ValueError(f"Workflow validation failed: {errors}") - - def minimize(self) -> "Workflow": - """ - Return a minimal copy of this Workflow with only updater-required fields. 
- - Calls :meth:`validate` first to ensure the instance is valid, then - returns a new Workflow with only the fields needed for an update - (qualified_name, name, and any type-specific additional fields). - - Returns: - A new Workflow instance with only the minimum required fields. - """ - self.validate() - return Workflow(qualified_name=self.qualified_name, name=self.name) - - def relate(self) -> "RelatedWorkflow": - """ - Create a :class:`RelatedWorkflow` reference from this instance. - - Returns a lightweight reference suitable for use in relationship - attributes. Prefers ``guid`` if set, otherwise falls back to - ``qualified_name``. - - Returns: - A RelatedWorkflow reference to this asset. - """ - if self.guid is not UNSET: - return RelatedWorkflow(guid=self.guid) - return RelatedWorkflow(qualified_name=self.qualified_name) - # ========================================================================= # Optimized Serialization Methods (override Asset base class) # ========================================================================= @@ -362,6 +310,11 @@ class WorkflowRelationshipAttributes(AssetRelationshipAttributes): ) """Rules where this dataset is referenced.""" + gcp_dataplex_aspect_type_metadata_entities: Union[ + List[RelatedGCPDataplexAspectType], None, UnsetType + ] = UNSET + """Dataplex entries (assets) that have aspects of this Aspect Type attached.""" + meanings: Union[List[RelatedAtlasGlossaryTerm], None, UnsetType] = UNSET """Glossary terms that are linked to this asset.""" @@ -426,6 +379,7 @@ class WorkflowNested(AssetNested): "metrics", "dq_base_dataset_rules", "dq_reference_dataset_rules", + "gcp_dataplex_aspect_type_metadata_entities", "meanings", "mc_monitors", "mc_incidents", @@ -501,9 +455,6 @@ def _workflow_to_nested(workflow: Workflow) -> WorkflowNested: is_incomplete=workflow.is_incomplete, provenance_type=workflow.provenance_type, home_id=workflow.home_id, - depth=workflow.depth, - immediate_upstream=workflow.immediate_upstream, - 
immediate_downstream=workflow.immediate_downstream, attributes=attrs, relationship_attributes=replace_rels, append_relationship_attributes=append_rels, @@ -535,6 +486,7 @@ def _workflow_from_nested(nested: WorkflowNested) -> Workflow: updated_by=nested.updated_by, classifications=nested.classifications, classification_names=nested.classification_names, + meanings=nested.meanings, labels=nested.labels, business_attributes=nested.business_attributes, custom_attributes=nested.custom_attributes, @@ -543,9 +495,6 @@ def _workflow_from_nested(nested: WorkflowNested) -> Workflow: is_incomplete=nested.is_incomplete, provenance_type=nested.provenance_type, home_id=nested.home_id, - depth=nested.depth, - immediate_upstream=nested.immediate_upstream, - immediate_downstream=nested.immediate_downstream, **_extract_workflow_attrs(attrs), # Merged relationship attributes **merged_rels, @@ -597,6 +546,9 @@ def _workflow_from_nested_bytes(data: bytes, serde: Serde) -> Workflow: Workflow.METRICS = RelationField("metrics") Workflow.DQ_BASE_DATASET_RULES = RelationField("dqBaseDatasetRules") Workflow.DQ_REFERENCE_DATASET_RULES = RelationField("dqReferenceDatasetRules") +Workflow.GCP_DATAPLEX_ASPECT_TYPE_METADATA_ENTITIES = RelationField( + "gcpDataplexAspectTypeMetadataEntities" +) Workflow.MEANINGS = RelationField("meanings") Workflow.MC_MONITORS = RelationField("mcMonitors") Workflow.MC_INCIDENTS = RelationField("mcIncidents") diff --git a/pyatlan_v9/model/assets/workflow_related.py b/pyatlan_v9/model/assets/workflow_related.py index ecedda859..8f83b7970 100644 --- a/pyatlan_v9/model/assets/workflow_related.py +++ b/pyatlan_v9/model/assets/workflow_related.py @@ -76,16 +76,16 @@ class RelatedWorkflowRun(RelatedWorkflow): # type_name inherited from parent with default=UNSET # __post_init__ sets it to "WorkflowRun" so it serializes correctly - workflow_run_workflow_guid: Union[str, None, UnsetType] = UNSET + workflow_workflow_guid: Union[str, None, UnsetType] = UNSET """GUID of the 
workflow from which this run was created.""" - workflow_run_type: Union[str, None, UnsetType] = UNSET + workflow_type: Union[str, None, UnsetType] = UNSET """Type of the workflow from which this run was created.""" - workflow_run_action_choices: Union[List[str], None, UnsetType] = UNSET + workflow_action_choices: Union[List[str], None, UnsetType] = UNSET """List of workflow run action choices.""" - workflow_run_on_asset_guid: Union[str, None, UnsetType] = UNSET + workflow_on_asset_guid: Union[str, None, UnsetType] = UNSET """The asset for which this run was created.""" workflow_run_comment: Union[str, None, UnsetType] = UNSET @@ -94,19 +94,19 @@ class RelatedWorkflowRun(RelatedWorkflow): workflow_run_config: Union[str, None, UnsetType] = UNSET """Details of the approval workflow run.""" - workflow_run_status: Union[str, None, UnsetType] = UNSET + workflow_status: Union[str, None, UnsetType] = UNSET """Status of the run.""" - workflow_run_expires_at: Union[int, None, UnsetType] = UNSET + workflow_expires_at: Union[int, None, UnsetType] = UNSET """Time at which this run will expire.""" - workflow_run_created_by: Union[str, None, UnsetType] = UNSET + workflow_created_by: Union[str, None, UnsetType] = UNSET """Username of the user who created this workflow run.""" - workflow_run_updated_by: Union[str, None, UnsetType] = UNSET + workflow_updated_by: Union[str, None, UnsetType] = UNSET """Username of the user who updated this workflow run.""" - workflow_run_deleted_at: Union[int, None, UnsetType] = UNSET + workflow_deleted_at: Union[int, None, UnsetType] = UNSET """Deletion time of this workflow run.""" def __post_init__(self) -> None: diff --git a/requirements.txt b/requirements.txt index 4874f45c3..e69de29bb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,274 +0,0 @@ -# This file was autogenerated by uv via the following command: -# uv export --all-extras --no-hashes --e . 
-annotated-types==0.7.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pydantic -anyio==4.12.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via httpx -authlib==1.6.9 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -backports-asyncio-runner==1.2.0 ; python_full_version < '3.11' and platform_python_implementation == 'CPython' - # via pytest-asyncio -backports-tarfile==1.2.0 ; python_full_version < '3.12' and platform_python_implementation == 'CPython' - # via jaraco-context -certifi==2026.2.25 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # httpcore - # httpx - # requests -cffi==2.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via cryptography -cfgv==3.4.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pre-commit -cfgv==3.5.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pre-commit -charset-normalizer==3.4.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via requests -colorama==0.4.6 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' and sys_platform == 'win32' - # via pytest -coverage==7.10.7 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pytest-cov -coverage==7.13.4 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pytest-cov -cryptography==46.0.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # authlib - # secretstorage - # types-authlib -deepdiff==8.6.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -distlib==0.4.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via 
virtualenv -docutils==0.21.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via readme-renderer -exceptiongroup==1.3.1 ; python_full_version < '3.11' and platform_python_implementation == 'CPython' - # via - # anyio - # pytest -filelock==3.19.1 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via - # python-discovery - # virtualenv -filelock==3.25.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # python-discovery - # virtualenv -h11==0.16.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via httpcore -httpcore==1.0.9 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via httpx -httpx==0.28.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # httpx-retries - # pyatlan -httpx-retries==0.4.6 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -id==1.5.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via twine -id==1.6.1 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -identify==2.6.15 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pre-commit -identify==2.6.17 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pre-commit -idna==3.11 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # anyio - # httpx - # requests - # yarl -importlib-metadata==8.7.1 ; (python_full_version < '3.12' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython') or (python_full_version < '3.10' and platform_machine == 'ppc64le' and platform_python_implementation == 'CPython') or 
(python_full_version < '3.10' and platform_machine == 's390x' and platform_python_implementation == 'CPython') - # via - # keyring - # twine -iniconfig==2.1.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pytest -iniconfig==2.3.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pytest -jaraco-classes==3.4.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' - # via keyring -jaraco-context==6.1.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via keyring -jaraco-functools==4.4.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' - # via keyring -jeepney==0.9.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' and sys_platform == 'linux' - # via - # keyring - # secretstorage -jinja2==3.1.6 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -keyring==25.7.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' - # via twine -lazy-loader==0.4 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -librt==0.8.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via mypy -markdown-it-py==3.0.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via rich -markdown-it-py==4.0.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via rich -markupsafe==3.0.3 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via jinja2 
-mdurl==0.1.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via markdown-it-py -more-itertools==10.8.0 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' - # via - # jaraco-classes - # jaraco-functools -msgspec==0.20.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -multidict==6.7.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via yarl -mypy==1.19.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -mypy-extensions==1.1.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via mypy -nanoid==2.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -networkx==3.2.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via networkx-stubs -networkx-stubs==0.0.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -nh3==0.3.3 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via readme-renderer -nodeenv==1.10.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pre-commit -orderly-set==5.5.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via deepdiff -packaging==26.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # lazy-loader - # pytest - # twine -pathspec==1.0.4 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via mypy -platformdirs==4.4.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via - # python-discovery - # virtualenv -platformdirs==4.9.4 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # python-discovery - 
# virtualenv -pluggy==1.6.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pytest - # pytest-cov -pre-commit==4.3.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -propcache==0.4.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via yarl -pycparser==2.23 ; python_full_version < '3.10' and implementation_name != 'PyPy' and platform_python_implementation == 'CPython' - # via cffi -pycparser==3.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and implementation_name != 'PyPy' and platform_python_implementation == 'CPython' - # via cffi -pydantic==2.12.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -pydantic-core==2.41.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pydantic -pygments==2.19.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pytest - # readme-renderer - # rich -pytest==8.4.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pytest-asyncio - # pytest-cov - # pytest-order - # pytest-sugar - # pytest-timer - # pytest-vcr -pytest-asyncio==1.2.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -pytest-cov==7.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -pytest-order==1.3.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -pytest-sugar==1.1.1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -pytest-timer==1.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -pytest-vcr==1.0.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -python-dateutil==2.9.0.post0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -python-discovery==1.1.0 ; 
python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via virtualenv -pytz==2026.1.post1 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -pywin32-ctypes==0.2.3 ; python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' and sys_platform == 'win32' - # via keyring -pyyaml==6.0.3 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pre-commit - # pyatlan - # vcrpy -readme-renderer==44.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -requests==2.32.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # id - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -rfc3986==2.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -rich==14.3.3 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via twine -ruff==0.15.5 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -secretstorage==3.3.3 ; python_full_version < '3.10' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' and sys_platform == 'linux' - # via keyring -secretstorage==3.5.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython' and sys_platform == 'linux' - # via keyring -six==1.17.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via python-dateutil -tenacity==9.1.2 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via pyatlan -tenacity==9.1.4 ; python_full_version >= '3.10' and 
python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pyatlan -termcolor==3.1.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via - # pytest-sugar - # pytest-timer -termcolor==3.3.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # pytest-sugar - # pytest-timer -tomli==2.4.0 ; python_full_version <= '3.11' and platform_python_implementation == 'CPython' - # via - # coverage - # mypy - # pytest -twine==6.2.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -types-authlib==1.6.7.20260208 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -types-retry==0.9.9.20250322 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -types-setuptools==81.0.0.20260209 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' -typing-extensions==4.15.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # anyio - # cryptography - # exceptiongroup - # multidict - # mypy - # pydantic - # pydantic-core - # pytest-asyncio - # typing-inspection - # virtualenv -typing-inspection==0.4.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pydantic -urllib3==1.26.20 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via - # requests - # twine - # vcrpy -urllib3==2.6.3 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via - # id - # requests - # twine - # vcrpy -vcrpy==7.0.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pytest-vcr -virtualenv==21.1.0 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via pre-commit -wrapt==2.1.2 ; python_full_version < '3.14' and platform_python_implementation == 'CPython' - # 
via vcrpy -yarl==1.22.0 ; python_full_version < '3.10' and platform_python_implementation == 'CPython' - # via vcrpy -yarl==1.23.0 ; python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython' - # via vcrpy -zipp==3.23.0 ; (python_full_version < '3.12' and platform_machine != 'ppc64le' and platform_machine != 's390x' and platform_python_implementation == 'CPython') or (python_full_version < '3.10' and platform_machine == 'ppc64le' and platform_python_implementation == 'CPython') or (python_full_version < '3.10' and platform_machine == 's390x' and platform_python_implementation == 'CPython') - # via importlib-metadata