"connection": "A String", # Optional. The BigQuery connection used to create BigLake tables. Must be in the form projects/{project_id}/locations/{location_id}/connections/{connection_id}
208
208
"location": "A String", # Optional. The location of the BigQuery dataset to publish BigLake external or non-BigLake external tables to. 1. If the Cloud Storage bucket is located in a multi-region bucket, then BigQuery dataset can be in the same multi-region bucket or any single region that is included in the same multi-region bucket. The datascan can be created in any single region that is included in the same multi-region bucket 2. If the Cloud Storage bucket is located in a dual-region bucket, then BigQuery dataset can be located in regions that are included in the dual-region bucket, or in a multi-region that includes the dual-region. The datascan can be created in any single region that is included in the same dual-region bucket. 3. If the Cloud Storage bucket is located in a single region, then BigQuery dataset can be in the same single region or any multi-region bucket that includes the same single region. The datascan will be created in the same single region as the bucket. 4. If the BigQuery dataset is in single region, it must be in the same single region as the datascan.For supported values, refer to https://cloud.google.com/bigquery/docs/locations#supported_locations.
209
+
"project": "A String", # Optional. The project of the BigQuery dataset to publish BigLake external or non-BigLake external tables to. If not specified, the project of the Cloud Storage bucket will be used. The format is "projects/{project_id_or_number}".
209
210
"tableType": "A String", # Optional. Determines whether to publish discovered tables as BigLake external tables or non-BigLake external tables.
210
211
},
211
212
"storageConfig": { # Configurations related to Cloud Storage as the data source. # Cloud Storage related configurations.
@@ -307,9 +308,22 @@ <h3>Method Details</h3>
   "samplingPercent": 3.14, # Optional. The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
 },
 "dataQualityResult": { # The output of a DataQualityScan. # Output only. The result of a data quality scan.
+  "catalogPublishingStatus": { # The status of publishing the data scan result to Catalog. # Output only. The status of publishing the data scan to Catalog.
+    "state": "A String", # Output only. Execution state for catalog publishing.
+  },
   "columns": [ # Output only. A list of results at the column level. A column will have a corresponding DataQualityColumnResult if and only if there is at least one rule with the 'column' field set to it.
     { # DataQualityColumnResult provides a more detailed, per-column view of the results.
       "column": "A String", # Output only. The column specified in the DataQualityRule.
+      "dimensions": [ # Output only. The dimension-level results for this column.
+        { # DataQualityDimensionResult provides a more detailed, per-dimension view of the results.
+          "dimension": { # A dimension captures data quality intent about a defined subset of the rules specified. # Output only. The dimension config specified in the DataQualitySpec, as is.
+            "name": "A String", # Optional. The dimension name a rule belongs to. Custom dimension name is supported with all uppercase letters and a maximum length of 30 characters.
+          },
+          "passed": True or False, # Output only. Whether the dimension passed or failed.
+          "score": 3.14, # Output only. The dimension-level data quality score for this data scan job if and only if the 'dimension' field is set. The score ranges between 0 and 100 (up to two decimal points).
+        },
+      ],
+      "passed": True or False, # Output only. Whether the column passed or failed.
       "score": 3.14, # Output only. The column-level data quality score for this data scan job if and only if the 'column' field is set. The score ranges between 0 and 100 (up to two decimal points).
     },
   ],
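The additions above introduce a catalog publishing status on the result and a per-dimension breakdown inside each column result. A hedged sketch of reading them with the Python client follows; the scan name is a placeholder, and it assumes that dataScans.get with view='FULL' returns the embedded dataQualityResult shown here.

    from googleapiclient.discovery import build

    # Placeholder resource name; the client uses Application Default Credentials.
    scan_name = "projects/my-project/locations/us-central1/dataScans/my-scan"

    dataplex = build("dataplex", "v1")
    scan = dataplex.projects().locations().dataScans().get(
        name=scan_name, view="FULL"  # assumed to include the latest dataQualityResult
    ).execute()

    result = scan.get("dataQualityResult", {})
    print("catalog publishing state:",
          result.get("catalogPublishingStatus", {}).get("state"))

    # Walk the per-column results and the new per-dimension breakdown.
    for col in result.get("columns", []):
        print(col["column"], "passed" if col.get("passed") else "failed", col.get("score"))
        for dim in col.get("dimensions", []):
            name = dim.get("dimension", {}).get("name")
            print("  ", name, "passed" if dim.get("passed") else "failed", dim.get("score"))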
@@ -394,6 +408,7 @@ <h3>Method Details</h3>
   "score": 3.14, # Output only. The overall data quality score. The score ranges between 0 and 100 (up to two decimal points).
 },
 "dataQualitySpec": { # DataQualityScan related setting. # Output only. Settings for a data quality scan.
+  "catalogPublishingEnabled": True or False, # Optional. If set, the latest DataScan job result will be published to Dataplex Catalog.
   "postScanActions": { # The configuration of post scan actions of DataQualityScan. # Optional. Actions to take upon job completion.
     "bigqueryExport": { # The configuration of BigQuery export post scan action. # Optional. If set, results will be exported to the provided BigQuery table.
       "resultsTable": "A String", # Optional. The BigQuery table to export DataQualityScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID or projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
"connection": "A String", # Optional. The BigQuery connection used to create BigLake tables. Must be in the form projects/{project_id}/locations/{location_id}/connections/{connection_id}
515
530
"location": "A String", # Optional. The location of the BigQuery dataset to publish BigLake external or non-BigLake external tables to. 1. If the Cloud Storage bucket is located in a multi-region bucket, then BigQuery dataset can be in the same multi-region bucket or any single region that is included in the same multi-region bucket. The datascan can be created in any single region that is included in the same multi-region bucket 2. If the Cloud Storage bucket is located in a dual-region bucket, then BigQuery dataset can be located in regions that are included in the dual-region bucket, or in a multi-region that includes the dual-region. The datascan can be created in any single region that is included in the same dual-region bucket. 3. If the Cloud Storage bucket is located in a single region, then BigQuery dataset can be in the same single region or any multi-region bucket that includes the same single region. The datascan will be created in the same single region as the bucket. 4. If the BigQuery dataset is in single region, it must be in the same single region as the datascan.For supported values, refer to https://cloud.google.com/bigquery/docs/locations#supported_locations.
531
+
"project": "A String", # Optional. The project of the BigQuery dataset to publish BigLake external or non-BigLake external tables to. If not specified, the project of the Cloud Storage bucket will be used. The format is "projects/{project_id_or_number}".
516
532
"tableType": "A String", # Optional. Determines whether to publish discovered tables as BigLake external tables or non-BigLake external tables.
517
533
},
518
534
"storageConfig": { # Configurations related to Cloud Storage as the data source. # Cloud Storage related configurations.
@@ -614,9 +630,22 @@ <h3>Method Details</h3>
   "samplingPercent": 3.14, # Optional. The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
 },
 "dataQualityResult": { # The output of a DataQualityScan. # Output only. The result of a data quality scan.
+  "catalogPublishingStatus": { # The status of publishing the data scan result to Catalog. # Output only. The status of publishing the data scan to Catalog.
+    "state": "A String", # Output only. Execution state for catalog publishing.
+  },
   "columns": [ # Output only. A list of results at the column level. A column will have a corresponding DataQualityColumnResult if and only if there is at least one rule with the 'column' field set to it.
     { # DataQualityColumnResult provides a more detailed, per-column view of the results.
       "column": "A String", # Output only. The column specified in the DataQualityRule.
+      "dimensions": [ # Output only. The dimension-level results for this column.
+        { # DataQualityDimensionResult provides a more detailed, per-dimension view of the results.
+          "dimension": { # A dimension captures data quality intent about a defined subset of the rules specified. # Output only. The dimension config specified in the DataQualitySpec, as is.
+            "name": "A String", # Optional. The dimension name a rule belongs to. Custom dimension name is supported with all uppercase letters and a maximum length of 30 characters.
+          },
+          "passed": True or False, # Output only. Whether the dimension passed or failed.
+          "score": 3.14, # Output only. The dimension-level data quality score for this data scan job if and only if the 'dimension' field is set. The score ranges between 0 and 100 (up to two decimal points).
+        },
+      ],
+      "passed": True or False, # Output only. Whether the column passed or failed.
       "score": 3.14, # Output only. The column-level data quality score for this data scan job if and only if the 'column' field is set. The score ranges between 0 and 100 (up to two decimal points).
     },
   ],
@@ -701,6 +730,7 @@ <h3>Method Details</h3>
   "score": 3.14, # Output only. The overall data quality score. The score ranges between 0 and 100 (up to two decimal points).
 },
 "dataQualitySpec": { # DataQualityScan related setting. # Output only. Settings for a data quality scan.
+  "catalogPublishingEnabled": True or False, # Optional. If set, the latest DataScan job result will be published to Dataplex Catalog.
   "postScanActions": { # The configuration of post scan actions of DataQualityScan. # Optional. Actions to take upon job completion.
     "bigqueryExport": { # The configuration of BigQuery export post scan action. # Optional. If set, results will be exported to the provided BigQuery table.
       "resultsTable": "A String", # Optional. The BigQuery table to export DataQualityScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID or projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID