get_dataset_principals.py
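
A small script that lists the dataset-level access entries (principal and role) for every dataset in one or more BigQuery projects and writes them to a CSV file.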
import csv

from google.api_core.exceptions import GoogleAPIError
from google.cloud import bigquery


def list_principals_for_project(project_id):
    """Return the dataset-level access entries (principal, role) for every
    dataset in the given project."""
    client = bigquery.Client(project=project_id)
    principals = []
    try:
        for dataset in client.list_datasets():
            dataset_id = f"{project_id}.{dataset.dataset_id}"
            dataset_info = client.get_dataset(dataset_id)
            for entry in dataset_info.access_entries:
                # Note: entity_id is a user/group email for most entry types;
                # for special groups it is a name such as "projectOwners", and
                # for authorized views it is a table reference dict.
                principals.append({
                    'project': project_id,
                    'principal': entry.entity_id,
                    'dataset': dataset_info.dataset_id,
                    'role': entry.role,
                })
    except GoogleAPIError as e:
        print(f"An error occurred while listing datasets in {project_id}: {e}")
    return principals


def main(project_ids, output_csv):
    all_principals = []
    for project_id in project_ids:
        all_principals.extend(list_principals_for_project(project_id))

    # Write the collected access entries to CSV.
    with open(output_csv, 'w', newline='') as csvfile:
        fieldnames = ['project', 'principal', 'dataset', 'role']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(all_principals)


if __name__ == '__main__':
    project_ids = []  # TODO: add project IDs here
    output_csv = 'bigquery_dataset_principals.csv'
    main(project_ids, output_csv)
    print(f"Output saved to {output_csv}")