-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathbeautify.py
executable file
·161 lines (122 loc) · 5.43 KB
/
beautify.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
#!/usr/bin/env python3
# Beautifies the logs in 'logs/' by creating an AsciiDoc table and a cross-reference
# to each file, allowing them to be opened directly from the preview window.
import os
import json
from collections import defaultdict
from utils.root import get_project_root
# Get the project root directory
PROJECT_ROOT = get_project_root()
# Antora log severities, in the order they are rendered in the output document.
ANTORA_LEVELS = ['fatal', 'error', 'warn', 'info', 'debug']
MULTIPLE_LOGS = [] # If more than one Antora playbook, add its name (i.e. 'docs', 'widget').
# Resolve the input JSON log path(s): one per named playbook, or the single default.
def get_input_file():
    if MULTIPLE_LOGS:
        return [os.path.join(PROJECT_ROOT, f'logs/default-{name}.json') for name in MULTIPLE_LOGS]
    return [os.path.join(PROJECT_ROOT, 'logs/default.json')]
# Resolve the output AsciiDoc path(s): one per named playbook, or the single default.
def get_output_file():
    if MULTIPLE_LOGS:
        return [os.path.join(PROJECT_ROOT, f'logs/beautified-{name}.adoc') for name in MULTIPLE_LOGS]
    return [os.path.join(PROJECT_ROOT, 'logs/beautified.adoc')]
# Build the document title for a beautified log file; 'log_name' is the
# playbook name (lowercased in the title) or None for the default log.
def get_title(log_name=None):
    suffix = 'logs' if log_name is None else f'{log_name.lower()} logs'
    return f'Beautified {suffix}'
# For each log file, create a dictionary with the following information:
# 'data', 'log_type', 'absolute_file_path', 'local_path', and 'issue'
def create_dictionary(default_log):
    """Parse raw JSON log lines into {level: {timestamp: details}}.

    Each line of 'default_log' is expected to be a JSON object with at least
    'level', 'time', 'msg', and a 'file' object carrying 'path' (and
    optionally 'line').
    """
    dictionary = defaultdict(lambda: defaultdict(dict))
    for entry in default_log:
        # Tolerate blank lines (e.g. a trailing newline) that would crash json.loads.
        if not entry.strip():
            continue
        log_data = json.loads(entry)
        # First-level key: log severity; second-level key: timestamp (assumed unique).
        antora_level = log_data['level']
        unique_id = log_data['time']
        message = log_data['msg']
        path = os.path.relpath(log_data['file']['path'], PROJECT_ROOT)
        # Fix: guard on 'utils/' (with slash) to match the split below. The old
        # check for plain 'utils' raised IndexError on paths containing 'utils'
        # without a following slash (e.g. 'myutils.py').
        module = path.split('utils/')[1].split('/')[
            0] if 'utils/' in path else 'N/A'
        # Cross-reference so the file can be opened from the AsciiDoc preview.
        # NOTE(review): assumes '/' separators — relpath yields '\\' on Windows; confirm target OS.
        xref = f"xref:../{path}[{path.split('/')[-1]}]"
        line = log_data['file'].get('line', 'N/A')
        dictionary[antora_level][unique_id] = {
            'message': message,
            'path': path,
            'module': module,
            'xref': xref,
            'line': line
        }
    return dictionary
# Sort logs alphabetically by file path within each log type.
def sort_dictionary(created_dictionary):
    """Return {log_type: {issue: [details]}} with issues ordered by 'path'.

    Fix: the previous version called sorted() on a one-element list per issue,
    which is a no-op — entries were never actually ordered. Here the issues of
    each level are inserted in path order, which dicts preserve (Python 3.7+),
    so downstream iteration sees them sorted. The output structure (each issue
    mapping to a one-element list of details) is unchanged.
    """
    sorted_dictionary = defaultdict(lambda: defaultdict(list))
    for log_type, issues in created_dictionary.items():
        # Insert in ascending 'path' order so iteration order is sorted.
        for issue, details in sorted(issues.items(), key=lambda kv: kv[1]['path']):
            sorted_dictionary[log_type][issue] = [details]
    return sorted_dictionary
# Count the number of issues for each 'antora_level'.
def count_issues(sorted_dictionary):
    """Return a mapping of log level -> number of issues recorded at that level."""
    totals = defaultdict(int)
    for antora_level, unique_ids in sorted_dictionary.items():
        # len() already yields 0 for an empty mapping, so no branching is needed.
        totals[antora_level] = len(unique_ids)
    return totals
# Reformat logs into an AsciiDoc document with a table of contents and one
# issue table per severity level, titled 'current_log'.
def create_table(sorted_dictionary, current_log):
    parts = [f'= {current_log}\n\n']
    # Table of contents: one xref per level with its issue count.
    parts.append('.Table of contents\n')
    counts = count_issues(sorted_dictionary)
    for level in ANTORA_LEVELS:
        parts.append(f'* xref:_{level}[]: {counts.get(level, 0)} issue(s)\n')
    parts.append('\n')
    # One section per level, each anchored so the TOC xrefs resolve.
    for level in ANTORA_LEVELS:
        parts.append(f'[#_{level}]\n')
        parts.append(f'== {level.upper()}\n\n')
        if level not in sorted_dictionary:
            parts.append('_NONE_\n\n')
            continue
        parts.append('[cols="1,1,1,1"]\n')
        parts.append('|===\n')
        parts.append('|Issue|Module|File|Line\n\n')
        for detail_list in sorted_dictionary[level].values():
            for row in detail_list:
                parts.append(
                    f"|{row['message']}\n"
                    f"|{row['module']}\n"
                    f"|{row['xref']}\n"
                    f"|{row['line']}\n\n"
                )
        parts.append('|===\n\n')
    return ''.join(parts)
# For each input file in 'MULTIPLE_LOGS' create a dictionary, sort the dictionary, create a table, then
# write the table to the relevant output file.
def main():
    input_log_files = get_input_file()
    output_log_files = get_output_file()
    # Input and output lists are built from the same MULTIPLE_LOGS, so they pair up 1:1.
    for index, (source_path, target_path) in enumerate(zip(input_log_files, output_log_files)):
        with open(source_path, 'r') as source:
            raw_entries = source.readlines()
        title = get_title(MULTIPLE_LOGS[index] if MULTIPLE_LOGS else None)
        rendered = create_table(sort_dictionary(create_dictionary(raw_entries)), title)
        with open(target_path, 'w') as target:
            target.write(rendered)
# Script entry point: run only when executed directly, not when imported.
if __name__ == "__main__":
    main()