@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: BSD-3-Clause
 
 import decimal
+import collections
 import functools
 import inspect
 import json
@@ -225,8 +226,8 @@ class RunReport:
     '''
     def __init__(self):
         # Initialize the report with the required fields
-        self.__filename = None
-        self.__report = {
+        self._filename = None
+        self._report = {
             'session_info': {
                 'data_version': DATA_VERSION,
                 'hostname': socket.gethostname(),
@@ -240,16 +241,16 @@ def __init__(self):
 
     @property
     def filename(self):
-        return self.__filename
+        return self._filename
 
     def __getattr__(self, name):
-        return getattr(self.__report, name)
+        return getattr(self._report, name)
 
     def __getitem__(self, key):
-        return self.__report[key]
+        return self._report[key]
 
     def __rfm_json_encode__(self):
-        return self.__report
+        return self._report
 
     @classmethod
     def create_from_perflog(cls, *logfiles, format=None,
@@ -372,23 +373,60 @@ def _convert(x):
             'run_index': run_index,
             'testcases': testcases
         })
-        return report
+        return [report]
+
+    @classmethod
+    def create_from_sqlite_db(cls, *dbfiles, exclude_sessions=None,
+                              include_sessions=None, time_period=None):
+        dst_backend = StorageBackend.default()
+        dst_schema = dst_backend.schema_version()
+        if not time_period:
+            time_period = {'start': '19700101T0000+0000', 'end': 'now'}
+
+        start = time_period.get('start', '19700101T0000+0000')
+        end = time_period.get('end', 'now')
+        ts_start, ts_end = parse_time_period(f'{start}:{end}')
+        include_sessions = set(include_sessions) if include_sessions else set()
+        exclude_sessions = set(exclude_sessions) if exclude_sessions else set()
+        reports = []
+        for filename in dbfiles:
+            src_backend = StorageBackend.create('sqlite', filename)
+            src_schema = src_backend.schema_version()
+            if src_schema != dst_schema:
+                getlogger().warning(
+                    f'ignoring DB file {filename}: schema version mismatch: '
+                    f'cannot import from DB v{src_schema} to v{dst_schema}'
+                )
+                continue
+
+            sessions = src_backend.fetch_sessions_time_period(ts_start, ts_end)
+            for sess in sessions:
+                uuid = sess['session_info']['uuid']
+                if include_sessions and uuid not in include_sessions:
+                    continue
+
+                if exclude_sessions and uuid in exclude_sessions:
+                    continue
+
+                reports.append(_ImportedRunReport(sess))
+
+        return reports
 
     def _add_run(self, run):
-        self.__report['runs'].append(run)
+        self._report['runs'].append(run)
 
     def update_session_info(self, session_info):
         # Remove timestamps
         for key, val in session_info.items():
             if not key.startswith('time_'):
-                self.__report['session_info'][key] = val
+                self._report['session_info'][key] = val
 
     def update_restored_cases(self, restored_cases, restored_session):
-        self.__report['restored_cases'] = [restored_session.case(c)
-                                           for c in restored_cases]
+        self._report['restored_cases'] = [restored_session.case(c)
+                                          for c in restored_cases]
 
     def update_timestamps(self, ts_start, ts_end):
-        self.__report['session_info'].update({
+        self._report['session_info'].update({
             'time_start': time.strftime(_DATETIME_FMT,
                                         time.localtime(ts_start)),
             'time_start_unix': ts_start,
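
Note: a minimal sketch of how the new create_from_sqlite_db() classmethod added in this hunk might be called. The import path is assumed from this module's location, and the database filename and time-period values are made-up examples, not taken from this patch:

    # Hypothetical usage (sketch); 'results.db' and the time period are example values.
    from reframe.frontend.reporting import RunReport

    reports = RunReport.create_from_sqlite_db(
        'results.db',
        time_period={'start': '20240101T0000+0000', 'end': 'now'},
        include_sessions=None,
        exclude_sessions=None
    )
    for rep in reports:
        # Each element is an _ImportedRunReport wrapping a stored session
        print(rep['session_info']['uuid'])
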
@@ -403,10 +441,10 @@ def update_extras(self, extras):
         # We prepend a special character to the user extras in order to avoid
         # possible conflicts with existing keys
         for k, v in extras.items():
-            self.__report['session_info'][f'${k}'] = v
+            self._report['session_info'][f'${k}'] = v
 
     def update_run_stats(self, stats):
-        session_uuid = self.__report['session_info']['uuid']
+        session_uuid = self._report['session_info']['uuid']
         for runidx, tasks in stats.runs():
             testcases = []
             num_failures = 0
@@ -501,7 +539,7 @@ def update_run_stats(self, stats):
 
                 testcases.append(entry)
 
-            self.__report['runs'].append({
+            self._report['runs'].append({
                 'num_cases': len(tasks),
                 'num_failures': num_failures,
                 'num_aborted': num_aborted,
@@ -511,23 +549,23 @@ def update_run_stats(self, stats):
             })
 
         # Update session info from stats
-        self.__report['session_info'].update({
-            'num_cases': self.__report['runs'][0]['num_cases'],
-            'num_failures': self.__report['runs'][-1]['num_failures'],
-            'num_aborted': self.__report['runs'][-1]['num_aborted'],
-            'num_skipped': self.__report['runs'][-1]['num_skipped']
+        self._report['session_info'].update({
+            'num_cases': self._report['runs'][0]['num_cases'],
+            'num_failures': self._report['runs'][-1]['num_failures'],
+            'num_aborted': self._report['runs'][-1]['num_aborted'],
+            'num_skipped': self._report['runs'][-1]['num_skipped']
         })
 
     def _save(self, filename, compress, link_to_last):
         filename = _expand_report_filename(filename, newfile=True)
         with open(filename, 'w') as fp:
             if compress:
-                jsonext.dump(self.__report, fp)
+                jsonext.dump(self._report, fp)
             else:
-                jsonext.dump(self.__report, fp, indent=2)
+                jsonext.dump(self._report, fp, indent=2)
             fp.write('\n')
 
-        self.__filename = filename
+        self._filename = filename
         if not link_to_last:
             return
 
@@ -547,7 +585,7 @@ def _save(self, filename, compress, link_to_last):
 
     def is_empty(self):
         '''Return :obj:`True` if no test cases were run'''
-        return self.__report['session_info']['num_cases'] == 0
+        return self._report['session_info']['num_cases'] == 0
 
     def save(self, filename, compress=False, link_to_last=True):
         prefix = os.path.dirname(filename) or '.'
@@ -562,7 +600,7 @@ def store(self):
     def generate_xml_report(self):
         '''Generate a JUnit report from a standard ReFrame JSON report.'''
 
-        report = self.__report
+        report = self._report
         xml_testsuites = etree.Element('testsuites')
         # Create an XSD-friendly timestamp
         session_ts = time.strftime(
@@ -623,6 +661,30 @@ def save_junit(self, filename):
             )
 
 
+class _ImportedRunReport(RunReport):
+    def __init__(self, report):
+        self._filename = f'{report["session_info"]["uuid"]}.json'
+        self._report = report
+
+    def _add_run(self, run):
+        raise NotImplementedError
+
+    def update_session_info(self, session_info):
+        raise NotImplementedError
+
+    def update_restored_cases(self, restored_cases, restored_session):
+        raise NotImplementedError
+
+    def update_timestamps(self, ts_start, ts_end):
+        raise NotImplementedError
+
+    def update_extras(self, extras):
+        raise NotImplementedError
+
+    def update_run_stats(self, stats):
+        raise NotImplementedError
+
+
 def _group_key(groups, testcase):
     key = []
     for grp in groups:
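
Note on the hunk above: _ImportedRunReport wraps a session dictionary fetched from the database and inherits the read-only behaviour of RunReport (the filename property, item access, and attribute forwarding), while every mutating method raises NotImplementedError, so imported session data cannot be updated through the normal report API. A minimal sketch of that behaviour, using a made-up session dictionary rather than real data:

    # Illustrative only: 'sess' is a fabricated minimal session dict.
    sess = {'session_info': {'uuid': 'aaaa-bbbb', 'num_cases': 3}, 'runs': []}
    rep = _ImportedRunReport(sess)
    print(rep.filename)                      # 'aaaa-bbbb.json'
    print(rep['session_info']['num_cases'])  # 3
    try:
        rep.update_extras({'tag': 'x'})
    except NotImplementedError:
        print('imported reports are read-only')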