@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: BSD-3-Clause
 
 import decimal
+import collections
 import functools
 import inspect
 import json
@@ -225,8 +226,8 @@ class RunReport:
     '''
     def __init__(self):
         # Initialize the report with the required fields
-        self.__filename = None
-        self.__report = {
+        self._filename = None
+        self._report = {
             'session_info': {
                 'data_version': DATA_VERSION,
                 'hostname': socket.gethostname(),
@@ -240,16 +241,16 @@ def __init__(self):
 
     @property
     def filename(self):
-        return self.__filename
+        return self._filename
 
     def __getattr__(self, name):
-        return getattr(self.__report, name)
+        return getattr(self._report, name)
 
     def __getitem__(self, key):
-        return self.__report[key]
+        return self._report[key]
 
     def __rfm_json_encode__(self):
-        return self.__report
+        return self._report
 
     @classmethod
     def create_from_perflog(cls, *logfiles, format=None,
@@ -372,23 +373,60 @@ def _convert(x):
             'run_index': run_index,
             'testcases': testcases
         })
-        return report
+        return [report]
+
+    @classmethod
+    def create_from_sqlite_db(cls, *dbfiles, exclude_sessions=None,
+                              include_sessions=None, time_period=None):
+        dst_backend = StorageBackend.default()
+        dst_schema = dst_backend.schema_version()
+        if not time_period:
+            time_period = {'start': '19700101T0000+0000', 'end': 'now'}
+
+        start = time_period.get('start', '19700101T0000+0000')
+        end = time_period.get('end', 'now')
+        ts_start, ts_end = parse_time_period(f'{start}:{end}')
+        include_sessions = set(include_sessions) if include_sessions else set()
+        exclude_sessions = set(exclude_sessions) if exclude_sessions else set()
+        reports = []
+        for filename in dbfiles:
+            src_backend = StorageBackend.create('sqlite', filename)
+            src_schema = src_backend.schema_version()
+            if src_schema != dst_schema:
+                getlogger().warning(
+                    f'ignoring DB file {filename}: schema version mismatch: '
+                    f'cannot import from DB v{src_schema} to v{dst_schema}'
+                )
+                continue
+
+            sessions = src_backend.fetch_sessions_time_period(ts_start, ts_end)
+            for sess in sessions:
+                uuid = sess['session_info']['uuid']
+                if include_sessions and uuid not in include_sessions:
+                    continue
+
+                if exclude_sessions and uuid in exclude_sessions:
+                    continue
+
+                reports.append(_ImportedRunReport(sess))
+
+        return reports
 
     def _add_run(self, run):
-        self.__report['runs'].append(run)
+        self._report['runs'].append(run)
 
     def update_session_info(self, session_info):
         # Remove timestamps
         for key, val in session_info.items():
             if not key.startswith('time_'):
-                self.__report['session_info'][key] = val
+                self._report['session_info'][key] = val
 
     def update_restored_cases(self, restored_cases, restored_session):
-        self.__report['restored_cases'] = [restored_session.case(c)
-                                           for c in restored_cases]
+        self._report['restored_cases'] = [restored_session.case(c)
+                                          for c in restored_cases]
 
     def update_timestamps(self, ts_start, ts_end):
-        self.__report['session_info'].update({
+        self._report['session_info'].update({
             'time_start': time.strftime(_DATETIME_FMT,
                                         time.localtime(ts_start)),
             'time_start_unix': ts_start,
@@ -403,10 +441,10 @@ def update_extras(self, extras):
         # We prepend a special character to the user extras in order to avoid
         # possible conflicts with existing keys
        for k, v in extras.items():
-            self.__report['session_info'][f'${k}'] = v
+            self._report['session_info'][f'${k}'] = v
 
     def update_run_stats(self, stats):
-        session_uuid = self.__report['session_info']['uuid']
+        session_uuid = self._report['session_info']['uuid']
         for runidx, tasks in stats.runs():
             testcases = []
             num_failures = 0
@@ -501,7 +539,7 @@ def update_run_stats(self, stats):
 
                 testcases.append(entry)
 
-            self.__report['runs'].append({
+            self._report['runs'].append({
                 'num_cases': len(tasks),
                 'num_failures': num_failures,
                 'num_aborted': num_aborted,
@@ -511,23 +549,23 @@ def update_run_stats(self, stats):
             })
 
         # Update session info from stats
-        self.__report['session_info'].update({
-            'num_cases': self.__report['runs'][0]['num_cases'],
-            'num_failures': self.__report['runs'][-1]['num_failures'],
-            'num_aborted': self.__report['runs'][-1]['num_aborted'],
-            'num_skipped': self.__report['runs'][-1]['num_skipped']
+        self._report['session_info'].update({
+            'num_cases': self._report['runs'][0]['num_cases'],
+            'num_failures': self._report['runs'][-1]['num_failures'],
+            'num_aborted': self._report['runs'][-1]['num_aborted'],
+            'num_skipped': self._report['runs'][-1]['num_skipped']
         })
 
     def _save(self, filename, compress, link_to_last):
         filename = _expand_report_filename(filename, newfile=True)
         with open(filename, 'w') as fp:
             if compress:
-                jsonext.dump(self.__report, fp)
+                jsonext.dump(self._report, fp)
             else:
-                jsonext.dump(self.__report, fp, indent=2)
+                jsonext.dump(self._report, fp, indent=2)
             fp.write('\n')
 
-        self.__filename = filename
+        self._filename = filename
         if not link_to_last:
             return
 
@@ -547,7 +585,7 @@ def _save(self, filename, compress, link_to_last):
 
     def is_empty(self):
         '''Return :obj:`True` is no test cases where run'''
-        return self.__report['session_info']['num_cases'] == 0
+        return self._report['session_info']['num_cases'] == 0
 
     def save(self, filename, compress=False, link_to_last=True):
         prefix = os.path.dirname(filename) or '.'
@@ -562,7 +600,7 @@ def store(self):
     def generate_xml_report(self):
         '''Generate a JUnit report from a standard ReFrame JSON report.'''
 
-        report = self.__report
+        report = self._report
         xml_testsuites = etree.Element('testsuites')
         # Create a XSD-friendly timestamp
         session_ts = time.strftime(
@@ -623,6 +661,30 @@ def save_junit(self, filename):
             )
 
 
+class _ImportedRunReport(RunReport):
+    def __init__(self, report):
+        self._filename = f'{report["session_info"]["uuid"]}.json'
+        self._report = report
+
+    def _add_run(self, run):
+        raise NotImplementedError
+
+    def update_session_info(self, session_info):
+        raise NotImplementedError
+
+    def update_restored_cases(self, restored_cases, restored_session):
+        raise NotImplementedError
+
+    def update_timestamps(self, ts_start, ts_end):
+        raise NotImplementedError
+
+    def update_extras(self, extras):
+        raise NotImplementedError
+
+    def update_run_stats(self, stats):
+        raise NotImplementedError
+
+
 def _group_key(groups, testcase):
     key = []
     for grp in groups:
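
A minimal usage sketch of the new create_from_sqlite_db classmethod, assuming RunReport is importable from reframe.frontend.reporting and that 'results.db' is an existing ReFrame SQLite results database; the module path, DB filename, and time period below are illustrative assumptions, not part of this change.

    from reframe.frontend.reporting import RunReport

    # Import stored sessions from an external results database; DB files
    # whose schema version does not match the active backend's are skipped
    # with a warning (see create_from_sqlite_db above).
    reports = RunReport.create_from_sqlite_db(
        'results.db',
        time_period={'start': '20240101T0000+0000', 'end': 'now'}
    )
    for rep in reports:
        # Each entry is a read-only _ImportedRunReport wrapping one session
        print(rep.filename, rep['session_info']['uuid'])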