diff --git a/Makefile b/Makefile index 4f95d85f..2ddaeb4e 100644 --- a/Makefile +++ b/Makefile @@ -270,6 +270,7 @@ check: mkdir -p $(COVER_DB) ; \ echo "*** Run once, force parallel ***" ; \ LCOV_FORCE_PARALLEL=1 $(MAKE) -s -C tests check LCOV_HOME=`pwd` ; \ + LCOV_FORCE_PARALLEL=1 $(MAKE) -s -C example LCOV_HOME=`pwd` ; \ echo "*** Run again, no force ***" ; \ fi @$(MAKE) -s -C tests check LCOV_HOME=`pwd` @@ -293,7 +294,7 @@ else @echo "Checking changes in source files for coding style issues (MODE=diff):" endif @RC=0 ; \ - CHECKFILES=`find . -path ./.git -prune -o \( \( -type f -exec grep -q '^#!.*perl' {} \; \) -o -name '*.pm' \) -not \( -name '*.tdy' -o -name '*.orig' -o -name '*~' \) -print `; \ + CHECKFILES=`find bin lib scripts tests -path ./.git -prune -o \( \( -type f -exec grep -q '^#!.*perl' {} \; \) -o -name '*.pm' \) -not \( -name '*.tdy' -o -name '*.orig' -o -name '*~' \) -print `; \ for FILE in $$CHECKFILES ; do \ $(CHECKSTYLE) "$$FILE"; \ if [ 0 != $$? ] ; then \ diff --git a/README b/README index db82db4d..c30f7b94 100644 --- a/README +++ b/README @@ -1,6 +1,6 @@ ------------------------------------------------- - README file for the LTP GCOV extension (LCOV) - -- Last changes: 2025-10-01 +- Last changes: 2026-02-10 ------------------------------------------------- Description @@ -430,6 +430,9 @@ LCOV features and capabilities fall into 7 major categories: vii) compress the 'function detail' table to improve readability by shortening long C++ template and function names. Sample script: simplify.pm + viii) use history from previous execution to improve runtime + performance of current execution by improving load balancing. + Sample script: history.pm The callback may be any desired script or executable - but there may be performance advantages if it is written as a Perl module. 
@@ -445,7 +448,7 @@ LCOV features and capabilities fall into 7 major categories: Related options: --annotate-script, --criteria-script, --version-script --resolve-script, --select-script, --context-script - --simplify-script + --simplify-script --history-script f) Performance @@ -466,7 +469,7 @@ LCOV features and capabilities fall into 7 major categories: See the genhtml/lcov/geninfo man pages for details - Related options: --parallel, --memory, --profile + Related options: --parallel, --memory, --profile, --history-script g) Language/tool support diff --git a/bin/genhtml b/bin/genhtml index f9684987..38ae1f56 100755 --- a/bin/genhtml +++ b/bin/genhtml @@ -2231,18 +2231,18 @@ sub type sub lineNo { - my ($self, $which, $lineNo) = @_; + my ($self, $which, $lineNo, $filename) = @_; my $loc; if ($which eq "current") { $loc = LINENO_CURRENT; } else { - die("unknown key $which - should be 'base' or 'current'") + die("unknown key '$which' - should be 'base' or 'current'") unless $which eq "base"; $loc = LINENO_BASE; } lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, - "inconsistent $which line location $loc: " . - $self->[$loc] . " -> $lineNo") + "\"$filename\": inconsistent '$which' line location: " . + $self->[$loc] . " -> $lineNo") if (defined($lineNo) && defined($self->[$loc]) && $self->[$loc] != $lineNo); @@ -2320,7 +2320,7 @@ sub _mergeBranchData if (!$branchData->hasBlock($branchId)) { # don't know how to get here...but someone on the internet # managed to do it - so we need to handle the error - my $which = $loc == DATA_BASELINE ? 'baseline' : 'current'; + my $which = $loc == DATA_BASELINE ? 'base' : 'current'; lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, $filename . ':' . $current->line() . ": '$which' line " . @@ -2341,7 +2341,7 @@ sub _mergeBranchData # LCOV_EXCL_START if ($nc != $nd) { # similarly: this should not happen - but it might - my $which = $loc == DATA_BASELINE ? 'baseline' : 'current'; + my $which = $loc == DATA_BASELINE ? 
'base' : 'current'; lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, $filename . ':' . $current->line() . ": '$which' line " . $self->lineNo($which) . @@ -2351,7 +2351,7 @@ sub _mergeBranchData # LCOV_EXCL_STOP for (my $i = ($nc > $nd ? $nd : $nc) - 1; $i >= 0; --$i) { my $br = $d->[$i]; - $c->[$i]->merge($br); + $c->[$i]->merge($br, $filename, $current->line()); } # now append any new values from 'branchData': # (go here only if |D| > |C| and we ignore the mismatch error @@ -2498,6 +2498,16 @@ sub differential_function return $function->[DATA_DIFFERENTIAL]; } +# forward-declare some types +package DiffMap; + +use constant { + EQUAL => 0, + INSERT => 1, + DELETE => 2, + +}; + # structure holding coverage data for a particular file: # - associated with a line line number: # - line coverage @@ -2683,52 +2693,29 @@ sub _findLineData my $lineDataMap = $self->lineMap(); my $linedata; - if ($type ne "delete") { + if ($type != DiffMap::DELETE) { if (!defined($lineDataMap->{$current_lineNo})) { $linedata = LineData->new($type); $lineDataMap->{$current_lineNo} = $linedata; - $linedata->lineNo('current', $current_lineNo); + $linedata->lineNo('current', $current_lineNo, $filename); } else { $linedata = $lineDataMap->{$current_lineNo}; } - $linedata->lineNo('base', $base_lineNo); + $linedata->lineNo('base', $base_lineNo, $filename); } else { # nothing walks the keylist so a prefix is sufficient to distinguish # records that should be summarized but not displayed my $dline = "<<<" . $base_lineNo; if (!exists($lineDataMap->{$dline})) { $linedata = LineData->new($type); - $linedata->lineNo('base', $base_lineNo); + $linedata->lineNo('base', $base_lineNo, $filename); $lineDataMap->{$dline} = $linedata; # look up and/or down to find the first baseline line # which is not deleted - and store that as the corresponding # 'current' line. 
# this way, we can know the extents of the deleted region - my $c; - for (my $i = $base_lineNo - 1; $i > 0; --$i) { - if ('delete' ne $diffMap->type($filename, $diffMap->OLD, $i)) { - $c = $diffMap->lookup($filename, $diffMap->OLD, $i); - last; - } - } - if (!defined($c)) { - # there were no 'current' lines above me - so I must be - # at the first line in the file. It must not be deleted - $c = 1; - die("$filename:1: incorrectly marked 'delete'") - if ( - 'delete' eq $diffMap->type($filename, $diffMap->OLD, $c)); - } - die("$filename: no current block for deleted line $base_lineNo") - unless defined($c); - $linedata->lineNo('current', -$c); - # keep track of where deleted lines were - so we can - # mark them in the source view - if (exists($self->[DELETED_LINE_LEADER]->{$c})) { - push(@{$self->[DELETED_LINE_LEADER]->{$c}}, $linedata); - } else { - $self->[DELETED_LINE_LEADER]->{$c} = [$linedata]; - } + my $c = $diffMap->find_deleted_line_leader($filename, $base_lineNo); + $linedata->lineNo('current', -$c, $filename); } else { $linedata = $lineDataMap->{$dline}; } @@ -2742,31 +2729,34 @@ sub _categorizeLineCov my ($self, $filename, $base_data, $current_data, $diffMap, $verbose) = @_; my $lineDataMap = $self->lineMap(); - if ($verbose) { - print("categorize lines $filename\n"); - } + print("categorize lines $filename\n") + if ($verbose); + # $lineCovBase, $lineCovCurrent are CountData objects my $lineCovBase = $base_data->sum() if defined($base_data); my $lineCovCurrent = $current_data->sum(); - # walk the branch coverpoints to check for data consistency: - # - we expect a line coverpoint in every location which has branches - # - if not found, the generate message and/or create a fake coverpoint - # LLVM seems to like to generate inconsistent data. 
- my $branchCurrent = $current_data->sumbr(); - foreach my $line ($branchCurrent->keylist()) { - # just ignore bogus data - we already warned when we read the data - next if ($line <= 0); - - my $type = $diffMap->type($filename, $diffMap->NEW, $line); - # LCOV_EXCL_START - if ($type eq 'delete') { - lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, - "'current' line $filename:$line should not be marked 'delete'"); - delete($branchCurrent->{$line}); - next; + if ($lcovutil::check_data_consistency) { + # walk the branch coverpoints to check for data consistency: + # - we expect a line coverpoint in every location which has branches + # - if not found, the generate message and/or create a fake coverpoint + # LLVM seems to like to generate inconsistent data. + my $branchCurrent = $current_data->sumbr(); + foreach my $line ($branchCurrent->keylist()) { + # just ignore bogus data - we already warned when we read the data + next if ($line <= 0); + + my $type = $diffMap->type($filename, $diffMap->NEW, $line); + # LCOV_EXCL_START + if ($type == DiffMap::DELETE) { + lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, + "'current' line $filename:$line should not be marked 'delete'" + ); + delete($branchCurrent->{$line}); + next; + } + # LCOV_EXCL_STOP } - # LCOV_EXCL_STOP } # it is sufficient to just walk the 'global' (merged) line @@ -2778,7 +2768,7 @@ sub _categorizeLineCov # just ignore bogus data - we already warned when we read the data next if ($line <= 0); my $type = $diffMap->type($filename, $diffMap->NEW, $line); - if ($type eq 'delete') { + if ($type == DiffMap::DELETE) { # can happen in some inconsistent case, when there are certain # out-of-range references in a file which contained diffs - and we # ignored the error check @@ -2795,7 +2785,7 @@ sub _categorizeLineCov $linedata = $lineDataMap->{$line}; } my $val = $lineCovCurrent->value($line); - $linedata->lineNo("current", $line); + $linedata->lineNo("current", $line, $filename); $linedata->curr_count($val); 
$linedata->tla($val == 0 ? 'UNC' : 'GNC') if (!defined($lineCovBase)); @@ -2831,7 +2821,7 @@ sub _categorizeLineCov foreach my $line (sort keys %$lineDataMap) { my $linedata = $lineDataMap->{$line}; my $tla; - if ($linedata->type() eq "insert") { + if ($linedata->type() == DiffMap::INSERT) { if (!$linedata->in_curr()) { # can get here if the 'diff' file is wrong with respect to # baseline vs. current coverage data - e.g., showing that @@ -2843,7 +2833,7 @@ sub _categorizeLineCov } $tla = ($linedata->curr_count() > 0) ? "GNC" : "UNC"; print(" insert $line $tla\n") if ($verbose); - } elsif ($linedata->type() eq "delete") { + } elsif ($linedata->type() == DiffMap::DELETE) { if (!$linedata->in_base()) { # similarly: can get here if the diff vs baseline/current data # is inconsistent. @@ -2856,7 +2846,7 @@ sub _categorizeLineCov print(" delete $line $tla\n") if ($verbose); } else { die("FileCoverageInfo:: deleted segment line=$line file=$filename") - unless $linedata->type() eq "equal"; + unless $linedata->type() == DiffMap::EQUAL; if ($linedata->in_base() && $linedata->in_curr()) { $tla = @@ -2924,13 +2914,15 @@ sub _categorizeBranchCov $branchCovLines{$line} = 1; # we expect that the line number matches... 
- $data->lineNo("current", $line); + $data->lineNo("current", $line, $filename); # append this branch data for the line my $currBranchData = $branchCurrent->value($line); $data->current_branch($currBranchData, $filename); if (!defined($branchBaseline)) { my $categorized = BranchEntry->new($line); $data->differential_branch($categorized); + die("no branch data for deleted $filename:$line") + unless defined($currBranchData); _cloneBranchEntry($categorized, $currBranchData, 'UNC', 'GNC'); } } # foreach line in 'current' branch data @@ -2943,7 +2935,7 @@ sub _categorizeBranchCov my $data = $self->_findLineData($diffMap, $filename, $base_line); my $curr_line = $data->lineNo('current'); my $type = $data->type(); - if ($type ne 'delete') { + if ($type != DiffMap::DELETE) { $branchCovLines{$curr_line} = 1; } else { # the line has been deleted...just record the data @@ -2966,24 +2958,39 @@ sub _categorizeBranchCov my @currBlocks = defined($curr) ? $curr->blocks() : (); my @baseBlocks = defined($base) ? 
$base->blocks() : (); - if ($type eq 'insert') { + if ($type == DiffMap::INSERT) { # can get here if diff data vs baseline/current is not consistent lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, "baseline branch data should not be defined for inserted line $filename:$line" ) if defined($base); - - _cloneBranchEntry($categorized, $curr, 'UNC', 'GNC'); - } elsif ($type eq 'delete') { + if (defined($curr)) { + _cloneBranchEntry($categorized, $curr, 'UNC', 'GNC'); + } else { + # LCOV_EXCL_START + lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, + "current branch data should not be defined for inserted line $filename:$line" + ); + # LCOV_EXCL_STOP + } + } elsif ($type == DiffMap::DELETE) { # similarly: get here if diff data vs baseline/current is not consistent lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, "current branch data should not be defined for deleted line $filename:$line" ) if defined($curr); - _cloneBranchEntry($categorized, $base, 'DUB', 'DCB'); + if (defined($base)) { + _cloneBranchEntry($categorized, $base, 'DUB', 'DCB'); + } else { + # LCOV_EXCL_START + lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, + "baseline branch data should not be defined for deleted line $filename:$line" + ); + # LCOV_EXCL_STOP + } } else { lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, "unexpected branch coverage type $type at $filename:$line") - if $type ne 'equal'; + if $type != DiffMap::EQUAL; # branch might or might not be in both baseline and current foreach my $branchId (@baseBlocks) { @@ -3096,7 +3103,7 @@ sub _categorizeMcdcCov $mcdcCovLines{$line} = 1; # we expect that the line number matches... 
- $data->lineNo("current", $line); + $data->lineNo("current", $line, $filename); # append this MC/DC data for the line (this is an MCDC_Block) my $currMcdcData = $mcdcCurrent->value($line); $data->current_mcdc($currMcdcData, $filename); @@ -3116,7 +3123,7 @@ sub _categorizeMcdcCov my $data = $self->_findLineData($diffMap, $filename, $base_line); my $curr_line = $data->lineNo('current'); my $type = $data->type(); - if ($type ne 'delete') { + if ($type != DiffMap::DELETE) { $mcdcCovLines{$curr_line} = 1; } else { # the line has been deleted...just record the data @@ -3139,14 +3146,14 @@ sub _categorizeMcdcCov my $currGroups = defined($curr) ? $curr->groups() : (); my $baseGroups = defined($base) ? $base->groups() : (); - if ($type eq 'insert') { + if ($type == DiffMap::INSERT) { # can get here if diff data vs baseline/current is not consistent lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, "baseline MC/DC data should not be defined for inserted line $filename:$line" ) if defined($base); _cloneMcdcEntry($categorized, $curr, 'UNC', 'GNC'); - } elsif ($type eq 'delete') { + } elsif ($type == DiffMap::DELETE) { # similarly: get here if diff data vs baseline/current is not consistent lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, "current MC/DC data should not be defined for deleted line $filename:$line" @@ -3156,7 +3163,7 @@ sub _categorizeMcdcCov } else { lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, "unexpected MC/DC coverage type $type at $filename:$line") - if $type ne 'equal'; + if $type != DiffMap::EQUAL; # group might or might not be in both baseline and current while (my ($groupSize, $bgroup) = each(%$baseGroups)) { @@ -3236,7 +3243,7 @@ sub _categorizeFunctionCov $funcCovLines{$line} = 1; lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, "'current' line $filename:$line should not be marked 'delete'") - if $type eq 'delete'; + if $type == DiffMap::DELETE; my $data; if (!exists($lineDataMap->{$line})) { $data = 
LineData->new($type); @@ -3249,7 +3256,7 @@ sub _categorizeFunctionCov unless $data->type() eq $type; } # we expect that the line number matches... - $data->lineNo("current", $line); + $data->lineNo("current", $line, $filename); # function data for the line $data->current_function($func); if (!defined($funcBase)) { @@ -3275,7 +3282,7 @@ sub _categorizeFunctionCov my $type = $data->type(); my $curr_line = $data->lineNo('current'); - if ($type ne 'delete') { + if ($type != DiffMap::DELETE) { $funcCovLines{$curr_line} = 1; } else { # the line has been deleted...just record the data @@ -3310,21 +3317,23 @@ sub _categorizeFunctionCov # wasn't recognized as a function before (e.g., unused template) lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, "$filename:$line: unexpected undef baseline function data for deleted $name" - ) if $type eq 'delete'; - _cloneFunctionEntry($categorized, $curr, - $type eq 'insert' ? 'UNC' : 'UIC', - $type eq 'insert' ? 'GNC' : 'GIC'); + ) if $type == DiffMap::DELETE; + _cloneFunctionEntry($categorized, + $curr, + $type == DiffMap::INSERT ? 'UNC' : 'UIC', + $type == DiffMap::INSERT ? 'GNC' : 'GIC'); } elsif (!defined($curr)) { lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, "$filename:$line: unexpected undef current function data for inserted $name" - ) if $type eq 'insert'; - _cloneFunctionEntry($categorized, $base, - $type eq 'delete' ? 'DUB' : 'EUB', - $type eq 'delete' ? 'DCB' : 'ECB'); + ) if $type == DiffMap::INSERT; + _cloneFunctionEntry($categorized, + $base, + $type == DiffMap::DELETE ? 'DUB' : 'EUB', + $type == DiffMap::DELETE ? 'DCB' : 'ECB'); } else { lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, "unexpected function coverage type $type at $filename:$line") - if $type ne 'equal'; + if $type != DiffMap::EQUAL; # if we know end lines for this function, then check if there # have been any changes in the function body. 
If any changes, # then mark GNC or UNC @@ -3345,7 +3354,7 @@ sub _categorizeFunctionCov # - line is new and is source code, OR # - line is unchanged and was code before and isn't code # now, or line wasn't code before and is now - if ($type eq 'insert' && + if ($type == DiffMap::INSERT && defined($lineCovCurrent->value($line))) # line is code { $changed = 1; @@ -3373,7 +3382,8 @@ sub _categorizeFunctionCov # claim a change if line is deleted and was code # before. # note that we already checked unchanged lines, above - if ($type ne 'equal' && # line is in old but not in new + if ($type != + DiffMap::EQUAL && # line is in old but not in new defined($lineCovBase->value($bline)) ) # line is code { @@ -3477,7 +3487,7 @@ use constant { TYPE => 2, _START => 0, - _END => 1 + _END => 1, }; sub new @@ -3490,7 +3500,7 @@ sub new [{}, {}], # def location # element 0: old filename -> line number where this # entry starts - # element 1: new filename -> line numbern + # element 1: new filename -> line number {} # unchanged ]; bless $self, $class; @@ -3588,7 +3598,7 @@ sub recreateBaseline my @lines; my $didWarn = 0; foreach my $chunk (@$diffs) { - if ($chunk->[TYPE] eq 'equal') { + if ($chunk->[TYPE] == EQUAL) { my ($from, $to) = @{$chunk->[NEW]}; my $f = $from - 1; my $t = $to - 1; @@ -3607,7 +3617,7 @@ sub recreateBaseline push(@lines, $currentSrcLines->[$f++]); } } - } elsif ($chunk->[TYPE] eq 'delete') { + } elsif ($chunk->[TYPE] == DiffMap::DELETE) { my $r = $chunk->[OLD]; for (my $i = $r->[_START]; $i <= $r->[_END]; ++$i) { die("missing baseline line $i") @@ -3622,8 +3632,128 @@ sub recreateBaseline return \@lines; } +sub _findChunk +{ + my ($lineMap, $vers, $line, $filename) = @_; + my $notThis = $vers == OLD ? 
INSERT : DELETE; + # LCOV_EXCL_START + my $linearSearch = sub { + my $idx = 0; + while ($idx <= $#$lineMap) { + my $chunk = $lineMap->[$idx]; + my $v = $chunk->[$vers]; + last + if ($v->[_START] <= $line && + $line <= $v->[_END] && + $chunk->[TYPE] != $notThis); + ++$idx; + } + return $idx; + }; + # LCOV_EXCL_STOP + + # might be marginally faster to use linear search up to some small + # list size threshold...maybe 5 or 10? + my $max = $#$lineMap; + my $min = 0; + my $mid; + while (1) { + $mid = int(($max + $min) / 2); + my $chunk = $lineMap->[$mid]; + my $d = $chunk->[$vers]; + if ($line < $d->[_START]) { + $max = $mid; + } elsif ($line > $d->[_END]) { + $min = $mid; + } else { + # this entry is in range - match is either this one + # or the one on one side or the other. + # There are at most 2 entries for any particular line - + # and there are two only if this is the insert or delete point + if ($chunk->[TYPE] != $notThis) { + die("bad lookup") + unless (!$lcovutil::debug || + $mid == &$linearSearch()); + return $chunk; + } + # is it one side or the other? 
+ die("unexpectedly at beginning") if $mid == 0; + my $prev = $lineMap->[$mid - 1]; + my $pd = $prev->[$vers]; + if ($pd->[_END] >= $line) { + die("unexpected prev element") + unless ($pd->[_START] <= $line && + $prev->[TYPE] != $notThis); + die("bad lookup 2") + unless (!$lcovutil::debug || + $mid - 1 == &$linearSearch()); + return $prev; + } + if ($mid == $#$lineMap) { + # we are at last element + die("wrong place") + unless (!$lcovutil::debug || + &$linearSearch() == $#$lineMap + 1); + return undef; + } + my $next = $lineMap->[$mid + 1]; + my $nd = $next->[$vers]; + die("expected 'next' to match but did not") + unless ($nd->[_START] <= $line && + $nd->[_END] >= $line && + $next->[TYPE] != $notThis); + die("bad lookup 3") + unless (!$lcovutil::debug || + $mid - 1 == &$linearSearch()); + return $next; + } + my $diff = $max - $min; + if ($diff <= 1) { + $mid = $min; + $mid = $max + if $line > $lineMap->[$min]->[$vers]->[_END]; + last; + } + } + my $chunk = $lineMap->[$mid]; + my $d = $chunk->[$vers]; + if ($d->[_START] <= $line && + $d->[_END] >= $line) { + if ($chunk->[TYPE] != $notThis) { + die("bad lookup 4") + unless (!$lcovutil::debug || + $mid == &$linearSearch()); + return $chunk; + } + if ($mid <= $#$lineMap) { + # we aren't at the last element in the list + my $next = $lineMap->[$mid + 1]; + my $nd = $next->[$vers]; + if ($nd->[_START] <= $line && + $nd->[_END] >= $line && + $next->[TYPE] != $notThis) { + + die("wrong lookup") + unless (!$lcovutil::debug || + $mid + 1 == &$linearSearch()); + return $next; + } + } + } + die("didn't find element for $filename:$line: $#$lineMap") + unless (!$lcovutil::debug || + $#$lineMap + 1 == &$linearSearch()); + return undef; +} + sub lookup { + # given line in OLD or NEW version, return corresponding line in the + # other. 
+ # $vers == OLD if $line is in OLD (ie., we are looking for NEW + # and + # $vers == NEW if $line is in NEW and we are looking for line number in OLD + my ($self, $file, $vers, $line) = @_; $file = $self->findName($file); @@ -3633,20 +3763,22 @@ sub lookup return $line; } - my @candidates = - grep { $_->[$vers]->[_START] < $line } @{$self->[LINEMAP]->{$file}}; - # candidates is empty if $line==1 - which is unusual, as there is typically - # a comment, copyright notice, #include, or whatever on the first line - return $line unless @candidates; - - my $chunk = pop @candidates; - - my $alt = ($vers == OLD) ? NEW : OLD; - - if ($line > $chunk->[$vers]->[_END]) { - return ($chunk->[$alt]->[_END] + ($line - $chunk->[$vers]->[_END])); + # search from end of list to find last chunk which starts before this line + my $lineMap = $self->[LINEMAP]->{$file}; + my $chunk = _findChunk($lineMap, $vers, $line, $file); + my $alt = ($vers == OLD) ? NEW : OLD; + my $c; + if (defined($chunk)) { + my $delta = $line - $chunk->[$vers]->[_START]; + $c = $chunk->[$alt]->[_START] + $delta; + $c += $chunk->[TYPE] == DiffMap::DELETE; + } else { + # this element is past the last chunk + my $v = $lineMap->[-1]; + my $delta = $line - $v->[$vers]->[_END]; + $c = $v->[$alt]->[_END] + $delta; } - return ($chunk->[$alt]->[_START] + ($line - $chunk->[$vers]->[_START])); + return $c; } sub type @@ -3661,39 +3793,56 @@ sub type (@main::base_filenames || $main::diff_filename) ) { - return "equal"; # categories will be "GIC", "UIC" + return DiffMap::EQUAL; # categories will be "GIC", "UIC" } else { - return "insert"; # categories will be "GNC", "UNC" + return DiffMap::INSERT; # categories will be "GNC", "UNC" } } if (!defined($self->[FILEMAP]->{$file})) { #mapping with no filemap when baseline file was deleted - return "delete"; + return DiffMap::DELETE; } + my $lineMap = $self->[LINEMAP]->{$file}; + my $chunk = _findChunk($lineMap, $vers, $line, $file); + # '$line' is past the end of the last chunk in 
the diff - + # all the lines which weren't mentioned must be equal + return EQUAL unless defined($chunk); - # ->{start} equal $line only if beginning of range or omitted in ->{type} - my @candidates = - grep { $_->[$vers]->[_START] <= $line } @{$self->[LINEMAP]->{$file}}; - my $chunk = pop @candidates; - my $prev = pop @candidates; - while (defined($prev) && - $line >= $prev->[$vers]->[_START] && - $line <= $prev->[$vers]->[_END]) { - $chunk = $prev; - $prev = pop @candidates; - } - if (!defined($chunk)) { - warn "DiffMap::type(): got undef chunk at $file, $vers, $line\n"; - return "undef chunk"; - } - if (!defined($chunk->[TYPE])) { - warn "DiffMap::type(): got undef type at $file, $vers, $line\n"; - return "undef type"; - } + die("DiffMap::type(): got undef type at $file, $vers, $line") + unless defined($chunk->[TYPE]); return $chunk->[TYPE]; } +sub find_deleted_line_leader +{ + my ($self, $filename, $base_lineNo) = @_; + + # find the 'delete' chunk which contains this line - + # - the line number of the leader in the baseline is the line + # immediately before the first line in the chunk (in the 'OLD' + # section + # - the line number of the leader in the current file is the + # first line in the 'NEW' section of this chunk. + # - we expect the 'NEW' begin and end lines are the same + # - we also expect the leader to be the 'end' line of the + # 'NEW' section in the previous chunk. 
+ + my $file = $self->findName($filename); + my $lineMap = $self->[LINEMAP]->{$file}; + # there must be a lineMap, if we have a deleted line + die("lineMap must exist $filename:$base_lineNo") unless defined($lineMap); + + my $chunk = _findChunk($lineMap, OLD, $base_lineNo, $filename); + # should not fall off end of 'diff' - there must be a 'delete' chunk + die("missing 'delete' chunk in diff $filename:$base_lineNo") + unless defined($chunk); + die("unexpected chunk $filename:$base_lineNo") + unless ($chunk->[TYPE] == DELETE && + $chunk->[NEW]->[_START] == $chunk->[NEW]->[_END]); + return $chunk->[NEW]->[_START]; +} + sub compute_deleted_lines { my ($self, $filename) = @_; @@ -3702,7 +3851,7 @@ sub compute_deleted_lines return undef unless defined($lineMap); my %hash; foreach my $chunk (@$lineMap) { - next unless $chunk->[TYPE] eq 'delete'; + next unless $chunk->[TYPE] == DELETE; $hash{$chunk->[NEW]->[_START]} = $chunk->[OLD]; } @@ -3988,7 +4137,8 @@ sub _read_udiff my $time = '[1-9]{1}[0-9]{3}\-[0-9]{2}\-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]*)?( .[0-9]+)?'; # Parse diff file line by line - my $verbose = 0; + my $verbose = 0; + my $skipCurrentFile = 0; while (<$diffHdl>) { chomp($_); s/\r//g; @@ -4010,7 +4160,7 @@ sub _read_udiff # === /^=== (.+)$/ && do # note: filename may contain whitespace { - if ($filename) { + if ($filename && !$skipCurrentFile) { die("not case insensitive") unless !$lcovutil::case_insensitive || ($filename eq lc($filename)); @@ -4020,6 +4170,10 @@ sub _read_udiff my $file = $1; $file = lcovutil::strip_directories($file, $main::strip); my $key = ReadCurrentSource::resolve_path($file, 1); + # skip diff data for excluded files - but skip quietly + $skipCurrentFile = TraceFile::skipCurrentFile($key, 'diff'); + last if $skipCurrentFile; + $key = lc($key) if $lcovutil::case_insensitive; if (exists($self->[UNCHANGED]->{$key})) { # unchanged entry flag value should be 1 @@ -4047,6 +4201,8 @@ sub _read_udiff last; }; + next if $skipCurrentFile; 
+ # Filename of old file: # --- /^--- (.+)$/ && do { @@ -4103,7 +4259,7 @@ sub _read_udiff $.; # record original name too.. $file_new } # new file - chunk starts here - $chunk = _newChunk('equal', 1, 1); + $chunk = _newChunk(EQUAL, 1, 1); last; }; # Start of diff block: @@ -4111,13 +4267,13 @@ sub _read_udiff /^\@\@\s+-(\d+),(\d+)\s+\+(\d+),(\d+)\s+\@\@.*$/ && do { if ($1 > ($chunk->[OLD]->[_END])) { # old start skips "equal" lines - if ($chunk->[TYPE] ne "equal") { + if ($chunk->[TYPE] != EQUAL) { if ($filename) { push(@{$self->[LINEMAP]->{$filename}}, $chunk); _printChunk($chunk) if ($verbose); } $chunk = - _newChunk('equal', + _newChunk(EQUAL, $chunk->[OLD]->[_END] + 1, $chunk->[NEW]->[_END] + 1); } @@ -4136,13 +4292,13 @@ sub _read_udiff if ($old_block == 0 && $new_block == 0) { last; } - if ($chunk->[TYPE] ne "equal") { + if ($chunk->[TYPE] != EQUAL) { if ($filename) { push(@{$self->[LINEMAP]->{$filename}}, $chunk); _printChunk($chunk) if ($verbose); } # next chunk starts right after current one - $chunk = _newChunk('equal', + $chunk = _newChunk(EQUAL, $chunk->[OLD]->[_END] + 1, $chunk->[NEW]->[_END] + 1); } else { @@ -4169,12 +4325,12 @@ sub _read_udiff } $lines->{$baseline_lineno} = $1; - if ($chunk->[TYPE] ne "delete") { + if ($chunk->[TYPE] != DELETE) { if ($filename) { push(@{$self->[LINEMAP]->{$filename}}, $chunk); _printChunk($chunk) if ($verbose); } - $chunk = _newChunk('delete', $baseline_lineno, + $chunk = _newChunk(DELETE, $baseline_lineno, $chunk->[NEW]->[_END]); } else { $chunk->[OLD]->[_END] = $baseline_lineno; @@ -4187,12 +4343,12 @@ sub _read_udiff if ($old_block == 0 && $new_block == 0) { last; } - if ($chunk->[TYPE] ne "insert") { + if ($chunk->[TYPE] != INSERT) { if ($filename) { push(@{$self->[LINEMAP]->{$filename}}, $chunk); _printChunk($chunk) if ($verbose); } - $chunk = _newChunk('insert', + $chunk = _newChunk(INSERT, $chunk->[OLD]->[_END], $chunk->[NEW]->[_END] + 1); } else { @@ -4205,12 +4361,12 @@ sub _read_udiff if ($old_block == 0 
&& $new_block == 0) { last; } - if ($chunk->[TYPE] ne "equal") { + if ($chunk->[TYPE] != EQUAL) { if ($filename) { push(@{$self->[LINEMAP]->{$filename}}, $chunk); _printChunk($chunk) if ($verbose); } - $chunk = _newChunk('equal', + $chunk = _newChunk(EQUAL, $chunk->[OLD]->[_END] + 1, $chunk->[NEW]->[_END] + 1); } else { @@ -4944,6 +5100,9 @@ sub _countLineTlaData if ($SummaryInfo::tlaLocation{$tla} & 0x1) { # skip "DUB' and 'DCB' categories - which are not in current # and thus have no line associated + # @todo if desired, we _could_ enhance the 'annotate' callback + # to look up the baseline SHA/CL and get age/author information + # from that. Seems unnecessary, though. #lcovutil::info("$l: $tla" . $lineData->in_curr() . "\n"); @@ -5049,7 +5208,8 @@ sub _countFunctionTlaData # LCOV_EXCL_START if (!defined($mergedTla)) { lcovutil::ignorable_error($lcovutil::ERROR_INTERNAL, - "undef TLA for function '" . $func->name() . + "undef TLA for function '" . + $func->name() . "' hit " . $h->[0] . " at line " . $line . " (" . $lineData->tla() . ' ' . $lineData->curr_count() . ")"); @@ -5742,9 +5902,10 @@ sub _synthesize $suffix = ' ... synthesizing fake content for last ' . $plural; } lcovutil::ignorable_error($lcovutil::ERROR_RANGE, - $self->path() . ' contains only ' . $currentLast . - ' lines but coverage data refers to line ' . - $last_line . $suffix); + $self->path() . + ' contains only ' . $currentLast . + ' lines but coverage data refers to line ' . + $last_line . $suffix); $why = 'not long enough'; } # Simulate gcov behavior @@ -5979,10 +6140,68 @@ sub new {} ]; - # sort the worklist so segment will tend to contain files from the same directory - foreach my $f (sort $current_data->files()) { + # if the worklist is sorted alphabetically, then each segment tends to + # contain files from the same directory. + # However, large files (which may take a long time to process) often + # cluster inside the same directory - so a random (hash) order often + # performs better. 
+ # Better is to look at profile data from the prior execution of + # this job - say, from last night's regressions - and to distribute + # the files we think will be long-running (...because they ran a long + # the last time) into different segments so they can run in parallel. + # We also want to schedule those long-running files so they are executed + # first. That way, we don't end up starting some long job late in the + # game - and then have to wait a long time for a single task to finish + # (as opposed to it running in parallel with a lot of other stuff). + my @predicted; + my @unknown; # no history + foreach my $f ($current_data->files()) { my $traceInfo = $current_data->data($f); my $filename = ReadCurrentSource::resolve_path($traceInfo->filename()); + + my $predicted; + if ( defined($lcovutil::profileHistoryCallback) + && + defined($predicted = + $lcovutil::profileHistoryCallback->history($filename)) + ) { + push(@predicted, [$f, $filename, $predicted]); + } else { + # put the ones we don't know about at the start of the list + # - work on them first, trusting that we will have parallelism + # capacity for the ones we know about + push(@unknown, [$f, $filename]); + } + } + lcovutil::info( + scalar(@unknown) . ' (of ' . (scalar(@predicted) + scalar(@unknown)) . 
+ ") files do not appear in \'--history\' profile.\n") + if (defined($lcovutil::profileHistoryCallback) && + @unknown); + + # sort the 'predicted' ones based on expected execution time + # tie break alphabetically + @predicted = + sort({ $b->[2] <=> $a->[2] or $a->[1] cmp $b->[1] } @predicted); + # interleave the long-running files so they get distributed between segments + my @fileOrder; + my $offset = 0; + my $idx = 0; + for (my $i = 0; $i <= $#predicted; ++$i) { + push(@fileOrder, $predicted[$idx]); + $idx += $lcovutil::maxParallelism; + if ($idx > $#predicted) { + ++$offset; + $idx = $offset; + } + } + die("wrong calculation") unless $#predicted == $#fileOrder; + push(@fileOrder, @unknown); # unknown ones to the back + + foreach my $d (@fileOrder) { + my ($f, $filename) = @$d; + #my $traceInfo = $current_data->data($f); + #my $filename = ReadCurrentSource::resolve_path($traceInfo->filename()); my ($vol, $parentDir, $file) = File::Spec->splitpath($filename); if (!File::Spec->file_name_is_absolute($filename)) { if ($parentDir) { @@ -7998,7 +8217,8 @@ sub write_function_page($$$$$$$$$$$$$$$) File::Spec->catfile($rel_dir, "$base_name.func-b.$html_ext"); } html_create(*HTML_HANDLE, $filename); - my $pagetitle = "LCOV - $title - " . + my $pagetitle = + "LCOV - $title - " . File::Spec->catfile($trunc_dir, $base_name) . " - functions"; write_html_prolog(*HTML_HANDLE, $base_dir, $pagetitle); write_header(*HTML_HANDLE, @@ -8031,8 +8251,7 @@ sub process_file($$$$$) . (($main::diff_filename && $diff_data->containsFile($filename)) ? ' (source code changed)' : - '') . 
- "\n"); + '')); my $base_name = basename($filename); my $base_dir = get_relative_base_path($rel_dir); @@ -8077,9 +8296,10 @@ sub process_file($$$$$) $diff_data, defined($main::verboseScopeRegexp) && $filename =~ m/$main::verboseScopeRegexp/); - my $then = Time::HiRes::gettimeofday(); - $lcovutil::profileData{categorize}{$filename} = $then - $now; - + my $then = Time::HiRes::gettimeofday(); + my $elapsed = $then - $now; + $lcovutil::profileData{categorize}{$filename} = $elapsed; + info(sprintf(" (%0.3fs)\n", $elapsed)); my $r = " lines=$lines_found hit=$lines_hit"; $r .= " functions=$fn_found hit=$fn_hit" if $lcovutil::func_coverage && $fn_found != 0; @@ -8230,6 +8450,7 @@ sub process_file($$$$$) } } if (%$differentialFunctionMap) { + my $funcStart = Time::HiRes::gettimeofday(); foreach my $sort_type (@funcview_sortlist) { write_function_page([$differentialFunctionMap, \%lineCov, \%branchCov, \%mcdcCov, @@ -8249,6 +8470,10 @@ sub process_file($$$$$) $sort_type, $fileSummary); } + my $funcEnd = Time::HiRes::gettimeofday(); + my $elapsed = $funcEnd - $funcStart; + $lcovutil::profileData{function_html}{$filename} = $elapsed; + #info(sprintf(" function HTML %0.3fs\n", $elapsed)); } } @@ -8285,7 +8510,10 @@ sub process_file($$$$$) } } my $to = Time::HiRes::gettimeofday(); - $lcovutil::profileData{html}{$filename} = $to - $from; + $elapsed = $to - $from; + $lcovutil::profileData{html}{$filename} = $elapsed; + #info(sprintf(" HTML %0.3fs\n", $elapsed)); + return ($testdata, $testfncdata, $testbrdata, $testcase_mcdc); } @@ -10831,7 +11059,8 @@ sub get_branch_html($$) my $line = $differentialBranch->line(); my $next = $fileDetail->nextBranchTlaGroup($tla, $line); $href = - "$char"; $tlaLinks{$tla} = $href; } @@ -10929,7 +11158,8 @@ sub get_mcdc_html($$) my ($char, $title); my $expr_name = $expr->expression(); - $title = ${prefix} . ($sense ? 'True' : 'False') . + $title = + ${prefix} . ($sense ? 'True' : 'False') . ' sense of expression "' . $expr->expression() . 
'" '; if ($expr->parent()->num_groups() > 1) { $title .= 'in group "' . $expr->groupSize() . '" '; @@ -10968,7 +11198,8 @@ sub get_mcdc_html($$) my $line = $differentialMcdc->line(); my $next = $fileDetail->nextMcdcTlaGroup($tla, $line); $href = - "SENSE"; # cache the href as 'next' lookup is moderately expensive $tlaLinks{$tla} = $href; @@ -12085,7 +12316,8 @@ sub write_header(*$$$$$$$) } elsif ($type == $HDR_TESTDESC) { # Test description header $base_dir = ""; - $view = "" . + $view = + "" . "$overview_title - test case descriptions"; } @@ -12097,12 +12329,14 @@ sub write_header(*$$$$$$$) if ($frames && ($type == $HDR_SOURCE || $type == $HDR_FUNC)) { # Need to break frameset when clicking this link $test .= - " ( " . "" . + "" . "view descriptions )"; } else { $test .= - " ( " . "" . + "" . "view descriptions )"; } } @@ -12578,8 +12812,8 @@ sub get_sort_code($$$) } my $help = " title=\"Click to sort table by $alt\""; $alt = "Sort by $alt"; - return " " . - $link_start . '" . $link_start . + '' . $link_end . ''; } @@ -12615,7 +12849,8 @@ sub get_line_code($$$$$$$) } elsif ($type == $HEAD_DETAIL_HIDDEN) { # Text + link to detail view my $help = "title=\"Click to go to per-testcase coverage details\""; - my $detail_link = 'index' . $key . $bin_type . + my $detail_link = + 'index' . $key . $bin_type . '-detail' . $fileview_sortname[$sort_type] . '.' . $html_ext; $result .= " ( " . "$testname"; } write_file_table_detail_entry(*HTML_HANDLE, $base_dir, @@ -13421,7 +13657,8 @@ sub write_source($$$$$$$$) } # using 8 characters for line number field, with different # foreground/background - to make distinctive - $empty .= ' ' . + $empty .= + ' ' . (' ' x (8 - 3)) . 
'...'; $empty .= ' ' x $main::br_field_width if $showBranches; $empty .= ' ' x $main::mcdc_field_width if $showMcdc; diff --git a/bin/geninfo b/bin/geninfo index e6521397..2d60569a 100755 --- a/bin/geninfo +++ b/bin/geninfo @@ -223,6 +223,7 @@ our $defaultChunkSize; our $defaultInterval; our %childRetryCounts; our @large_files; +our $callFromLcov; our $cwd = getcwd(); chomp($cwd); @@ -243,6 +244,7 @@ $ENV{"LC_ALL"} = "C"; # retrieve settings from RC file - use these if not overridden on command line my %geninfo_opts = ("test-name|t=s" => \$test_name, + 'call-from-lcov' => \$callFromLcov, "output-filename|o=s" => \$output_filename, "base-directory|b=s" => \$base_directory, "follow|f" => \$lcovutil::opt_follow, @@ -906,15 +908,15 @@ sub add_worklist_entry { my ($self, $filename, $directory) = @_; if (exists($self->[1]->{$filename})) { - lcovutil::ignorable_error( - $lcovutil::ERROR_USAGE, - "duplicate file $filename in both " . - $self->[1]->{$filename} . " and $directory" - . - (lcovutil::is_ignored( + lcovutil::ignorable_error($lcovutil::ERROR_USAGE, + "duplicate file $filename in both " . + $self->[1]->{$filename} . + " and $directory" + . + (lcovutil::is_ignored( $lcovutil::ERROR_USAGE) ? - ' (skip latter)' : - '')); + ' (skip latter)' : + '')); return; } $self->[1]->{$filename} = $directory; @@ -1333,8 +1335,45 @@ sub gen_info(@) } my @worklist; my $serialChunk = [1, []]; - my $chunk = [0, []]; # [isSerial, [fileList]] - FILE: foreach my $j (@$filelist) { + my $nChunks = + int(floor((scalar(@$filelist) + $chunkSize - 1) / $chunkSize)); + for (my $i = 0; $i < $nChunks; ++$i) { + push(@worklist, [0, []]); + } + my $work = $filelist; # default to the original order + if ($lcovutil::profileHistoryCallback && + ($chunkSize > 1 || exists($ENV{LCOV_FORCE_PARALLEL}))) { + my (@predicted, @unknown); + foreach my $j (@$filelist) { + my ($dir, $gcda, $gcno) = @$j; + my $key = defined($gcda) ? 
$gcda : $gcno; + my $k = substr($key, length($dir) + 1); + my $predicted = $lcovutil::profileHistoryCallback->history($k); + if (defined($predicted)) { + push(@predicted, [$j, $predicted, $key]); + } else { + # put the ones we don't know about at the start of the list + # - work on them first, trusting that we will have parallelism + # capacity for the ones we know about + push(@unknown, $j); + } + } + @predicted = + sort({ $b->[1] <=> $a->[1] or $a->[2] cmp $b->[2] } @predicted); + + lcovutil::info(scalar(@unknown) . + ' (of ' . (scalar(@predicted) + scalar(@unknown)) . + ") files do not appear in \'--history\' profile.\n") + if (@unknown); + if (@predicted) { + my @order = map({ $_->[0] } @predicted); + # unknown to the back... + push(@order, @unknown); + $work = \@order; + } + } + my $chunkId = 0; + FILE: foreach my $j (@$work) { my ($dir, $gcda, $gcno) = @$j; foreach my $f ($gcda, $gcno) { next unless defined($f); # might not be a GCDA file @@ -1345,15 +1384,15 @@ sub gen_info(@) next FILE; } } + die("unexpected chunkId $chunkId") if $chunkId >= $nChunks; + my $chunk = $worklist[$chunkId++]; push(@{$chunk->[1]}, $j); - if (scalar(@{$chunk->[1]}) == $chunkSize) { - push(@worklist, $chunk); - $chunk = [0, []]; + if ($chunkId == $nChunks) { + $chunkId = 0; } } #foreach DATA_FILE - push(@worklist, $chunk) if @{$chunk->[1]}; - # serial chunk is at the top of the stack - so serial processing - # happens before we fork multiple processes + # serial chunk is at the top of the stack - so serial processing + # happens before we fork multiple processes push(@worklist, $serialChunk) if (@{$serialChunk->[1]}); diff --git a/bin/get_version.sh b/bin/get_version.sh index e515231b..3d158527 100755 --- a/bin/get_version.sh +++ b/bin/get_version.sh @@ -25,7 +25,7 @@ else fi # Fallback -[ -z "$VERSION" ] && VERSION="2.4" +[ -z "$VERSION" ] && VERSION="2.4.1" [ -z "$RELEASE" ] && RELEASE="beta" [ -z "$FULL" ] && FULL="$VERSION-$RELEASE" diff --git a/bin/lcov b/bin/lcov index 
4e1fef23..7d3eeaba 100755 --- a/bin/lcov +++ b/bin/lcov @@ -821,6 +821,7 @@ sub lcov_geninfo(@) @param = (File::Spec->catfile($tool_dir, 'geninfo'), @dir); # make things less confusing for user, by using the name they actually invoked push(@param, '--toolname', $lcovutil::tool_name); + push(@param, '--call-from-lcov'); if ($output_filename) { push(@param, "--output-filename", $output_filename); } @@ -914,6 +915,9 @@ sub lcov_geninfo(@) # memory has not been multiplied by Mb yet - so just pass the integer value push(@param, '--memory', $lcovutil::maxMemory) if defined($lcovutil::maxMemory); + # function coverage was enabled by default + push(@param, '--no-function-coverage') + if (!$lcovutil::func_coverage); push(@param, "--branch-coverage") if $lcovutil::br_coverage; push(@param, "--mcdc") if $lcovutil::mcdc_coverage; push(@param, '--fail-under-lines', $lcovutil::fail_under_lines) @@ -935,6 +939,9 @@ sub lcov_geninfo(@) ['--version-script', \@lcovutil::extractVersionScript], ['--resolve-script', \@lcovutil::resolveCallback], ['--substitute', \@lcovutil::file_subst_patterns], + ['--history-script', + \@lcovutil::profileHistoryCallback + ], ['--omit-lines', \@lcovutil::omit_line_patterns], ['--erase-functions', \@lcovutil::exclude_function_patterns diff --git a/example/Makefile b/example/Makefile index 51a5ca78..d88e4317 100644 --- a/example/Makefile +++ b/example/Makefile @@ -83,7 +83,7 @@ gauss.o: methods/gauss.c gauss.h output: example descriptions test_noargs test_2_to_2000 test_overflow test_differential @echo @echo '*' - @echo '* Generating HTML output' + @echo '* Generating HTML output from data captured from tests' @echo '*' @echo $(GENHTML) trace_noargs.info trace_args.info trace_overflow.info \ @@ -95,7 +95,7 @@ output: example descriptions test_noargs test_2_to_2000 test_overflow test_diffe @echo '* See '`pwd`/output/index.html @echo '*' @echo - @echo "Generate HTML with hierarchical report and additional navigation features" + @echo "Generate HTML with 
hierarchical report and additional navigation features (same testcase data)" @echo '*' $(GENHTML) trace_noargs.info trace_args.info trace_overflow.info \ --output-directory hierarchical \ @@ -175,7 +175,8 @@ test_differential: @echo "Step 2: Capture initial coverage" (cd $(REPO) ; \ - $(LCOV) --capture -o baseline.info -d . --version-script $(SCRIPTDIR)/gitversion.pm ) + $(LCOV) --capture -o baseline.info -d . \ + --version-script $(SCRIPTDIR)/gitversion.pm --profile ) @echo "Step 3: Modify source code" @(cd $(REPO) ; \ @@ -192,27 +193,34 @@ test_differential: ./example ) @echo "Step 5: Capture new coverage (after source changes)" + @echo " (note that this example is too tiny for history reuse" + @echo " to affect runtime performance)" (cd $(REPO) ; \ - $(LCOV) --capture -o current.info -d . --version-script $(SCRIPTDIR)/gitversion.pm ) + $(LCOV) --capture -o current.info -d . \ + --version-script $(SCRIPTDIR)/gitversion.pm \ + --profile \ + --history $(SCRIPTDIR)/history.pm,baseline.info.json ) @echo "Compute source diffs" (cd $(REPO) ; \ $(SCRIPTDIR)/gitdiff -b . 
`git rev-list -n 1 baseline` \ `git rev-list -n 1 current` > udiff.txt ) @echo "Step 6: Generate differential coverage report" - @echo " (caching revision control data may result in improved runtime performance)" + @echo " - caching revision control data may result in improved runtime performance)" + @echo " - capture 'genhtml' execution profile that we can use later" (cd $(REPO) ; \ $(GENHTML) -o differential --baseline-file baseline.info \ --diff-file udiff.txt --show-owners \ --title "Differential coverage example" \ --annotate $(SCRIPTDIR)/gitblame.pm,--cache,./my_cache \ - --version-script $(SCRIPTDIR)/gitversion.pm \ + --version-script $(SCRIPTDIR)/gitversion.pm --profile \ -- current.info ) @echo "point your browser to `realpath $(REPO)`/differential/index.html" @echo "Step 7: Generate subset report for code review:" - @echo " (reuse revision control data cached in previous step)" + @echo " - reuse execution profile and cached revision control data" + @echo " captured in previous step" (cd $(REPO) ; \ $(GENHTML) -o review --baseline-file baseline.info \ --diff-file udiff.txt --show-owners \ @@ -221,7 +229,9 @@ test_differential: --version-script $(SCRIPTDIR)/gitversion.pm \ --select-script $(SCRIPTDIR)/select.pm \ --select-script --tla --select-script UNC,UIC,LBC \ - -- current.info ) + --history-script $(SCRIPTDIR)/history.pm \ + --history-script ./differential/genhtml.json --profile \ + -- current.info ) @echo "point your browser to `realpath $(REPO)`/review/index.html" diff --git a/example/README b/example/README index 6c3facbc..d0cf08cb 100644 --- a/example/README +++ b/example/README @@ -66,5 +66,16 @@ Code review: - Real use cases are likely to use more sophisticated select-script callbacks (e.g., to select from a range of changelists). + - The example uses caching and profile history to improve runtime + performance - see the man pages for a more detailed description + of the features. 
There is no effect with a tiny example - but + a real project may see benefit. + The 'spreadsheet.py' application script can be used to convert + JSON profile files into more readable excel spreadsheets. This + can be useful to see the effect (if any) of the caching and/or + history features, and can show where time is spent for your example. + This can be helpful, to suggest opportunities to optimize the LCOV + implementation. + Feel free to edit the Makefile or to run the lcov utilities directly, to see the effect of other options that you find in the lcov man pages. diff --git a/lib/lcovutil.pm b/lib/lcovutil.pm index 7f55c4ff..8fe91f81 100644 --- a/lib/lcovutil.pm +++ b/lib/lcovutil.pm @@ -490,7 +490,12 @@ our %pngMap = ('=' => ['CBC', 'LBC'] our @opt_rc; # list of command line RC overrides our %profileData; -our $profile; # the 'enable' flag/name of output file +our $profile; # the 'enable' flag/name of output file +# historical profile - optimize performance by somewhat carefully sorting +# job list - use callback mechanism to provide more configurable support +# for complex build environments +our @profileHistoryCallback; +our $profileHistoryCallback; # need to defer any errors until after the options have been # processed as user might have suppressed the error we were @@ -998,6 +1003,8 @@ sub configure_callback #$package->import(qw(new)); # the first value in @_ is the script name $$cb = $class->new(@args); + die("callback constructor returned 'undef'") + unless defined($$cb); if (exists($ENV{LCOV_FORCE_PARALLEL}) || (defined($lcovutil::maxParallelism) && 1 != $lcovutil::maxParallelism) @@ -1030,7 +1037,8 @@ sub configure_callback # not module $$cb = ScriptCaller->new(@args); } - push(@configured_callbacks, $cb); + push(@configured_callbacks, $cb) + if defined($$cb); } sub cleanup_callbacks @@ -1258,8 +1266,9 @@ our %argCommon = ("tempdir=s" => \$tempdirname, "config-file=s" => \@unsupported_config, "rc=s%" => \%unsupported_rc, "profile:s" => 
\$lcovutil::profile, - "exclude=s" => \@lcovutil::exclude_file_patterns, - "include=s" => \@lcovutil::include_file_patterns, + 'history-script=s' => \@lcovutil::profileHistoryCallback, + "exclude=s" => \@lcovutil::exclude_file_patterns, + "include=s" => \@lcovutil::include_file_patterns, "erase-functions=s" => \@lcovutil::exclude_function_patterns, "omit-lines=s" => \@lcovutil::omit_line_patterns, "substitute=s" => \@lcovutil::file_subst_patterns, @@ -1325,24 +1334,13 @@ my @include_stack; sub read_config($$) { my ($filename, $opts) = @_; - my $key; - my $value; - local *HANDLE; - my $set_value = 0; info(1, "read_config: $filename\n"); - my $path = abs_path($filename); - if (!defined($path)) { - lcovutil::ignorable_error($lcovutil::ERROR_USAGE, - "config file '$filename' does not exist"); - # as below - this line is unreachable because we can't ignore - # the message due to order of processing - see below. - return 0; # LCOV_UNREACHABLE_LINE - } elsif (exists($included_config_files{$path})) { + my $f; + eval { $f = InOutFile->in($filename); }; + if ($@) { lcovutil::ignorable_error($lcovutil::ERROR_USAGE, - 'config file inclusion loop detected: "' . - join('" -> "', @include_stack) . - '" -> "' . $filename . '"'); + "cannot read configuration file '$filename': $!"); # this line is unreachable as we can't ignore the 'usage' error # because it is generated when we parse the config-file options # but the '--ignore-errors' option isn't parsed until later, after @@ -1350,18 +1348,21 @@ sub read_config($$) # This could be fixed by doing some early processing on the command # line (similar to how config file options are handled) - but that # seems like overkill. Just force the user to fix the issues. 
- return 0; # LCOV_UNREACHABLE_LINE + return 0; # didn't set anything LCOV_UNREACHABLE_LINE } - - if (!open(HANDLE, "<", $filename)) { + my $path = abs_path($filename); + die("abs_path returned undef for $filename") unless defined($path); + if (exists($included_config_files{$path})) { lcovutil::ignorable_error($lcovutil::ERROR_USAGE, - "cannot read configuration file '$filename': $!"); - # similarly, this line is also unreachable for the same reasons as - # described above. - return 0; # didn't set anything LCOV_UNREACHABLE_LINE + 'config file inclusion loop detected: "' . + join('" -> "', @include_stack) . + '" -> "' . $filename . '"'); + return 0; # LCOV_UNREACHABLE_LINE } $included_config_files{$path} = 1; push(@include_stack, $filename); + + local *HANDLE = $f->hdl(); VAR: while () { chomp; # Skip comments @@ -1371,7 +1372,7 @@ sub read_config($$) # Remove trailing blanks s/\s+$//; next unless length; - ($key, $value) = split(/\s*=\s*/, $_, 2); + my ($key, $value) = split(/\s*=\s*/, $_, 2); # is this an environment variable? 
while (defined($value) && $value =~ /\$ENV\{([^}]+)\}/) { @@ -1413,7 +1414,6 @@ sub read_config($$) ]); } } - close(HANDLE) or die("unable to close $filename: $!\n"); delete $included_config_files{$path}; pop(@include_stack); return $set_value; @@ -1591,12 +1591,22 @@ sub parseOptions $lcovutil::verify_checksum = 0 if (defined($rc_no_checksum)); + # Determine which errors the user wants us to ignore + parse_ignore_errors(@opt_ignore_errors); + + # if lcov --capture: no further initialization required - is handled + # in geninfo call + return 1 if $lcov_capture; + foreach my $cb ([\$versionCallback, \@lcovutil::extractVersionScript], [\$resolveCallback, \@lcovutil::resolveCallback], [\$CoverageCriteria::criteriaCallback, \@CoverageCriteria::coverageCriteriaScript ], [\$contextCallback, \@lcovutil::contextCallback], + [\$profileHistoryCallback, + \@lcovutil::profileHistoryCallback + ], ) { lcovutil::configure_callback($cb->[0], @{$cb->[1]}) if (@{$cb->[1]}); @@ -1623,35 +1633,30 @@ sub parseOptions $lcovutil::profile = '' if ($contextCallback && !defined($lcovutil::profile)); - if (!$lcov_capture) { - if ($lcovutil::compute_file_version && - !defined($versionCallback)) { - lcovutil::ignorable_warning($lcovutil::ERROR_USAGE, - "'compute_file_version=1' option has no effect without either '--version-script' or 'version_script=...'." 
- ); - } - lcovutil::munge_file_patterns(); - lcovutil::init_parallel_params(); - # Determine which errors the user wants us to ignore - parse_ignore_errors(@opt_ignore_errors); - parse_expected_message_counts(@opt_expected_message_counts); - # Determine what coverpoints the user wants to filter - push(@opt_filter, 'exception') if $lcovutil::exclude_exception_branch; - parse_cov_filters(@opt_filter); - - # Ensure that the c++filt tool is available when using --demangle-cpp - lcovutil::do_mangle_check(); - - foreach my $entry (@deferred_rc_errors) { - my ($isErr, $type, $msg) = @$entry; - if ($isErr) { - lcovutil::ignorable_error($type, $msg); - } else { - lcovutil::ignorable_warning($type, $msg); - } - } + if ($lcovutil::compute_file_version && + !defined($versionCallback)) { + lcovutil::ignorable_warning($lcovutil::ERROR_USAGE, + "'compute_file_version=1' option has no effect without either '--version-script' or 'version_script=...'." + ); } + lcovutil::munge_file_patterns(); + lcovutil::init_parallel_params(); + parse_expected_message_counts(@opt_expected_message_counts); + # Determine what coverpoints the user wants to filter + push(@opt_filter, 'exception') if $lcovutil::exclude_exception_branch; + parse_cov_filters(@opt_filter); + + # Ensure that the c++filt tool is available when using --demangle-cpp + lcovutil::do_mangle_check(); + foreach my $entry (@deferred_rc_errors) { + my ($isErr, $type, $msg) = @$entry; + if ($isErr) { + lcovutil::ignorable_error($type, $msg); + } else { + lcovutil::ignorable_warning($type, $msg); + } + } return 1; } @@ -1785,14 +1790,24 @@ sub munge_file_patterns sub warn_pattern_list { my ($type, $patterns) = @_; + my $unused = 0; foreach my $pat (@$patterns) { my $count = $pat->[-1]; if (0 == $count) { my $str = $pat->[-2]; lcovutil::ignorable_error($ERROR_UNUSED, "'$type' pattern '$str' is unused."); + ++$unused; } } + if ($unused) { + lcovutil::ignorable_error($ERROR_UNUSED, + "$unused of " . + scalar(@$patterns) . 
+ " '$type' pattern" . + (scalar(@$patterns) == 1 ? '' : 's') . + ' were never applied.'); + } } sub warn_file_patterns @@ -1927,7 +1942,8 @@ sub summarize_messages foreach my $type ('error', 'warning', 'ignore') { next unless $total{$type}; $found = 1; - my $leader = $header . ' ' . $total{$type} . " $type message" . + my $leader = + $header . ' ' . $total{$type} . " $type message" . ($total{$type} > 1 ? 's' : '') . ":\n"; my $h = $message_types{$type}; foreach my $k (sort keys %$h) { @@ -2586,7 +2602,8 @@ sub summarize_cov_filters next if 0 == $histogram->[-2]; my $points = ''; if ($histogram->[-2] != $histogram->[-1]) { - $points = ' ' . $histogram->[-1] . ' coverpoint' . + $points = + ' ' . $histogram->[-1] . ' coverpoint' . ($histogram->[-1] > 1 ? 's' : '') . "\n"; } info(-1, @@ -3403,7 +3420,6 @@ sub annotate sub check_criteria { my ($self, $name, $type, $data) = @_; - my $iter = $self->pipe('criteria', $name, $type, JsonSupport::encode($data)); return (0) unless $iter; # constructor will have given error message @@ -3420,7 +3436,6 @@ sub check_criteria sub select { my ($self, $lineData, $annotateData, $filename, $lineNo) = @_; - my @params = ('select', defined($lineData) ? JsonSupport::encode($lineData->to_list()) : '', @@ -3434,19 +3449,32 @@ sub select sub simplify { my ($self, $func) = @_; - my $name; my $pipe = $self->pipe('simplify', $func); die("broken 'simplify' callback") unless ($pipe && ($name = $pipe->next())); - chomp($name); $name =~ s/\r//; lcovutil::info(1, " simplify: $name\n"); return $name; } +sub history +{ + my ($self, $item) = @_; + + my $time; + my $pipe = $self->pipe('history', $item); + die("broken 'history' callback") + unless ($pipe && + ($time = $pipe->next())); + chomp($time); + $time =~ s/\r//; + lcovutil::info(1, " history: $item = $time\n"); + return $time eq '' ? 
undef : $time; +} + package JsonSupport; our $rc_json_module = 'auto'; @@ -4143,9 +4171,9 @@ sub merge # return 1 if something changed, 0 if nothing new covered or discovered my ($self, $that, $filename, $line) = @_; # should have called 'iscompatible' first - die('attempt to merge incompatible expressions for id' . - $self->id() . ', ' . $that->id() . - ": '" . $self->exprString() . "' -> '" . $that->exprString() . "'") + die("$filename:$line: attempt to merge incompatible expressions for id" . + $self->id() . ', ' . $that->id() . ": '" . + $self->exprString() . "' -> '" . $that->exprString() . "'") if ($self->exprString() ne $that->exprString()); if ($self->is_exception() != $that->is_exception()) { @@ -4656,7 +4684,8 @@ sub merge { my ($self, $that) = @_; lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, - $self->name() . " has different location than " . + $self->name() . + " has different location than " . $that->name() . " during merge") if ($self->line() != $self->line()); while (my ($name, $count) = each(%{$that->[ALIASES]})) { @@ -4831,7 +4860,8 @@ sub define_function ) { lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, "mismatched end line for $fnName at " . - $self->filename() . ":$start_line: " + $self->filename() . + ":$start_line: " . (defined($data->end_line()) ? $data->end_line() : 'undef') . @@ -4941,8 +4971,8 @@ sub union my $thisData; if (!exists($myData->{$key})) { $thisData = - $self->define_function($thatData->name(), - $thatData->line(), $thatData->end_line()); + $self->define_function($thatData->name(), $thatData->line(), + $thatData->end_line()); $changed = 1; # something new... } else { $thisData = $myData->{$key}; @@ -6300,7 +6330,8 @@ sub parseLines lcovutil::ignorable_error($lcovutil::ERROR_MISMATCH, "$filename: overlapping exclude directives. Found " . $d->[4] . - " at line $line - but no matching " . $d->[5] . + " at line $line - but no matching " . + $d->[5] . ' for ' . $d->[4] . ' at line ' . 
$$ref->[0]) if $$ref; $$ref = [$line, $reason, $d->[4], $d->[5]]; @@ -6309,7 +6340,8 @@ sub parseLines lcovutil::ignorable_error($lcovutil::ERROR_MISMATCH, "$filename: found " . $d->[5] . " directive at line $line without matching " . - ($$ref ? $$ref->[2] : $d->[4]) . ' directive') + ($$ref ? $$ref->[2] : $d->[4]) . + ' directive') unless $$ref && $$ref->[2] eq $d->[4] && $$ref->[3] eq $d->[5]; @@ -6531,16 +6563,17 @@ sub isExcluded lcovutil::message_count($lcovutil::ERROR_RANGE) == 0 ]); lcovutil::store_deferred_message( - $lcovutil::ERROR_RANGE, - 1, $key, - "unknown line '$lineNo' in " . $self->filename() - . - ( - defined($data->[EXCLUDE]) ? - (" there are only " . - scalar(@{$data->[EXCLUDE]}) . " lines in the file.") : - "") . - $suffix) if lcovutil::warn_once($lcovutil::ERROR_RANGE, $key); + $lcovutil::ERROR_RANGE, + 1, $key, + "unknown line '$lineNo' in " . $self->filename() + . + (defined($data->[EXCLUDE]) ? + (" there are only " . + scalar(@{$data->[EXCLUDE]}) . + " lines in the file.") : + "") . + $suffix + ) if lcovutil::warn_once($lcovutil::ERROR_RANGE, $key); return 0; # even though out of range - this is not excluded by filter } my $reason; @@ -6991,8 +7024,13 @@ sub print_summary sub skipCurrentFile { - my $filename = shift; + my ($filename, $fileTypeName) = @_; + if (defined($fileTypeName)) { + $fileTypeName .= ' '; + } else { + $fileTypeName = ''; + } my $filt = $lcovutil::cov_filter[$lcovutil::FILTER_MISSING_FILE]; if ($filt) { my $missing = !-r $filename; @@ -7005,7 +7043,8 @@ sub skipCurrentFile if ($missing) { lcovutil::info( - "Excluding \"$filename\": does not exist/is not readable\n"); + "Excluding $fileTypeName\"$filename\": does not exist/is not readable\n" + ); ++$filt->[-2]; ++$filt->[-1]; return 1; @@ -7016,7 +7055,8 @@ sub skipCurrentFile foreach my $p (@lcovutil::exclude_file_patterns) { my $pattern = $p->[0]; if ($filename =~ $pattern) { - lcovutil::info(1, "exclude $filename: matches '" . $p->[1] . 
"\n"); + lcovutil::info(1, + "exclude $fileTypeName$filename: matches '" . $p->[1] . "\n"); ++$p->[-1]; return 1; # all done - explicitly excluded } @@ -7026,13 +7066,15 @@ sub skipCurrentFile my $pattern = $p->[0]; if ($filename =~ $pattern) { lcovutil::info(1, - "include: $filename: matches '" . $p->[1] . "\n"); + "include: $fileTypeName$filename: matches '" . + $p->[1] . "\n"); ++$p->[-1]; return 0; # explicitly included } } - lcovutil::info(1, "exclude $filename: no include matches\n"); - return 1; # not explicitly included - so exclude + lcovutil::info(1, + "exclude $fileTypeName$filename: no include matches\n"); + return 1; # not explicitly included - so exclude } return 0; } @@ -7236,9 +7278,8 @@ sub _eraseFunctions lcovutil::info( 1 + (0 == $isMasterList), "exclude FN $name line range $source_file:[" . - $fcn->line() . - ":$end_line] due to '" . $p->[-2] . "'\n" - ); + $fcn->line() . ":$end_line] due to '" . + $p->[-2] . "'\n"); } _eraseFunction($fcn, $alias, $end_line, $source_file, $functionMap, $lineData, @@ -7379,9 +7420,10 @@ sub _deriveFunctionEndLines lcovutil::explain_once('derive_end_line', " See lcovrc man entry for 'derive_function_end_line'."); lcovutil::ignorable_error($lcovutil::ERROR_INCONSISTENT_DATA, - '"' . $func->filename() . '":' . $func->line() . - ': failed to set end line for function ' . - $func->name() . '.' . $suffix); + '"' . + $func->filename() . '":' . $func->line() . + ': failed to set end line for function ' . + $func->name() . '.' . 
$suffix); next FUNC; } @@ -8659,8 +8701,8 @@ sub _read_info } # Check for .gz extension - my $inFile = InOutFile->in($tracefile, $lcovutil::demangle_cpp_cmd); - my $infoHdl = $inFile->hdl(); + my $inFile = InOutFile->in($tracefile, $lcovutil::demangle_cpp_cmd); + local *INFO = $inFile->hdl(); $testname = ""; my $fileData; @@ -8678,7 +8720,7 @@ sub _read_info my %excludedFunction; my $skipCurrentFile = 0; my %fnIdxMap; - while (<$infoHdl>) { + while () { chomp($_); my $line = $_; $line =~ s/\s+$//; # whitespace diff --git a/man/genhtml.1 b/man/genhtml.1 index 0990f9b8..179dec58 100644 --- a/man/genhtml.1 +++ b/man/genhtml.1 @@ -99,7 +99,10 @@ genhtml \- Generate HTML view from LCOV coverage data files .IR config\-file ] .br .RB [ \-\-profile -.IR [profile\-file ] +.IR [profile\-file ]] +.br +.RB [ \-\-history-script +.IR callback ] .br .RB [ \-\-rc .IR keyword = value ] @@ -439,6 +442,8 @@ following options: .br .B \-\-version\-script .br +.B \-\-history\-script +.br .RE While each script performs a separate function there are some common aspects @@ -626,6 +631,37 @@ and does not modify information in the coveage DB. .PP +.IP history\-script +$cpu_seconds = $callback_obj->history($element_name) +.br +.br + +where +.I $cpu_seconds +is the predicted time taken to process +.I $element_name +or +.I undef +if there is no prediction/the element is unknown. + +.br +See the sample implementation +.RS +\*[scriptdir]/history.pm, +which uses the +.I "\-\-profile" +data generated by a previous +.B genhtml +execution to predict the time required this time. +.br +The prediction may improve load balancing - and thus improve overall +runtime performance ( +.I i.e., +because we won't be waiting for some "long pole" task to complete while +all other threads are idle. + +.PP + .RE .IP 2. 3 @@ -2287,6 +2323,13 @@ and .I fork_fail_timeout entries in man .B lcovrc(5). +.br +A previously generated execution profile may help to enable better utilization +and faster parallel execution. 
See the +.I "\-\-profile" +and +.I "\-\-history\-script" +sections of this man page. .RE @@ -3415,9 +3458,35 @@ If the optional is not specified, then the profile data is written to a file named .I "genhtml.json" in the output directory. +.br +Profile data is useful if you are trying to optimize the +.B lcov +implementation (see +.B $LCOV_ROOT/share/lcov/support-scripts/spreadsheet.py +), and can also enable faster 'genhtml --parallel' execution (see the +"\-\-history\-script" section of this man page). + .RE +.BI "\-\-history-script" script +.br +.RS +Tell the tool to use performance data from a prior job to predict resource usage by the current job. +This may allow better segmentation to enable more balanced workloads between parallel threads - thus improving wall clock execution time. + +A common source for the +.I "previous\-profile\-data\-file" +is the profile data generated by a prior regression suite execution. +See +.B $LCOV_ROOT/share/lcov/example +and +.B $LCOV_ROOT/share/lcov/support-scripts/history.pm +in the installed release (or +.B .../example +in the source repository). +.RE + .B \-\-rc .IR keyword = value diff --git a/man/geninfo.1 b/man/geninfo.1 index d5a9892f..70fd9ed5 100644 --- a/man/geninfo.1 +++ b/man/geninfo.1 @@ -88,6 +88,9 @@ geninfo \- Generate tracefiles from GCOV coverage data files .RB [ \-\-profile .IR [ profile\-file ] ] .br +.RB [ \-\-history\-script +.IR callback ] +.br .RB [ \-\-derive\-func\-data ] .RB [ \-\-compat .IR mode =on|off|auto] @@ -652,9 +655,45 @@ basename as the .I \-\-output\-filename, with suffix .I ".json" appended. +.br +Profile data is useful if you are trying to optimize the +.B geninfo +implementation (see +.B $LCOV_ROOT/share/lcov/support-scripts/spreadsheet.py +), and can also enable faster 'geninfo --parallel' execution (see the +"\-\-history\-script" section of this man page). 
+ +.RE + + +.B \-\-history\-script +.I callback +.br +.RS +Tell the tool to use performance data from a prior job to predict resource usage by the current job. +This may allow better segmentation to enable more balanced workloads between parallel threads - thus improving wall clock execution time. +A common source for the performance history is the +.I "previous\-profile\-data\-file" +generated by the +.I "\-\-profile" +argument. + +See a sample callback implementation in +.B $LCOV_ROOT/share/lcov/support\-scripts/history.pm +and its use in +.B $LCOV_ROOT/share/lcov/example +in the installed release (or +.B .../example +and +.B .../scripts/history.pm +in the source repository). .RE +See man +.B genhtml(1) +for more details. + .B \-\-derive\-func\-data .br @@ -1389,7 +1428,6 @@ option described below may be necessary to enable parallelism to succeed in the presence of data files which consume excessive memory in .B gcov. - Also see the .I memory, memory_percentage, max_fork_fails, fork_fail_timeout, geninfo_chunk_size and @@ -1397,6 +1435,13 @@ and entries in man .B lcovrc(5) for a description of some options which may aid in parameter tuning and performance optimization. +.br +A previously generated execution profile may help to enable better utilization +and faster parallel execution. See the +.I "\-\-profile" +and +.I "\-\-history\-script" +sections of this man page. 
.RE .BI "\-\-large\-file " diff --git a/man/lcov.1 b/man/lcov.1 index 6838d0a5..2f637f3b 100644 --- a/man/lcov.1 +++ b/man/lcov.1 @@ -96,6 +96,9 @@ format is described in man .RB [ \-\-version\-script .IR script_file ] .br +.RB [ \-\-history\-script +.IR callback ] +.br .RB [ \-\-comment .IR comment_string ] .br @@ -103,7 +106,7 @@ format is described in man .IR regexp ] .br .RE -.RE +.RE Generate tracefile (from compiler-generated data) with all counter values set to zero: .br @@ -260,7 +263,6 @@ such that coverpoints found only in the set on the left will be retained and all .RE - Summarize tracefile content: .br @@ -704,6 +706,29 @@ basename as the .I \-\-output\-filename, with suffix .I ".json" appended. +Profile data is useful if you are trying to optimize the +.B lcov +implementation (see +.B $LCOV_ROOT/share/lcov/support\-scripts/spreadsheet.py +), and can also enable faster +.I --parallel +execution (see the +.I "\-\-history\-script" +section of this man page). + +.B \-\-history\-script +.I callback +.br +.RS +Use +.I callback +to predict current runtime cost using observed cost from prior execution. + +See man +.B genhtml(1) +for more information. +.RE + .RE @@ -1723,6 +1748,12 @@ and .I fork_fail_timeout entries in man .B lcovrc(5). +A previously generated execution profile may help to enable better utilization +and faster parallel execution. See the +.I "\-\-profile" +and +.I "\-\-history\-script" +sections of this man page. .RE .BI "\-\-memory " diff --git a/rpm/lcov.spec b/rpm/lcov.spec index 591cb163..b7d3c71b 100644 --- a/rpm/lcov.spec +++ b/rpm/lcov.spec @@ -1,6 +1,6 @@ Summary: A graphical code coverage front-end Name: lcov -Version: 2.4 +Version: 2.4.1 Release: 1 License: GPLv2+ Group: Development/Tools diff --git a/scripts/gitblame.pm b/scripts/gitblame.pm index a26b9d70..d4f2aa19 100644 --- a/scripts/gitblame.pm +++ b/scripts/gitblame.pm @@ -130,7 +130,8 @@ sub annotate (-f $pathname || -l $pathname) && -r $pathname) { $context = ':' .
$context if $context; - die($self->[SCRIPT] . $context . ' expected readable file, found \'' . + die($self->[SCRIPT] . + $context . ' expected readable file, found \'' . (defined($pathname) ? $pathname : '') . "'"); } diff --git a/scripts/history.pm b/scripts/history.pm new file mode 100644 index 00000000..8de53d03 --- /dev/null +++ b/scripts/history.pm @@ -0,0 +1,196 @@ +#!/usr/bin/env perl + +# Copyright (c) MediaTek USA Inc., 2025-2026 +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or (at +# your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, see +# . +# +# history +# +# This script is used as a "--history-script history" callback for +# genhtml/lcov/geninfo. +# The intent is to reuse profile history from a prior tool execution +# to enable better load balancing and thus better performance in a +# subsequent execution. +# The basic observation is that the next regression job is very similar to +# the previous one - so we expect similar resource utilization. +# +# The 'history-script' is called as: +# +# history->new(previousProfileFile) +# +# where 'previousProfileFile' is the path to the '--profile' data generated +# in your previous regression job. +# +# The configured callback is executed as +# +# $callback->history(name) +# +# where 'name' is typically the filename seen by genhtml or lcov. +# The return value is expected to be the execution time associated +# with 'name'. 
+# +# This example script can be used with 'genhtml' and 'lcov --capture' +# and should work as expected if: +# - your command line is the same as the previous execution +# - file 'previousProfileFile' was generated by the previous execution, and +# - source and data files (.gdca, etc) are in the same relative locations +# as the previous execution +# +# It may be useful to dig into the format of the '--profile' data +# to implement your own callbacks. +# +# Note that history is not going to help if: +# - Individual tasks all have roughly the same resource utilization - +# there is no 'long pole' which needs to be scheduled first. +# - Random testing/tests are not reused: either test names never +# repeat, or the same name is not the same test. +# (You will need to implement your own prediction callback in that +# case...history won't help.) +# + +use strict; +use lcovutil; +use Time::HiRes; + +package history; + +our @ISA = qw(Exporter); +our @EXPORT_OK = qw(new); + +sub appendElements +{ + my ($predicted, $in, $sub) = @_; + + while (my ($name, $time) = each(%$in)) { + $name = $sub->($name) if defined($sub); + if (!exists($predicted->{$name})) { + $predicted->{$name} = [$time, 1]; + } else { + my $p = $predicted->{$name}; + $p->[0] += $time; + $p->[1] += 1; + } + } +} + +sub new +{ + my $class = shift; + my $script = shift; + my $standalone = $script eq $0; + + my $start = Time::HiRes::gettimeofday(); + # these keys should be present in the data file + # (else user has given us the wrong one) + my @verifyKeysList; + if ($lcovutil::tool_name eq 'genhtml') { + push(@verifyKeysList, 'file'); + } elsif ($lcovutil::tool_name eq 'geninfo' || + ($lcovutil::tool_name eq 'lcov' && + $main::callFromLcov) + ) { + push(@verifyKeysList, 'file', 'find'); + } + die("--history-script: JSON file argument required") + unless (@_); + # is the profile history a valid JSON profile file + my @data; + foreach my $glob (@_) { + my @glob = glob($glob); + 
lcovutil::ignorable_error($lcovutil::ERROR_EMPTY, + "'--history-script $glob' does not match any files") + unless (@glob); + foreach my $history (@glob) { + eval { + my $data = JsonSupport::load($history); + for my $k (@verifyKeysList) { + die("profile key '$k' not found") + unless (exists($data->{$k})); + } + push(@data, $data); + }; + if ($@) { + $@ =~ s/.+?ERROR: //; + lcovutil::ignorable_error($lcovutil::ERROR_USAGE, + "'--history-script $script $history' is not a valid $lcovutil::tool_name profile file: $@." + ); + next; + } + } + } + unless (@data) { + lcovutil::ignorable_error($lcovutil::ERROR_USAGE, + "'--history-script $script " . + join(',', @_) . + "': profile history not found."); + return undef; + } + my %predicted; + if ($lcovutil::tool_name eq 'genhtml') { + # just keep the 'files' hash + if (1 < scalar(@data)) { + # compute average + foreach my $d (@data) { + appendElements(\%predicted, $d->{$verifyKeysList[0]}); + } + } else { + return bless $data[0]->{$verifyKeysList[0]}, $class; + } + } elsif (scalar(@verifyKeysList)) { + # this is geninfo..munge the data + # - there may have been multiple 'capture' calls - say, if the + # testsuite is segmented across machines. + # - the individual capture directories may have different content + # from one regression run to the next - say, if tests are + # distributed adaptively. + # Thus: the only way to have a complete history is if we merge + # the data from all of those jobs. + foreach my $d (@data) { + # read the profile data to figure out how much each file costs + my $sep = '^('; + my $pat = ''; + for my $dir (keys %{$d->{find}}) { + $pat .= $sep .
$dir; + $sep = '|'; + } + $pat .= ')/'; + my $re = qr($pat); + my $sub = sub { + my $n = shift; + $n =~ s/$re//; + return $n; + }; + appendElements(\%predicted, $d->{file}, $sub); + } + } + for my $name (keys %predicted) { + my $p = $predicted{$name}; + # storing the average + $predicted{$name} = $p->[0] / $p->[1]; + } + my $now = Time::HiRes::gettimeofday(); + $lcovutil::profileData{history} = $now - $start; + + return bless \%predicted, $class; +} + +sub history +{ + my ($self, $name) = @_; + + return exists($self->{$name}) ? $self->{$name} : undef; +} + +1; diff --git a/scripts/simplify.pm b/scripts/simplify.pm index db9d8207..b14338bc 100644 --- a/scripts/simplify.pm +++ b/scripts/simplify.pm @@ -92,7 +92,18 @@ EOF # verify that the patterns are valid... lcovutil::verify_regexp_patterns($script, \@patterns); - my @munged = map({ [$_, 0]; } @patterns); + # pre-compile the regexps (to the extent that we can) + my @munged; + foreach my $p (@patterns) { + my $sep = substr($p, 1, 1); + my @s = split($sep, $p); + die("unexpected substitution pattern '$p'") + unless ($s[0] eq 's' && + $#s == 3 && + $s[3] eq 'g'); + + push(@munged, [qr/$s[1]/, $s[2], $p, 0]); + } return bless \@munged, $class; } @@ -103,13 +114,12 @@ sub simplify foreach my $p (@$self) { my $orig = $name; - # sadly, no support for pre-compiled patterns - eval "\$name =~ $p->[0] ;"; # apply pattern that user provided... - # $@ should never match: we already checked pattern validity during - # initialization - above. Still: belt and braces. - die("invalid 'simplify' regexp '$p->[0]': $@") + eval { $name =~ s/$p->[0]/$p->[1]/g; }; + # $@ should never match: we already checked pattern validity during + # initialization - above. Still: belt and braces. 
+ die("invalid 'simplify' regexp '$p->[-2]': $@") if ($@); - ++$p->[1] + ++$p->[-1] if ($name ne $orig); } return $name; @@ -119,7 +129,7 @@ sub start { my $self = shift; foreach my $p (@$self) { - $p->[1] = 0; + $p->[-1] = 0; } } @@ -128,7 +138,7 @@ sub save my $self = shift; my @data; foreach my $p (@$self) { - push(@data, $p->[1]); + push(@data, $p->[-1]); } return \@data; } diff --git a/scripts/spreadsheet.py b/scripts/spreadsheet.py index ebb37813..fb6d6d43 100755 --- a/scripts/spreadsheet.py +++ b/scripts/spreadsheet.py @@ -24,7 +24,7 @@ def __init__(self, excelFile, files, args): # keep a list of sheets so we can insert a summary.. geninfoSheets = [] - summarySheet = s.add_worksheet("geninfo_summary") if 1 < len(files) else None + summarySheet = s.add_worksheet("capture_summary") if 1 < len(files) else None # order: order of processing # file: time to process one GCDA file @@ -41,7 +41,7 @@ def __init__(self, excelFile, files, args): # queue: time between child finish and start of merge in parent # merge: time to merge returned chunk info geninfoChunkKeys = ('work', 'chunk', 'queue', 'child', 'process', 'undump', 'merge') - geninfoSpecialKeys = ('total', 'parallel', 'filter', 'write') + geninfoSpecialKeys = ('total', 'parallel', 'filter', 'write', 'history') # keys related to filtering filterKeys = ('filt_chunk', 'filt_queue', 'filt_child', 'filt_proc', 'filt_undump', 'filt_merge', 'derive_end') @@ -166,6 +166,7 @@ def insertStats(keys, sawData, sumRow, avgRow, devRow, beginRow, endRow, col): insertConditional(sheet, avgRow, devRow, beginRow, firstCol, endRow, col) + activeSheet = None for name in files: try: with open(name) as f: @@ -179,6 +180,9 @@ def insertStats(keys, sawData, sumRow, avgRow, devRow, beginRow, endRow, col): try: tool = data['config']['tool'] + if (tool == 'lcov' and + -1 != data['config']['cmdLine'].find('--call-from-lcov')): + tool = 'geninfo' except: tool = 'unknown' print("%s: unknown tool" %(name)) @@ -198,6 +202,8 @@ def 
insertStats(keys, sawData, sumRow, avgRow, devRow, beginRow, endRow, col): for i in range(1000): try: sheet = s.add_worksheet(sn[-31:]) + if activeSheet == None: + activeSheet = sheet break except: sn = sheetname + "_" + str(i) @@ -217,7 +223,7 @@ insertStats(keys, sawData, sumRow, avgRow, devRow, beginRow, endRow, col): for n in sorted(data['config'].keys()): try: sheet.write_string(row, 1, n) - if n in ("tool", 'date', ): + if n in ("tool", 'cmdLine', 'date', ): sheet.write_string(row, 2, data['config'][n]) else: sheet.write_number(row, 2, data['config'][n], intFormat) @@ -266,7 +272,7 @@ insertStats(keys, sawData, sumRow, avgRow, devRow, beginRow, endRow, col): effectiveParallelism += sep + xl_rowcol_to_cell(row, 2) sep = "+" except: - print("%s: failed to write %s for lcov[seg %d][%s]" % ( + print("Warning: %s: unable to write %s for lcov[seg %d][%s]" % ( name, str(d[k]) if k in d else "??", seg, k)) row += 1 begin = row @@ -280,10 +286,10 @@ insertStats(keys, sawData, sumRow, avgRow, devRow, beginRow, endRow, col): try: sheet.write_number(row, 3, float(d2[f]), twoDecimal) except: - print("%s: failed to write %s for lcov[seg %d][%s][$s]" % (name, str(d2[f]), seg, k, f)) + print("Warning: %s: unable to write %s for lcov[seg %d][%s][%s]" % (name, str(d2[f]), seg, k, f)) row += 1 except: - print("%s: failed to write %s for lcov[seg %d]" % (name, k, seg)) + print("Warning: %s: unable to write %s for lcov[seg %d]" % (name, k, seg)) effectiveParallelism += ")/%(total)s" % { 'total': total, } @@ -300,7 +306,7 @@ insertStats(keys, sawData, sumRow, avgRow, devRow, beginRow, endRow, col): val = data[k] sheet.write_number(row, 2, float(val), twoDecimal) except: - print("%s: failed to write %s for lcov[%s]" % (name, str(val), k)) + print("Warning: %s: unable to write %s for lcov[%s]" % (name, str(val), k)) row += 1 for k in ('parse', 'append'): try: @@ -311,10 +317,10 @@ insertStats(keys, sawData, sumRow, avgRow, devRow, beginRow, endRow, col): try:
sheet.write_number(row, 3, float(d2[f]), twoDecimal) except: - print("%s: failed to write %s for lcov[%s][$s]" % (name, str(d2[f]), k, f)) + print("Warning: %s: unable to write %s for lcov[%s][%s]" % (name, str(d2[f]), k, f)) row += 1 except: - print("%s: failed to find key '%s'" %(name, k)) + print("Warning: %s: failed to find key '%s'" %(name, k)) # go on to the next file continue @@ -557,11 +563,15 @@ def cmpFile(a, b): elif tool == 'genhtml': for k in ('parse_source', 'parse_diff', - 'parse_current', 'parse_baseline'): + 'parse_current', 'parse_baseline', + 'history'): if k in data: - sheet.write_string(row, 0, k) - sheet.write_number(row, 1, data[k], twoDecimal) - row += 1 + try: + sheet.write_string(row, 0, k) + sheet.write_number(row, 1, data[k], twoDecimal) + row += 1 + except: + pass # 'history' key might not be there # total: time from start to end of the particular unit - # child: time from start to end of child process @@ -625,7 +635,7 @@ def printDataRow(name): else: sawData[k] = 1 except: - print("%s: failed to write %s" %(name, data[k][name])) + print("Warning: %s: unable to write %s" %(name, data[k][name])) col += 1 def visitScope(f): @@ -674,7 +684,7 @@ def visitScope(f): try: sheet.write_number(row, 2, float(d[n]), twoDecimal) except: - print("%s: failed to write %s for [%s][%s]" %(name, str(d[n]), k, n)) + print("Warning: %s: unable to write %s for [%s][%s]" %(name, str(d[n]), k, n)) row += 1; continue elif k in ('config', 'overall', 'total'): @@ -684,6 +694,7 @@ def visitScope(f): if summarySheet: if len(geninfoSheets) < 2: + activeSheet.activate() summarySheet.hide() # insert the average and variance data...
diff --git a/tests/gendiffcov/errs/msgtest.sh b/tests/gendiffcov/errs/msgtest.sh index f2f8ea54..dd0d9430 100755 --- a/tests/gendiffcov/errs/msgtest.sh +++ b/tests/gendiffcov/errs/msgtest.sh @@ -4,7 +4,7 @@ set +x source ../../common.tst rm -f test.cpp *.gcno *.gcda a.out *.info *.log *.json diff.txt loop*.rc markers.err* readThis.rc testing.rc -rm -rf select criteria annotate empty unused_src scriptErr scriptFixed epoch inconsistent highlight etc mycache cacheFail expect subset context labels sortTables simplify_* simplify missingRestore selectErr1 selectErr2 selectErr3 +rm -rf select criteria annotate empty unused_src scriptErr scriptFixed epoch inconsistent highlight etc mycache cacheFail expect subset context labels sortTables simplify_* simplify missingRestore selectErr1 selectErr2 selectErr3 mcdc clean_cover @@ -23,6 +23,7 @@ GITBLAME_SCRIPT=$SCRIPT_DIR/gitblame.pm GITVERSION_SCRIPT=$SCRIPT_DIR/gitversion.pm P4VERSION_SCRIPT=$SCRIPT_DIR/P4version.pm SIMPLIFY_SCRIPT=$SCRIPT_DIR/simplify.pm +HISTORY_SCRIPT=$SCRIPT_DIR/history.pm if [ 1 == "$USE_GIT" ] ; then # this is git @@ -222,13 +223,7 @@ for missing in noSuchFile missingDirectory/nofile ; do fi fi grep "cannot read configuration file '$missing'" err_missing.log - FOUND=$? - if [ 0 != $FOUND ] ; then - # look for alternate message found with some perl versions...see #450 - grep "config file '$missing' does not exist" err_missing.log - FOUND=$? - fi - if [ 0 != $FOUND ] ; then + if [ 0 != $? ] ; then echo "ERROR: missing config file '$missing' message" if [ 0 == $KEEP_GOING ] ; then exit 1 @@ -241,7 +236,7 @@ echo "message_log = message_file.log" > testing.rc echo "config_file = testing.rc" > readThis.rc echo lcov $LCOV_OPTS --summary initial.info --config-file readThis.rc $COVER $LCOV_TOOL $LCOV_OPTS --summary initial.info --config-file readThis.rc -if [ ! == ${PIPESTATUS[0]} ] ; then +if [ 0 != $? 
] ; then echo "ERROR: didn't read config file" if [ 0 == $KEEP_GOING ] ; then exit 1 @@ -428,7 +423,7 @@ for arg in "--annotate-script $ANNOTATE_SCRIPT --select-script $SELECT_SCRIPT,-- fi # run again without error echo genhtml $DIFCOV_OPTS initial.info -o scriptFixed ${arg} - $COVER $GENHTML_TOOL $DIFFCOV_OPTS initial.info -o scriptFixed ${arg} --ignore annotate 2>&1 | tee script_err.log + $COVER $GENHTML_TOOL $DIFFCOV_OPTS initial.info -o scriptFixed ${arg} --ignore annotate --profile 2>&1 | tee script_err.log if [ 0 != ${PIPESTATUS[0]} ] ; then echo "ERROR: genhtml scriptFixed failed" if [ 0 == $KEEP_GOING ] ; then @@ -971,8 +966,60 @@ if [ 0 != $? ] ; then fi fi +# test profile history fails +for f in noFile initial.info ; do + $COVER $GENHTML_TOOL $DIFFCOV_OPTS initial.info -o history --parallel --history $HISTORY_SCRIPT,$f 2>&1 | tee history.log + if [ 0 == ${PIPESTATUS[0]} ] ; then + echo "ERROR: genhtml --history $f didn't fail" + if [ 0 == $KEEP_GOING ] ; then + exit 1 + fi + fi + grep -E "ERROR.*usage.*--history.* is not a valid genhtml profile file" history.log + if [ 0 != $? ] ; then + echo "ERROR: didn't find expected --history message" + if [ 0 == $KEEP_GOING ] ; then + exit 1 + fi + fi +done + +# wrong profile type +$COVER $GENINFO_TOOL $LCOV_OPTS . -o profileTest.info --parallel --history $HISTORY_SCRIPT,scriptFixed.info.json 2>&1 | tee geninfo_history.log +if [ 0 == ${PIPESTATUS[0]} ] ; then + echo "ERROR: geninfo --history scriptFixed.info.json didn't fail" + if [ 0 == $KEEP_GOING ] ; then + exit 1 + fi +fi +grep -E "ERROR.*usage.*--history.* is not a valid geninfo profile file" geninfo_history.log +if [ 0 != $? ] ; then + echo "ERROR: didn't find expected geninfo --history message" + if [ 0 == $KEEP_GOING ] ; then + exit 1 + fi +fi + +# ignore the wrong history message - also need to ignore the resulting +# 'package' error when the callback can't be installed +$COVER $GENINFO_TOOL $LCOV_OPTS . 
-o profileTest.info --parallel --history $HISTORY_SCRIPT,scriptFixed.info.json --ignore usage,package 2>&1 | tee geninfo_history_ignore.log +if [ 0 != ${PIPESTATUS[0]} ] ; then + echo "ERROR: geninfo --history --ignore failed" + if [ 0 == $KEEP_GOING ] ; then + exit 1 + fi +fi +grep -E "WARNING.*usage.*--history.* profile history not found" geninfo_history_ignore.log +if [ 0 != $? ] ; then + echo "ERROR: didn't find expected geninfo --history --ignore message" + if [ 0 == $KEEP_GOING ] ; then + exit 1 + fi +fi + + if [ "$ENABLE_MCDC" != 1 ] ; then - $COVER $GENINFO_TOOL . -o mccd --mcdc-coverage $LCOV_OPTS --msg-log mcdc_errs.log + $COVER $GENINFO_TOOL . -o mcdc --mcdc-coverage $LCOV_OPTS --msg-log mcdc_errs.log if [ 0 == $? ] ; then echo "ERROR: no error for unsupported MC/DC" if [ 0 == $KEEP_GOING ] ; then diff --git a/tests/gendiffcov/simple/script.sh b/tests/gendiffcov/simple/script.sh index 72e11515..65bc20d8 100755 --- a/tests/gendiffcov/simple/script.sh +++ b/tests/gendiffcov/simple/script.sh @@ -26,6 +26,7 @@ echo * CRITERIA=${SCRIPT_DIR}/criteria SELECT=${SCRIPT_DIR}/select.pm +HISTORY=${SCRIPT_DIR}/history.pm #PARALLEL='' #PROFILE="'' @@ -248,7 +249,7 @@ gzip -c baseline_nobranch.info > baseline_nobranch.info.gz #genhtml baseline.info --output-directory ./baseline echo genhtml $DIFFCOV_OPTS baseline_orig.info --output-directory ./baseline $IGNORE --rc memory_percentage=50 --serialize ./baseline/coverage.dat -$COVER $GENHTML_TOOL $DIFFCOV_OPTS baseline_orig.info --output-directory ./baseline --save $IGNORE --rc memory_percentage=50 --serialize ./baseline/coverage.dat +$COVER $GENHTML_TOOL $DIFFCOV_OPTS baseline_orig.info --output-directory ./baseline --save $IGNORE --rc memory_percentage=50 --serialize ./baseline/coverage.dat --profile if [ 0 != $? 
] ; then echo "ERROR: genhtml baseline failed" status=1 @@ -279,7 +280,7 @@ fi gzip -c baseline-filter.info > baseline-filter.info.gz #genhtml baseline.info --output-directory ./baseline echo genhtml $DIFFCOV_OPTS baseline-filter.info --output-directory ./baseline-filter $IGNORE --missed -$COVER $GENHTML_TOOL $DIFFCOV_OPTS baseline-filter.info --output-directory ./baseline-filter $IGNORE --missed +$COVER $GENHTML_TOOL $DIFFCOV_OPTS baseline-filter.info --output-directory ./baseline-filter $IGNORE --missed --profile --history $HISTORY,baseline/genhtml.json,baseline/genhtml.json if [ 0 != $? ] ; then echo "ERROR: genhtml baseline-filter failed" status=1 @@ -290,7 +291,7 @@ fi #genhtml baseline.info --dark --output-directory ./baseline echo genhtml $DIFFCOV_OPTS --dark baseline-filter.info --output-directory ./baseline-filter-dark $IGNORE -$COVER $GENHTML_TOOL $DIFFCOV_OPTS --dark baseline-filter.info --output-directory ./baseline-filter-dark $IGNORE +$COVER $GENHTML_TOOL $DIFFCOV_OPTS --dark baseline-filter.info --output-directory ./baseline-filter-dark $IGNORE --history $HISTORY,baseline-filter/genhtml.json if [ 0 != $? 
] ; then echo "ERROR: genhtml baseline-filter-dark failed" status=1 diff --git a/tests/lcov/add/Makefile b/tests/lcov/add/Makefile index d9e5359c..4733f40e 100644 --- a/tests/lcov/add/Makefile +++ b/tests/lcov/add/Makefile @@ -8,4 +8,4 @@ TESTS := zero.sh zero2.sh full.sh full2.sh prune.sh track.sh DISABLED: part.sh part2.sh concatenated4.sh clean: - rm -f *.info prune prune2 prune3 track + rm -f *.info prune prune2 prune3 track prune3s prune3t diff --git a/tests/lcov/add/prune.sh b/tests/lcov/add/prune.sh index 7ad38ccd..8c22f4c6 100755 --- a/tests/lcov/add/prune.sh +++ b/tests/lcov/add/prune.sh @@ -4,6 +4,8 @@ set +x source ../../common.tst +rm -f prune3s prune3t prune3 + if [[ 1 == $CLEAN_ONLY ]] ; then exit 0 fi diff --git a/tests/lcov/demangle/demangle.sh b/tests/lcov/demangle/demangle.sh index fdf82bd2..d5e9705f 100755 --- a/tests/lcov/demangle/demangle.sh +++ b/tests/lcov/demangle/demangle.sh @@ -59,7 +59,7 @@ for k in FNA ; do done # see if we can "simplify" the function names.. -for callback in './simplify.pl' "${SIMPLIFY_SCRIPT},--sep,;,--re,s/Animal::Animal/subst1/;s/Cat::Cat/subst2/;s/subst2/subst3/" "${SIMPLIFY_SCRIPT},--file,simplify.cmd" ; do +for callback in './simplify.pl' "${SIMPLIFY_SCRIPT},--sep,;,--re,s/Animal::Animal/subst1/g;s/Cat::Cat/subst2/g;s/subst2/subst3/g" "${SIMPLIFY_SCRIPT},--file,simplify.cmd" ; do $COVER $GENHTML_TOOL --branch $PARLLEL $PROFILE -o simplify demangle.info --flat --simplify $callback if [ $? 
!= 0 ] ; then @@ -90,23 +90,23 @@ done # test unused regexp in simplify callback for PAR in '' '--parallel' ; do - $COVER $GENHTML_TOOL --branch $PARLLEL $PROFILE -o simplify demangle.info --flat --simplify "${SIMPLIFY_SCRIPT},--sep,;,--re,s/Animal::Animal/subst1/;s/Cat::Cat/subst2/;s/subst2/subst3/;s/foo/bar/" $PAR 2>&1 | tee simplifyErr.log + $COVER $GENHTML_TOOL --branch $PARLLEL $PROFILE -o simplify demangle.info --flat --simplify "${SIMPLIFY_SCRIPT},--sep,;,--re,s/Animal::Animal/subst1/g;s/Cat::Cat/subst2/g;s/subst2/subst3/g;s/foo/bar/g" $PAR 2>&1 | tee simplifyErr.log if [ ${PIPESTATUS[0]} == 0 ] ; then echo "genhtml --simplify unused regexp didn't fail" exit 1 fi - grep "'simplify' pattern 's/foo/bar/' is unused" simplifyErr.log + grep "'simplify' pattern 's/foo/bar/g' is unused" simplifyErr.log if [ $? != 0 ] ; then echo "didn't find expected unused error" exit 1 fi - $COVER $GENHTML_TOOL --branch $PARLLEL $PROFILE -o simplify demangle.info --flat --simplify "${SIMPLIFY_SCRIPT},--sep,;,--re,s/Animal::Animal/subst1/;s/Cat::Cat/subst2/;s/subst2/subst3/;s/foo/bar/" $PAR --ignore unused 2>&1 | tee simplifyWarn.log + $COVER $GENHTML_TOOL --branch $PARLLEL $PROFILE -o simplify demangle.info --flat --simplify "${SIMPLIFY_SCRIPT},--sep,;,--re,s/Animal::Animal/subst1/g;s/Cat::Cat/subst2/g;s/subst2/subst3/g;s/foo/bar/g" $PAR --ignore unused 2>&1 | tee simplifyWarn.log if [ ${PIPESTATUS[0]} != 0 ] ; then echo "genhtml --simplify unused regexp warn didn't pass" exit 1 fi - grep "'simplify' pattern 's/foo/bar/' is unused" simplifyWarn.log + grep "'simplify' pattern 's/foo/bar/g' is unused" simplifyWarn.log if [ $? != 0 ] ; then echo "didn't find expected unused error" exit 1 diff --git a/tests/lcov/demangle/simplify.cmd b/tests/lcov/demangle/simplify.cmd index 8a628285..809f0834 100644 --- a/tests/lcov/demangle/simplify.cmd +++ b/tests/lcov/demangle/simplify.cmd @@ -1,6 +1,6 @@ # test the 'simplify.pm --file ...' 
option -s/Animal::Animal/subst1/ -s/Cat::Cat/subst2/ -s/subst2/subst3/ +s/Animal::Animal/subst1/g +s/Cat::Cat/subst2/g +s/subst2/subst3/g diff --git a/tests/lcov/errs/errs.sh b/tests/lcov/errs/errs.sh index 16e0831a..0fe0e55a 100755 --- a/tests/lcov/errs/errs.sh +++ b/tests/lcov/errs/errs.sh @@ -3,7 +3,7 @@ set +x source ../../common.tst -rm -f *.log *.json dumper* *.out +rm -f *.log *.json dumper* *.out emptyDir.info rm -rf emptyDir clean_cover diff --git a/tests/lcov/extract/extract.sh b/tests/lcov/extract/extract.sh index fe61beb5..f69b44f3 100755 --- a/tests/lcov/extract/extract.sh +++ b/tests/lcov/extract/extract.sh @@ -64,7 +64,7 @@ if [ "${VER[0]}" -lt 8 ] ; then fi if [ 1 != "$NO_INITIAL_CAPTURE" ] ; then - $COVER $CAPTURE . $LCOV_OPTS --initial -o initial.info $IGNORE_EMPTY $IGNORE_USAGE + $COVER $CAPTURE . $LCOV_OPTS --initial -o initial.info $IGNORE_EMPTY $IGNORE_USAGE --profile if [ 0 != $? ] ; then echo "Error: unexpected error code from lcov --initial" if [ $KEEP_GOING == 0 ] ; then @@ -81,7 +81,7 @@ fi if [ "$NO_INITIAL_CAPTURE" != 1 ] ; then # capture 'all' - which will pick up the unused file - $COVER $CAPTURE . $LCOV_OPTS --all -o all_initial.info $IGNORE_EMPTY $IGNORE_USAGE + $COVER $CAPTURE . $LCOV_OPTS --all -o all_initial.info $IGNORE_EMPTY $IGNORE_USAGE --history $SCRIPT_DIR/history.pm,initial.info.json --profile if [ 0 != $? ] ; then echo "Error: unexpected error code from lcov --capture --all" if [ $KEEP_GOING == 0 ] ; then @@ -105,7 +105,8 @@ if [ 0 != $? ] ; then exit 1 fi -$COVER $CAPTURE . $LCOV_OPTS -o external.info $FILTER $IGNORE +# test an empty/trivial history callback +$COVER $CAPTURE . $LCOV_OPTS -o external.info $FILTER $IGNORE --profile --history ./history.sh if [ 0 != $? ] ; then echo "Error: unexpected error code from lcov --capture" if [ $KEEP_GOING == 0 ] ; then @@ -130,7 +131,7 @@ fi # callback tests echo $COVER $CAPTURE .
$LCOV_OPTS -o callback.info $FILTER $IGNORE --criteria $SCRIPT_DIR/threshold.pm,--line,90,--branch,65,--function,100 -$COVER $CAPTURE . $LCOV_OPTS -o callback.info $FILTER $IGNORE --criteria $SCRIPT_DIR/threshold.pm,--line,90,--branch,65,--function,100 2>&1 | tee callback_fail.log +$COVER $CAPTURE . $LCOV_OPTS -o callback.info $FILTER $IGNORE --criteria $SCRIPT_DIR/threshold.pm,--line,90,--branch,65,--function,100 --history $SCRIPT_DIR/history.pm,external.info.json 2>&1 | tee callback_fail.log if [ 0 == ${PIPESTATUS[0]} ] ; then echo "Error: expected criteria fail from lcov --capture - but not found" if [ $KEEP_GOING == 0 ] ; then @@ -144,8 +145,8 @@ if [ 0 != $? ] ; then exit 1 fi fi -echo $COVER $CAPTURE . $LCOV_OPTS -o callback2.info $FILTER $IGNORE --criteria $SCRIPT_DIR/threshold.pm,--line,20 -$COVER $CAPTURE . $LCOV_OPTS -o callback2.info $FILTER $IGNORE --criteria $SCRIPT_DIR/threshold.pm,--line,20 +echo $COVER $CAPTURE . $LCOV_OPTS -o callback2.info $FILTER $IGNORE --criteria $SCRIPT_DIR/threshold.pm,--line,20 --history $SCRIPT_DIR/history.pm,external.info.json,callback.info.json +$COVER $CAPTURE . $LCOV_OPTS -o callback2.info $FILTER $IGNORE --criteria $SCRIPT_DIR/threshold.pm,--line,20 --history $SCRIPT_DIR/history.pm,external.info.json,callback.info.json if [ 0 != $? ] ; then echo "Error: expected criteria pass from lcov --capture - but failed" if [ $KEEP_GOING == 0 ] ; then @@ -153,7 +154,6 @@ if [ 0 != $? 
] ; then fi fi - echo $COVER $LCOV_TOOL $LCOV_OPTS -o aggregata.info -a callback.info $FILTER $IGNORE --criteria $SCRIPT_DIR/threshold.pm,--line,90,--branch,65,--function,100 $COVER $LCOV_TOOL $LCOV_OPTS -o aggregata.info -a callback.info $FILTER $IGNORE --criteria $SCRIPT_DIR/threshold.pm,--line,90,--branch,65,--function,100 2>&1 | tee callback_fail2.log if [ 0 == ${PIPESTATUS[0]} ] ; then diff --git a/tests/lcov/extract/history.sh b/tests/lcov/extract/history.sh new file mode 100755 index 00000000..efce51d4 --- /dev/null +++ b/tests/lcov/extract/history.sh @@ -0,0 +1,2 @@ +#!/bin/sh +echo '' diff --git a/tests/llvm2lcov/llvm2lcov.sh b/tests/llvm2lcov/llvm2lcov.sh index 9b4d279e..03bf79f4 100755 --- a/tests/llvm2lcov/llvm2lcov.sh +++ b/tests/llvm2lcov/llvm2lcov.sh @@ -19,6 +19,12 @@ fi LCOV_OPTS="--branch-coverage $PARALLEL $PROFILE" +which clang +if [ $? != 0 ] ; then + echo "clang unavailable - skipping test" + exit 0 +fi + IFS='.' read -r -a LLVM_VER <<< `clang -dumpversion` if [ "${LLVM_VER[0]}" -ge 18 ] ; then ENABLE_MCDC=1