Instead of using lcov -r (which is extremely slow), first use a Python script to perform a bulk cleanup of the /usr/include/* coverage. Then use lcov -a to remove the duplicate entries. This has the same effect as lcov -r but runs significantly faster.
Andrew Chow
8 years ago
2 changed files with 32 additions and 4 deletions
@@ -0,0 +1,24 @@
#!/usr/bin/env python3

import argparse

parser = argparse.ArgumentParser(description='Remove the coverage data from a tracefile for all files matching the pattern.')
parser.add_argument('pattern', help='the pattern of files to remove')
parser.add_argument('tracefile', help='the tracefile to remove the coverage data from')
parser.add_argument('outfile', help='filename for the output to be written to')

args = parser.parse_args()
tracefile = args.tracefile
pattern = args.pattern
outfile = args.outfile

in_remove = False
with open(tracefile, 'r') as f:
    with open(outfile, 'w') as wf:
        for line in f:
            # An "SF:" line names the source file for the record that follows;
            # start skipping once it matches the pattern.
            if line.startswith("SF:") and pattern in line:
                in_remove = True
            if not in_remove:
                wf.write(line)
            # "end_of_record" closes the current file's record, so stop skipping.
            if line == 'end_of_record\n':
                in_remove = False