1 /**
2 Copyright: Copyright (c) 2017, Joakim Brännström. All rights reserved.
3 License: MPL-2
4 Author: Joakim Brännström (joakim.brannstrom@gmx.com)
5 
6 This Source Code Form is subject to the terms of the Mozilla Public License,
7 v.2.0. If a copy of the MPL was not distributed with this file, You can obtain
8 one at http://mozilla.org/MPL/2.0/.
9 
10 #SPC-analyzer
11 
12 TODO cache the checksums. They are *heavy*.
13 */
14 module dextool.plugin.mutate.backend.analyze;
15 
16 import logger = std.experimental.logger;
17 import std.algorithm : map, filter, joiner, cache;
18 import std.array : array, appender, empty;
19 import std.concurrency;
20 import std.datetime : dur;
21 import std.exception : collectException;
22 import std.parallelism;
23 import std.range : tee, enumerate;
24 import std.typecons;
25 
26 import colorlog;
27 import my.set;
28 
29 import dextool.compilation_db : CompileCommandFilter, defaultCompilerFlagFilter,
30     CompileCommandDB, ParsedCompileCommandRange, ParsedCompileCommand;
31 import dextool.plugin.mutate.backend.analyze.internal : Cache, TokenStream;
32 import dextool.plugin.mutate.backend.database : Database, LineMetadata, MutationPointEntry2;
33 import dextool.plugin.mutate.backend.database.type : MarkedMutant;
34 import dextool.plugin.mutate.backend.diff_parser : Diff;
35 import dextool.plugin.mutate.backend.interface_ : ValidateLoc, FilesysIO;
36 import dextool.plugin.mutate.backend.report.utility : statusToString, Table;
37 import dextool.plugin.mutate.backend.utility : checksum, trustedRelativePath,
38     Checksum, getProfileResult, Profile;
39 import dextool.plugin.mutate.config : ConfigCompiler, ConfigAnalyze;
40 import dextool.type : ExitStatusType, AbsolutePath, Path;
41 
42 version (unittest) {
43     import unit_threaded.assertions;
44 }
45 
46 /** Analyze the files in `frange` for mutations.
47  */
48 ExitStatusType runAnalyzer(ref Database db, ConfigAnalyze conf_analyze, ConfigCompiler conf_compiler,
49         ParsedCompileCommandRange frange, ValidateLoc val_loc, FilesysIO fio) @trusted {
50     import dextool.plugin.mutate.backend.diff_parser : diffFromStdin, Diff;
51 
52     auto fileFilter = () {
53         try {
54             return FileFilter(fio.getOutputDir, conf_analyze.unifiedDiffFromStdin,
55                     conf_analyze.unifiedDiffFromStdin ? diffFromStdin : Diff.init);
56         } catch (Exception e) {
57             logger.info(e.msg);
58             logger.warning("Unable to parse diff");
59         }
60         return FileFilter.init;
61     }();
62 
63     auto pool = () {
64         if (conf_analyze.poolSize == 0)
65             return new TaskPool();
66         return new TaskPool(conf_analyze.poolSize);
67     }();
68 
    // the database is only used by one thread at a time (the store actor).
70     auto store = spawn(&storeActor, cast(shared)&db, cast(shared) fio.dup,
71             conf_analyze.prune, conf_analyze.fastDbStore,
72             conf_analyze.poolSize, conf_analyze.forceSaveAnalyze);
73 
74     int taskCnt;
75     Set!AbsolutePath alreadyAnalyzed;
76     // dfmt off
77     foreach (f; frange
            // The tool only supports analyzing a file once. This optimizes
            // the cases where the same file occurs multiple times in the
            // compile commands database.
81             .filter!(a => a.cmd.absoluteFile !in alreadyAnalyzed)
82             .tee!(a => alreadyAnalyzed.add(a.cmd.absoluteFile))
83             .cache
84             .filter!(a => !isPathInsideAnyRoot(conf_analyze.exclude, a.cmd.absoluteFile))
85             .filter!(a => fileFilter.shouldAnalyze(a.cmd.absoluteFile))) {
86         try {
87             pool.put(task!analyzeActor(f, val_loc.dup, fio.dup, conf_compiler, conf_analyze, store));
88             taskCnt++;
89         } catch (Exception e) {
90             logger.trace(e);
91             logger.warning(e.msg);
92         }
93     }
94     // dfmt on
95 
    // inform the store actor of how many analyze results it should *try* to
    // save.
98     send(store, AnalyzeCntMsg(taskCnt));
99     // wait for all files to be analyzed
100     pool.finish(true);
101     // wait for the store actor to finish
102     receiveOnly!StoreDoneMsg;
103 
104     if (conf_analyze.profile)
105         try {
106             import std.stdio : writeln;
107 
108             writeln(getProfileResult.toString);
109         } catch (Exception e) {
110             logger.warning("Unable to print the profile data: ", e.msg).collectException;
111         }
112 
113     return ExitStatusType.Ok;
114 }
115 
116 @safe:
117 
/** Filter function for files. Either all files pass or only those in the diff
 * read from stdin.
 *
 * The matching ignores the file extension. This lessens the problem that
 * headers would otherwise be skipped because they never occur in
 * `compile_commands.json`; e.g. a diff that only touches `foo.hpp` still lets
 * `foo.cpp` from `compile_commands.json` through. The unittest after the
 * struct illustrates this.
 */
125 struct FileFilter {
126     import std.path : stripExtension;
127 
128     Set!string files;
129     bool useFileFilter;
130     AbsolutePath root;
131 
132     this(AbsolutePath root, bool fromStdin, Diff diff) {
133         this.root = root;
134         this.useFileFilter = fromStdin;
135         foreach (a; diff.toRange(root)) {
136             files.add(a.key.stripExtension);
137         }
138     }
139 
140     bool shouldAnalyze(AbsolutePath p) {
141         import std.path : relativePath;
142 
143         if (!useFileFilter) {
144             return true;
145         }
146 
147         return relativePath(p, root).stripExtension in files;
148     }
149 }
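
// Illustrative sketch of the extension stripping described above. The file
// names and the reduced diff are hypothetical; it assumes UnifiedDiffParser
// accepts this minimal input, mirroring the unittest at the end of the module.
@("shall let a file through when a file with the same stem is in the diff")
unittest {
    import std.string : lineSplitter;
    import dextool.plugin.mutate.backend.diff_parser;

    immutable lines = `diff --git a/foo.hpp b/foo.hpp
index 0123..2345 100644
--- a/foo.hpp
+++ b/foo.hpp
@@ -1,1 +1,1 @@
-int x;
+int y;`;

    UnifiedDiffParser p;
    foreach (line; lines.lineSplitter)
        p.process(line);

    auto files = FileFilter(".".Path.AbsolutePath, true, p.result);

    // foo.cpp is not in the diff but shares the stem "foo" with foo.hpp.
    files.shouldAnalyze("foo.cpp".Path.AbsolutePath).shouldBeTrue;
    files.shouldAnalyze("bar.cpp".Path.AbsolutePath).shouldBeFalse;
}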
150 
/// Number of analyze tasks that have been spawned and that the `storeActor` should wait for.
152 struct AnalyzeCntMsg {
153     int value;
154 }
155 
156 struct StoreDoneMsg {
157 }
158 
/// Start an analysis of a file.
160 void analyzeActor(ParsedCompileCommand fileToAnalyze, ValidateLoc vloc, FilesysIO fio,
161         ConfigCompiler compilerConf, ConfigAnalyze analyzeConf, Tid storeActor) @trusted nothrow {
162     auto profile = Profile("analyze file " ~ fileToAnalyze.cmd.absoluteFile);
163 
164     try {
165         auto analyzer = Analyze(vloc, fio,
166                 Analyze.Config(compilerConf.forceSystemIncludes, analyzeConf.mutantsPerSchema));
167         analyzer.process(fileToAnalyze);
168         send(storeActor, cast(immutable) analyzer.result);
169         return;
170     } catch (Exception e) {
171     }
172 
    // send a dummy result so the store actor still receives the expected number of messages
174     try {
175         send(storeActor, cast(immutable) new Analyze.Result);
176     } catch (Exception e) {
177     }
178 }
179 
/// Store the results of the analysis in the database.
181 void storeActor(scope shared Database* dbShared, scope shared FilesysIO fioShared,
182         const bool prune, const bool fastDbStore, const long poolSize, const bool forceSave) @trusted nothrow {
183     import cachetools : CacheLRU;
184     import dextool.cachetools : nullableCache;
185     import dextool.plugin.mutate.backend.database : LineMetadata, FileId, LineAttr, NoMut;
186 
187     Database* db = cast(Database*) dbShared;
188     FilesysIO fio = cast(FilesysIO) fioShared;
189 
    // A file is saved to the database at most once.
191     Set!AbsolutePath savedFiles;
192 
193     auto getFileId = nullableCache!(string, FileId, (string p) => db.getFileId(p.Path))(256,
194             30.dur!"seconds");
195     auto getFileDbChecksum = nullableCache!(string, Checksum,
196             (string p) => db.getFileChecksum(p.Path))(256, 30.dur!"seconds");
197     auto getFileFsChecksum = nullableCache!(string, Checksum, (string p) {
198         return checksum(fio.makeInput(AbsolutePath(Path(p))).content[]);
199     })(256, 30.dur!"seconds");
200 
201     static struct Files {
202         Checksum[Path] value;
203 
204         this(ref Database db) {
205             foreach (a; db.getDetailedFiles) {
206                 value[a.file] = a.fileChecksum;
207             }
208         }
209     }
210 
211     void save(immutable Analyze.Result result) {
212         // mark files that have an unchanged checksum as "already saved"
213         foreach (f; result.idFile
214                 .byKey
215                 .filter!(a => a !in savedFiles)
216                 .filter!(a => getFileDbChecksum(fio.toRelativeRoot(a)) == getFileFsChecksum(a)
217                     && !forceSave)) {
218             logger.info("Unchanged ".color(Color.yellow), f);
219             savedFiles.add(f);
220         }
221 
        // only save the mutation points of a file once.
223         {
224             auto app = appender!(MutationPointEntry2[])();
225             foreach (mp; result.mutationPoints
226                     .map!(a => tuple!("data", "file")(a, fio.toAbsoluteRoot(a.file)))
227                     .filter!(a => a.file !in savedFiles)) {
228                 app.put(mp.data);
229             }
230             foreach (f; result.idFile.byKey.filter!(a => a !in savedFiles)) {
231                 logger.info("Saving ".color(Color.green), f);
232                 const relp = fio.toRelativeRoot(f);
233                 db.removeFile(relp);
234                 const info = result.infoId[result.idFile[f]];
235                 db.put(relp, info.checksum, info.language);
236                 savedFiles.add(f);
237             }
238             db.put(app.data, fio.getOutputDir);
239         }
240 
241         foreach (s; result.schematas.enumerate) {
242             try {
243                 auto mutants = result.schemataMutants[s.index].map!(
244                         a => db.getMutationStatusId(a.value))
245                     .filter!(a => !a.isNull)
246                     .map!(a => a.get)
247                     .array;
248                 if (!mutants.empty && !s.value.empty) {
249                     const id = db.putSchemata(result.schemataChecksum[s.index], s.value, mutants);
250                     logger.trace(!id.isNull, "Saving schemata ", id.get.value);
251                 }
252             } catch (Exception e) {
253                 logger.trace(e.msg);
254                 logger.warning("Unable to save schemata ", s.index).collectException;
255             }
256         }
257 
258         {
259             Set!long printed;
260             auto app = appender!(LineMetadata[])();
261             foreach (md; result.metadata) {
262                 // transform the ID from local to global.
263                 const fid = getFileId(fio.toRelativeRoot(result.fileId[md.id]));
264                 if (fid.isNull && !printed.contains(md.id)) {
265                     printed.add(md.id);
266                     logger.warningf("File with suppressed mutants (// NOMUT) not in the database: %s. Skipping...",
267                             result.fileId[md.id]).collectException;
268                 } else if (!fid.isNull) {
269                     app.put(LineMetadata(fid.get, md.line, md.attr));
270                 }
271             }
272             db.put(app.data);
273         }
274     }
275 
276     // listen for results from workers until the expected number is processed.
277     void recv() {
278         auto profile = Profile("updating files");
279         logger.info("Updating files");
280 
281         int resultCnt;
282         Nullable!int maxResults;
283         bool running = true;
284 
285         while (running) {
286             try {
287                 receive((AnalyzeCntMsg a) { maxResults = a.value; }, (immutable Analyze.Result a) {
288                     resultCnt++;
289                     save(a);
290                 },);
291             } catch (Exception e) {
292                 logger.trace(e).collectException;
293                 logger.warning(e.msg).collectException;
294             }
295 
296             if (!maxResults.isNull && resultCnt >= maxResults.get) {
297                 running = false;
298             }
299         }
300     }
301 
302     void pruneFiles() {
303         import std.path : buildPath;
304 
305         auto profile = Profile("prune files");
306 
307         logger.info("Pruning the database of dropped files");
308         auto files = db.getFiles.map!(a => fio.toAbsoluteRoot(a)).toSet;
309 
310         foreach (f; files.setDifference(savedFiles).toRange) {
311             logger.info("Removing ".color(Color.red), f);
312             db.removeFile(fio.toRelativeRoot(f));
313         }
314     }
315 
316     void fastDbOn() {
317         if (!fastDbStore)
318             return;
319         logger.info(
320                 "Turning OFF sqlite3 synchronization protection to improve the write performance");
321         logger.warning("Do NOT interrupt dextool in any way because it may corrupt the database");
322         db.run("PRAGMA synchronous = OFF");
323         db.run("PRAGMA journal_mode = MEMORY");
324     }
325 
326     void fastDbOff() {
327         if (!fastDbStore)
328             return;
329         db.run("PRAGMA synchronous = ON");
330         db.run("PRAGMA journal_mode = DELETE");
331     }
332 
333     try {
334         import dextool.plugin.mutate.backend.test_mutant.timeout : resetTimeoutContext;
335 
        // By making the mailbox size follow the number of workers, the
        // analysis as a whole slows down if saving to the database is too
        // slow. This avoids excessive or even fatal memory usage.
339         setMaxMailboxSize(thisTid, poolSize + 2, OnCrowding.block);
340 
341         fastDbOn();
342 
343         auto trans = db.transaction;
344 
345         // TODO: only remove those files that are modified.
346         logger.info("Removing metadata");
347         db.clearMetadata;
348 
349         recv();
350 
        // TODO: print what files have been updated.
352         logger.info("Resetting timeout context");
353         resetTimeoutContext(*db);
354 
355         logger.info("Updating metadata");
356         db.updateMetadata;
357 
358         if (prune) {
359             pruneFiles();
360             {
361                 auto profile = Profile("remove orphaned mutants");
362                 logger.info("Removing orphaned mutants");
363                 db.removeOrphanedMutants;
364             }
365             {
366                 auto profile = Profile("prune schematas");
                logger.info("Pruning schematas");
368                 db.pruneSchemas;
369             }
370         }
371 
372         logger.info("Updating manually marked mutants");
373         updateMarkedMutants(*db);
374         printLostMarkings(db.getLostMarkings);
375 
376         logger.info("Committing changes");
377         trans.commit;
378         logger.info("Ok".color(Color.green));
379 
380         fastDbOff();
381     } catch (Exception e) {
382         logger.error(e.msg).collectException;
        logger.error("Failed to save the result of the analysis to the database").collectException;
384     }
385 
386     try {
387         send(ownerTid, StoreDoneMsg.init);
388     } catch (Exception e) {
389         logger.errorf("Fatal error. Unable to send %s to the main thread",
390                 StoreDoneMsg.init).collectException;
391     }
392 }
393 
394 /// Analyze a file for mutants.
395 struct Analyze {
396     import std.regex : Regex, regex, matchFirst;
397     import std.typecons : Yes;
398     import cpptooling.analyzer.clang.context : ClangContext;
399     import cpptooling.utility.virtualfilesystem;
400     import dextool.type : Exists, makeExists;
401 
402     static struct Config {
403         bool forceSystemIncludes;
404         long mutantsPerSchema;
405     }
406 
407     private {
408         static immutable raw_re_nomut = `^((//)|(/\*))\s*NOMUT\s*(\((?P<tag>.*)\))?\s*((?P<comment>.*)\*/|(?P<comment>.*))?`;
409 
410         Regex!char re_nomut;
411 
412         ValidateLoc val_loc;
413         FilesysIO fio;
414         bool forceSystemIncludes;
415 
416         Cache cache;
417 
418         Result result;
419 
420         Config conf;
421     }
422 
423     this(ValidateLoc val_loc, FilesysIO fio, Config conf) @trusted {
424         this.val_loc = val_loc;
425         this.fio = fio;
426         this.cache = new Cache;
427         this.re_nomut = regex(raw_re_nomut);
        this.forceSystemIncludes = conf.forceSystemIncludes;
429         this.result = new Result;
430         this.conf = conf;
431     }
432 
433     void process(ParsedCompileCommand in_file) @safe {
434         in_file.flags.forceSystemIncludes = conf.forceSystemIncludes;
435 
436         // find the file and flags to analyze
437         Exists!AbsolutePath checked_in_file;
438         try {
439             checked_in_file = makeExists(in_file.cmd.absoluteFile);
440         } catch (Exception e) {
441             logger.warning(e.msg);
442             return;
443         }
444 
445         try {
446             () @trusted {
447                 auto ctx = ClangContext(Yes.useInternalHeaders, Yes.prependParamSyntaxOnly);
448                 auto tstream = new TokenStreamImpl(ctx);
449 
450                 analyzeForMutants(in_file, checked_in_file, ctx, tstream);
451                 // TODO: filter files so they are only analyzed once for comments
452                 foreach (f; result.fileId.byValue)
453                     analyzeForComments(f, tstream);
454             }();
455         } catch (Exception e) {
456             () @trusted { logger.trace(e); }();
457             logger.info(e.msg);
            logger.error("Failed to analyze ", in_file.cmd.absoluteFile).collectException;
459         }
460     }
461 
462     void analyzeForMutants(ParsedCompileCommand in_file,
463             Exists!AbsolutePath checked_in_file, ref ClangContext ctx, TokenStream tstream) @safe {
464         import dextool.plugin.mutate.backend.analyze.pass_clang;
465         import dextool.plugin.mutate.backend.analyze.pass_filter;
466         import dextool.plugin.mutate.backend.analyze.pass_mutant;
467         import dextool.plugin.mutate.backend.analyze.pass_schemata;
468         import cpptooling.analyzer.clang.check_parse_result : hasParseErrors, logDiagnostic;
469 
470         logger.infof("Analyzing %s", checked_in_file);
471         auto tu = ctx.makeTranslationUnit(checked_in_file, in_file.flags.completeFlags);
472         if (tu.hasParseErrors) {
473             logDiagnostic(tu);
474             logger.errorf("Compile error in %s. Skipping", checked_in_file);
475             return;
476         }
477 
478         auto ast = toMutateAst(tu.cursor, fio);
479         debug logger.trace(ast);
480 
481         auto codeMutants = () {
482             auto mutants = toMutants(ast, fio, val_loc);
483             debug logger.trace(mutants);
484 
485             debug logger.trace("filter mutants");
486             mutants = filterMutants(fio, mutants);
487             debug logger.trace(mutants);
488 
489             return toCodeMutants(mutants, fio, tstream);
490         }();
491         debug logger.trace(codeMutants);
492 
493         {
494             auto schemas = toSchemata(ast, fio, codeMutants, conf.mutantsPerSchema);
495             ast.release;
496 
497             debug logger.trace(schemas);
498             foreach (f; schemas.getSchematas.filter!(a => !(a.fragments.empty || a.mutants.empty))) {
499                 const id = result.schematas.length;
500                 result.schematas ~= f.fragments;
501                 result.schemataMutants[id] = f.mutants.map!(a => a.id).array;
502                 result.schemataChecksum[id] = f.checksum;
503             }
504         }
505 
506         result.mutationPoints = codeMutants.points.byKeyValue.map!(
507                 a => a.value.map!(b => MutationPointEntry2(fio.toRelativeRoot(a.key),
508                 b.offset, b.sloc.begin, b.sloc.end, b.mutants))).joiner.array;
509         foreach (f; codeMutants.points.byKey) {
510             const id = result.idFile.length;
511             result.idFile[f] = id;
512             result.fileId[id] = f;
513             result.infoId[id] = Result.FileInfo(codeMutants.csFiles[f], codeMutants.lang);
514         }
515     }
516 
517     /** Tokens are always from the same file.
518      *
519      * TODO: move this to pass_clang.
520      */
521     void analyzeForComments(AbsolutePath file, TokenStream tstream) @trusted {
522         import std.algorithm : filter;
523         import clang.c.Index : CXTokenKind;
524         import dextool.plugin.mutate.backend.database : LineMetadata, FileId, LineAttr, NoMut;
525 
526         const fid = result.idFile.require(file, result.fileId.length).FileId;
527 
528         auto mdata = appender!(LineMetadata[])();
529         foreach (t; cache.getTokens(AbsolutePath(file), tstream)
530                 .filter!(a => a.kind == CXTokenKind.comment)) {
531             auto m = matchFirst(t.spelling, re_nomut);
532             if (m.whichPattern == 0)
533                 continue;
534 
535             mdata.put(LineMetadata(fid, t.loc.line, LineAttr(NoMut(m["tag"], m["comment"]))));
536             logger.tracef("NOMUT found at %s:%s:%s", file, t.loc.line, t.loc.column);
537         }
538 
539         result.metadata ~= mdata.data;
540     }
541 
542     static class Result {
543         import dextool.plugin.mutate.backend.type : Language, CodeChecksum, SchemataChecksum;
544         import dextool.plugin.mutate.backend.database.type : SchemataFragment;
545 
546         MutationPointEntry2[] mutationPoints;
547 
548         static struct FileInfo {
549             Checksum checksum;
550             Language language;
551         }
552 
553         /// The key is the ID from idFile.
554         FileInfo[ulong] infoId;
555 
        /// The IDs are unique for *this* analysis, not globally.
557         long[AbsolutePath] idFile;
558         AbsolutePath[long] fileId;
559 
560         // The FileID used in the metadata is local to this analysis. It has to
561         // be remapped when added to the database.
562         LineMetadata[] metadata;
563 
        /// Mutant schematas that have been generated.
565         SchemataFragment[][] schematas;
        /// The mutants that are associated with a schemata.
567         CodeChecksum[][long] schemataMutants;
        /// Checksum for the schemata.
569         SchemataChecksum[long] schemataChecksum;
570     }
571 }
572 
573 @(
574         "shall extract the tag and comment from the input following the pattern NOMUT with optional tag and comment")
575 unittest {
576     import std.regex : regex, matchFirst;
577     import unit_threaded.runner.io : writelnUt;
578 
579     auto re_nomut = regex(Analyze.raw_re_nomut);
    // NOMUT in other types of comments should NOT match.
581     matchFirst("/// NOMUT", re_nomut).whichPattern.shouldEqual(0);
582     matchFirst("// stuff with NOMUT in it", re_nomut).whichPattern.shouldEqual(0);
583     matchFirst("/** NOMUT*/", re_nomut).whichPattern.shouldEqual(0);
584     matchFirst("/* stuff with NOMUT in it */", re_nomut).whichPattern.shouldEqual(0);
585 
586     matchFirst("/*NOMUT*/", re_nomut).whichPattern.shouldEqual(1);
587     matchFirst("/*NOMUT*/", re_nomut)["comment"].shouldEqual("");
588     matchFirst("//NOMUT", re_nomut).whichPattern.shouldEqual(1);
589     matchFirst("// NOMUT", re_nomut).whichPattern.shouldEqual(1);
590     matchFirst("// NOMUT (arch)", re_nomut)["tag"].shouldEqual("arch");
591     matchFirst("// NOMUT smurf", re_nomut)["comment"].shouldEqual("smurf");
592     auto m = matchFirst("// NOMUT (arch) smurf", re_nomut);
593     m["tag"].shouldEqual("arch");
594     m["comment"].shouldEqual("smurf");
595 }
596 
/// Stream of tokens from a file, optionally filtered to exclude comment tokens.
598 class TokenStreamImpl : TokenStream {
599     import cpptooling.analyzer.clang.context : ClangContext;
600     import dextool.plugin.mutate.backend.type : Token;
601     import dextool.plugin.mutate.backend.utility : tokenize;
602 
603     ClangContext* ctx;
604 
605     /// The context must outlive any instance of this class.
    // TODO: remove @trusted when upgrading to dmd-fe 2.091.0+ and activating dip25 + dip1000
607     this(ref ClangContext ctx) @trusted {
608         this.ctx = &ctx;
609     }
610 
611     Token[] getTokens(Path p) {
612         return tokenize(*ctx, p);
613     }
614 
615     Token[] getFilteredTokens(Path p) {
616         import clang.c.Index : CXTokenKind;
617 
618         // Filter a stream of tokens for those that should affect the checksum.
619         return tokenize(*ctx, p).filter!(a => a.kind != CXTokenKind.comment).array;
620     }
621 }
622 
/// Returns: true if `f` is inside any of the `roots`.
624 bool isPathInsideAnyRoot(AbsolutePath[] roots, AbsolutePath f) @safe {
625     import dextool.utility : isPathInsideRoot;
626 
627     foreach (root; roots) {
628         if (isPathInsideRoot(root, f))
629             return true;
630     }
631 
632     return false;
633 }
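
// Minimal sketch of the root check used by the exclude filtering in
// `runAnalyzer`. The paths are hypothetical; only the prefix relation matters,
// the files do not have to exist on disk.
@("shall report a path as inside a root only when it is below one of the roots")
unittest {
    auto roots = ["/project/src".Path.AbsolutePath];

    isPathInsideAnyRoot(roots, "/project/src/a.cpp".Path.AbsolutePath).shouldBeTrue;
    isPathInsideAnyRoot(roots, "/project/test/a.cpp".Path.AbsolutePath).shouldBeFalse;
}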
634 
635 /** Update the connection between the marked mutants and their mutation status
636  * id and mutation id.
637  */
638 void updateMarkedMutants(ref Database db) {
639     import dextool.plugin.mutate.backend.database.type : MutationStatusId;
640 
641     void update(MarkedMutant m) {
642         const stId = db.getMutationStatusId(m.statusChecksum);
643         if (stId.isNull)
644             return;
645         const mutId = db.getMutationId(stId.get);
646         if (mutId.isNull)
647             return;
648         db.removeMarkedMutant(m.statusChecksum);
649         db.markMutant(mutId.get, m.path, m.sloc, stId.get, m.statusChecksum,
650                 m.toStatus, m.rationale, m.mutText);
651         db.updateMutationStatus(stId.get, m.toStatus);
652     }
653 
    // Find those marked mutants whose checksum differs from the mutation
    // status they are related to. If possible, change the relation to the
    // correct mutation status id.
657     foreach (m; db.getMarkedMutants
658             .map!(a => tuple(a, db.getChecksum(a.statusId)))
659             .filter!(a => !a[1].isNull)
660             .filter!(a => a[0].statusChecksum != a[1].get)) {
661         update(m[0]);
662     }
663 }
664 
/// Prints the marked mutants that have been lost due to a re-run of the analysis.
666 void printLostMarkings(MarkedMutant[] lostMutants) {
667     import std.algorithm : sort;
668     import std.array : empty;
669     import std.conv : to;
670     import std.stdio : writeln;
671 
672     if (lostMutants.empty)
673         return;
674 
675     Table!6 tbl = Table!6([
676             "ID", "File", "Line", "Column", "Status", "Rationale"
677             ]);
678     foreach (m; lostMutants) {
679         typeof(tbl).Row r = [
680             m.mutationId.to!string, m.path, m.sloc.line.to!string,
681             m.sloc.column.to!string, m.toStatus.to!string, m.rationale
682         ];
683         tbl.put(r);
684     }
    logger.warning("Marked mutants were lost");
686     writeln(tbl);
687 }
688 
689 @("shall only let files in the diff through")
690 unittest {
691     import std.string : lineSplitter;
692     import dextool.plugin.mutate.backend.diff_parser;
693 
694     immutable lines = `diff --git a/standalone2.d b/standalone2.d
695 index 0123..2345 100644
696 --- a/standalone.d
697 +++ b/standalone2.d
698 @@ -31,7 +31,6 @@ import std.algorithm : map;
699  import std.array : Appender, appender, array;
700  import std.datetime : SysTime;
701 +import std.format : format;
702 -import std.typecons : Tuple;
703 
704  import d2sqlite3 : sqlDatabase = Database;
705 
706 @@ -46,7 +45,7 @@ import dextool.plugin.mutate.backend.type : Language;
707  struct Database {
708      import std.conv : to;
709      import std.exception : collectException;
710 -    import std.typecons : Nullable;
711 +    import std.typecons : Nullable, Flag, No;
712      import dextool.plugin.mutate.backend.type : MutationPoint, Mutation, Checksum;
713 
714 +    sqlDatabase db;`;
715 
716     UnifiedDiffParser p;
717     foreach (line; lines.lineSplitter)
718         p.process(line);
719     auto diff = p.result;
720 
721     auto files = FileFilter(".".Path.AbsolutePath, true, diff);
722 
723     files.shouldAnalyze("standalone.d".Path.AbsolutePath).shouldBeFalse;
724     files.shouldAnalyze("standalone2.d".Path.AbsolutePath).shouldBeTrue;
725 }