@@ -336,8 +336,7 @@ fn build_clippy() {
336
336
}
337
337
338
338
/// Read a `toml` file and return a list of `CrateSources` that we want to check with clippy
339
- fn read_crates ( clap_toml_path : Option < & str > ) -> ( String , Vec < CrateSource > ) {
340
- let toml_path = lintcheck_config_toml ( clap_toml_path) ;
339
+ fn read_crates ( toml_path : & PathBuf ) -> ( String , Vec < CrateSource > ) {
341
340
// save it so that we can use the name of the sources.toml as name for the logfile later.
342
341
let toml_filename = toml_path. file_stem ( ) . unwrap ( ) . to_str ( ) . unwrap ( ) . to_string ( ) ;
343
342
let toml_content: String =
@@ -428,7 +427,7 @@ fn parse_json_message(json_message: &str, krate: &Crate) -> ClippyWarning {
428
427
}
429
428
430
429
/// Generate a short list of occurring lint types and their count
431
- fn gather_stats ( clippy_warnings : & [ ClippyWarning ] ) -> String {
430
+ fn gather_stats ( clippy_warnings : & [ ClippyWarning ] ) -> ( String , HashMap < & String , usize > ) {
432
431
// count lint type occurrences
433
432
let mut counter: HashMap < & String , usize > = HashMap :: new ( ) ;
434
433
clippy_warnings
@@ -441,15 +440,17 @@ fn gather_stats(clippy_warnings: &[ClippyWarning]) -> String {
441
440
// to not have a lint with 200 and 2 warnings take the same spot
442
441
stats. sort_by_key ( |( lint, count) | format ! ( "{:0>4}, {}" , count, lint) ) ;
443
442
444
- stats
443
+ let stats_string = stats
445
444
. iter ( )
446
445
. map ( |( lint, count) | format ! ( "{} {}\n " , lint, count) )
447
- . collect :: < String > ( )
446
+ . collect :: < String > ( ) ;
447
+
448
+ ( stats_string, counter)
448
449
}
449
450
450
451
/// check if the latest modification of the logfile is older than the modification date of the
451
452
/// clippy binary, if this is true, we should clean the lintcheck shared target directory and recheck
452
- fn lintcheck_needs_rerun ( toml_path : Option < & str > ) -> bool {
453
+ fn lintcheck_needs_rerun ( toml_path : & PathBuf ) -> bool {
453
454
let clippy_modified: std:: time:: SystemTime = {
454
455
let mut times = [ "target/debug/clippy-driver" , "target/debug/cargo-clippy" ]
455
456
. iter ( )
@@ -459,17 +460,18 @@ fn lintcheck_needs_rerun(toml_path: Option<&str>) -> bool {
459
460
. modified ( )
460
461
. expect ( "failed to get modification date" )
461
462
} ) ;
462
- // the lates modification of either of the binaries
463
- std:: cmp:: max ( times. next ( ) . unwrap ( ) , times. next ( ) . unwrap ( ) )
463
+ // the oldest modification of either of the binaries
464
+ std:: cmp:: min ( times. next ( ) . unwrap ( ) , times. next ( ) . unwrap ( ) )
464
465
} ;
465
466
466
- let logs_modified: std:: time:: SystemTime = std:: fs:: metadata ( lintcheck_config_toml ( toml_path) )
467
+ let logs_modified: std:: time:: SystemTime = std:: fs:: metadata ( toml_path)
467
468
. expect ( "failed to get metadata of file" )
468
469
. modified ( )
469
470
. expect ( "failed to get modification date" ) ;
470
471
471
- // if clippys modification time is bigger (older) than the logs mod time, we need to rerun lintcheck
472
- clippy_modified > logs_modified
472
+ // if clippys modification time is smaller (older) than the logs mod time, we need to rerun
473
+ // lintcheck
474
+ dbg ! ( clippy_modified < logs_modified)
473
475
}
474
476
475
477
/// lintchecks `main()` function
@@ -479,11 +481,11 @@ pub fn run(clap_config: &ArgMatches) {
479
481
println ! ( "Done compiling" ) ;
480
482
481
483
let clap_toml_path: Option < & str > = clap_config. value_of ( "crates-toml" ) ;
482
- let toml_path = lintcheck_config_toml ( clap_toml_path) ;
484
+ let toml_path: PathBuf = lintcheck_config_toml ( clap_toml_path) ;
483
485
484
486
// if the clippy bin is newer than our logs, throw away target dirs to force clippy to
485
487
// refresh the logs
486
- if lintcheck_needs_rerun ( clap_toml_path ) {
488
+ if dbg ! ( lintcheck_needs_rerun( & toml_path ) ) {
487
489
let shared_target_dir = "target/lintcheck/shared_target_dir" ;
488
490
match std:: fs:: metadata ( & shared_target_dir) {
489
491
Ok ( metadata) => {
@@ -518,7 +520,9 @@ pub fn run(clap_config: &ArgMatches) {
518
520
// download and extract the crates, then run clippy on them and collect clippys warnings
519
521
// flatten into one big list of warnings
520
522
521
- let ( filename, crates) = read_crates ( clap_toml_path) ;
523
+ let ( filename, crates) = read_crates ( & toml_path) ;
524
+ let file = format ! ( "lintcheck-logs/{}_logs.txt" , filename) ;
525
+ let old_stats = read_stats_from_file ( & file) ;
522
526
523
527
let clippy_warnings: Vec < ClippyWarning > = if let Some ( only_one_crate) = clap_config. value_of ( "only" ) {
524
528
// if we don't have the specified crate in the .toml, throw an error
@@ -587,7 +591,7 @@ pub fn run(clap_config: &ArgMatches) {
587
591
} ;
588
592
589
593
// generate some stats
590
- let stats_formatted = gather_stats ( & clippy_warnings) ;
594
+ let ( stats_formatted, new_stats ) = gather_stats ( & clippy_warnings) ;
591
595
592
596
// grab crashes/ICEs, save the crate name and the ice message
593
597
let ices: Vec < ( & String , & String ) > = clippy_warnings
@@ -598,7 +602,7 @@ pub fn run(clap_config: &ArgMatches) {
598
602
599
603
let mut all_msgs: Vec < String > = clippy_warnings. iter ( ) . map ( |warning| warning. to_string ( ) ) . collect ( ) ;
600
604
all_msgs. sort ( ) ;
601
- all_msgs. push ( "\n \n \n \n Stats\n \n " . into ( ) ) ;
605
+ all_msgs. push ( "\n \n \n \n Stats: \n " . into ( ) ) ;
602
606
all_msgs. push ( stats_formatted) ;
603
607
604
608
// save the text into lintcheck-logs/logs.txt
@@ -608,7 +612,85 @@ pub fn run(clap_config: &ArgMatches) {
608
612
ices. iter ( )
609
613
. for_each ( |( cratename, msg) | text. push_str ( & format ! ( "{}: '{}'" , cratename, msg) ) ) ;
610
614
611
- let file = format ! ( "lintcheck-logs/{}_logs.txt" , filename) ;
612
615
println ! ( "Writing logs to {}" , file) ;
613
- write ( file, text) . unwrap ( ) ;
616
+ write ( & file, text) . unwrap ( ) ;
617
+
618
+ print_stats ( old_stats, new_stats) ;
619
+ }
620
+
621
+ /// read the previous stats from the lintcheck-log file
622
+ fn read_stats_from_file ( file_path : & String ) -> HashMap < String , usize > {
623
+ let file_path = PathBuf :: from ( file_path) ;
624
+ dbg ! ( & file_path) ;
625
+ let file_content: String = match std:: fs:: read_to_string ( file_path) . ok ( ) {
626
+ Some ( content) => content,
627
+ None => {
628
+ eprintln ! ( "RETURND" ) ;
629
+ return HashMap :: new ( ) ;
630
+ } ,
631
+ } ;
632
+
633
+ let lines: Vec < String > = file_content. lines ( ) . map ( |l| l. to_string ( ) ) . collect ( ) ;
634
+
635
+ // search for the beginning "Stats:" and the end "ICEs:" of the section we want
636
+ let start = lines. iter ( ) . position ( |line| line == "Stats:" ) . unwrap ( ) ;
637
+ let end = lines. iter ( ) . position ( |line| line == "ICEs:" ) . unwrap ( ) ;
638
+
639
+ let stats_lines = & lines[ start + 1 ..=end - 1 ] ;
640
+
641
+ stats_lines
642
+ . into_iter ( )
643
+ . map ( |line| {
644
+ let mut spl = line. split ( " " ) . into_iter ( ) ;
645
+ (
646
+ spl. next ( ) . unwrap ( ) . to_string ( ) ,
647
+ spl. next ( ) . unwrap ( ) . parse :: < usize > ( ) . unwrap ( ) ,
648
+ )
649
+ } )
650
+ . collect :: < HashMap < String , usize > > ( )
651
+ }
652
+
653
+ /// print how lint counts changed between runs
654
+ fn print_stats ( old_stats : HashMap < String , usize > , new_stats : HashMap < & String , usize > ) {
655
+ let same_in_both_hashmaps = old_stats
656
+ . iter ( )
657
+ . filter ( |( old_key, old_val) | new_stats. get :: < & String > ( & old_key) == Some ( old_val) )
658
+ . map ( |( k, v) | ( k. to_string ( ) , * v) )
659
+ . collect :: < Vec < ( String , usize ) > > ( ) ;
660
+
661
+ let mut old_stats_deduped = old_stats;
662
+ let mut new_stats_deduped = new_stats;
663
+
664
+ // remove duplicates from both hashmaps
665
+ same_in_both_hashmaps. iter ( ) . for_each ( |( k, v) | {
666
+ assert ! ( old_stats_deduped. remove( k) == Some ( * v) ) ;
667
+ assert ! ( new_stats_deduped. remove( k) == Some ( * v) ) ;
668
+ } ) ;
669
+
670
+ println ! ( "\n Stats:" ) ;
671
+
672
+ // list all new counts (key is in new stats but not in old stats)
673
+ new_stats_deduped
674
+ . iter ( )
675
+ . filter ( |( new_key, _) | old_stats_deduped. get :: < str > ( & new_key) . is_none ( ) )
676
+ . for_each ( |( new_key, new_value) | {
677
+ println ! ( "{} 0 => {}" , new_key, new_value) ;
678
+ } ) ;
679
+
680
+ // list all changed counts (key is in both maps but value differs)
681
+ new_stats_deduped
682
+ . iter ( )
683
+ . filter ( |( new_key, _new_val) | old_stats_deduped. get :: < str > ( & new_key) . is_some ( ) )
684
+ . for_each ( |( new_key, new_val) | {
685
+ let old_val = old_stats_deduped. get :: < str > ( & new_key) . unwrap ( ) ;
686
+ println ! ( "{} {} => {}" , new_key, old_val, new_val) ;
687
+ } ) ;
688
+
689
+ // list all gone counts (key is in old status but not in new stats)
690
+ old_stats_deduped
691
+ . iter ( )
692
+ . filter ( |( old_key, _) | new_stats_deduped. get :: < & String > ( & old_key) . is_none ( ) )
693
+ . for_each ( |( old_key, old_value) | {
694
+ println ! ( "{} {} => 0" , old_key, old_value) ;
695
+ } ) ;
614
696
}
0 commit comments