@@ -336,8 +336,7 @@ fn build_clippy() {
 }
 
 /// Read a `toml` file and return a list of `CrateSources` that we want to check with clippy
-fn read_crates(toml_path: Option<&str>) -> (String, Vec<CrateSource>) {
-    let toml_path = lintcheck_config_toml(toml_path);
+fn read_crates(toml_path: &PathBuf) -> (String, Vec<CrateSource>) {
     // save it so that we can use the name of the sources.toml as name for the logfile later.
     let toml_filename = toml_path.file_stem().unwrap().to_str().unwrap().to_string();
     let toml_content: String =
@@ -428,7 +427,7 @@ fn parse_json_message(json_message: &str, krate: &Crate) -> ClippyWarning {
 }
 
 /// Generate a short list of occuring lints-types and their count
-fn gather_stats(clippy_warnings: &[ClippyWarning]) -> String {
+fn gather_stats(clippy_warnings: &[ClippyWarning]) -> (String, HashMap<&String, usize>) {
     // count lint type occurrences
     let mut counter: HashMap<&String, usize> = HashMap::new();
     clippy_warnings
@@ -441,15 +440,17 @@ fn gather_stats(clippy_warnings: &[ClippyWarning]) -> String {
     // to not have a lint with 200 and 2 warnings take the same spot
     stats.sort_by_key(|(lint, count)| format!("{:0>4}, {}", count, lint));
 
-    stats
+    let stats_string = stats
         .iter()
         .map(|(lint, count)| format!("{} {}\n", lint, count))
-        .collect::<String>()
+        .collect::<String>();
+
+    (stats_string, counter)
 }
 
 /// check if the latest modification of the logfile is older than the modification date of the
 /// clippy binary, if this is true, we should clean the lintchec shared target directory and recheck
-fn lintcheck_needs_rerun(toml_path: Option<&str>) -> bool {
+fn lintcheck_needs_rerun(toml_path: &PathBuf) -> bool {
     let clippy_modified: std::time::SystemTime = {
         let mut times = ["target/debug/clippy-driver", "target/debug/cargo-clippy"]
             .iter()
@@ -459,17 +460,18 @@ fn lintcheck_needs_rerun(toml_path: Option<&str>) -> bool {
                     .modified()
                     .expect("failed to get modification date")
             });
-        // the lates modification of either of the binaries
-        std::cmp::max(times.next().unwrap(), times.next().unwrap())
+        // the oldest modification of either of the binaries
+        std::cmp::min(times.next().unwrap(), times.next().unwrap())
     };
 
-    let logs_modified: std::time::SystemTime = std::fs::metadata(lintcheck_config_toml(toml_path))
+    let logs_modified: std::time::SystemTime = std::fs::metadata(toml_path)
         .expect("failed to get metadata of file")
         .modified()
         .expect("failed to get modification date");
 
-    // if clippys modification time is bigger (older) than the logs mod time, we need to rerun lintcheck
-    clippy_modified > logs_modified
+    // if clippys modification time is smaller (older) than the logs mod time, we need to rerun
+    // lintcheck
+    clippy_modified < logs_modified
 }
 
 /// lintchecks `main()` function
@@ -478,11 +480,12 @@ pub fn run(clap_config: &ArgMatches) {
     build_clippy();
     println!("Done compiling");
 
-    let clap_toml_path = clap_config.value_of("crates-toml");
+    let clap_toml_path: Option<&str> = clap_config.value_of("crates-toml");
+    let toml_path: PathBuf = lintcheck_config_toml(clap_toml_path);
 
     // if the clippy bin is newer than our logs, throw away target dirs to force clippy to
     // refresh the logs
-    if lintcheck_needs_rerun(clap_toml_path) {
+    if lintcheck_needs_rerun(&toml_path) {
         let shared_target_dir = "target/lintcheck/shared_target_dir";
         match std::fs::metadata(&shared_target_dir) {
             Ok(metadata) => {
@@ -517,7 +520,9 @@ pub fn run(clap_config: &ArgMatches) {
     // download and extract the crates, then run clippy on them and collect clippys warnings
     // flatten into one big list of warnings
 
-    let (filename, crates) = read_crates(clap_toml_path);
+    let (filename, crates) = read_crates(&toml_path);
+    let file = format!("lintcheck-logs/{}_logs.txt", filename);
+    let old_stats = read_stats_from_file(&file);
 
     let clippy_warnings: Vec<ClippyWarning> = if let Some(only_one_crate) = clap_config.value_of("only") {
         // if we don't have the specified crate in the .toml, throw an error
@@ -586,7 +591,7 @@ pub fn run(clap_config: &ArgMatches) {
     };
 
     // generate some stats
-    let stats_formatted = gather_stats(&clippy_warnings);
+    let (stats_formatted, new_stats) = gather_stats(&clippy_warnings);
 
     // grab crashes/ICEs, save the crate name and the ice message
     let ices: Vec<(&String, &String)> = clippy_warnings
@@ -597,7 +602,7 @@ pub fn run(clap_config: &ArgMatches) {
 
     let mut all_msgs: Vec<String> = clippy_warnings.iter().map(|warning| warning.to_string()).collect();
     all_msgs.sort();
-    all_msgs.push("\n\n\n\nStats\n\n".into());
+    all_msgs.push("\n\n\n\nStats:\n".into());
     all_msgs.push(stats_formatted);
 
     // save the text into lintcheck-logs/logs.txt
@@ -607,7 +612,84 @@ pub fn run(clap_config: &ArgMatches) {
     ices.iter()
         .for_each(|(cratename, msg)| text.push_str(&format!("{}: '{}'", cratename, msg)));
 
-    let file = format!("lintcheck-logs/{}_logs.txt", filename);
     println!("Writing logs to {}", file);
-    write(file, text).unwrap();
+    write(&file, text).unwrap();
+
+    print_stats(old_stats, new_stats);
+}
+
+/// read the previous stats from the lintcheck-log file
+fn read_stats_from_file(file_path: &String) -> HashMap<String, usize> {
+    let file_path = PathBuf::from(file_path);
+    let file_content: String = match std::fs::read_to_string(file_path).ok() {
+        Some(content) => content,
+        None => {
+            eprintln!("RETURND");
+            return HashMap::new();
+        },
+    };
+
+    let lines: Vec<String> = file_content.lines().map(|l| l.to_string()).collect();
+
+    // search for the beginning "Stats:" and the end "ICEs:" of the section we want
+    let start = lines.iter().position(|line| line == "Stats:").unwrap();
+    let end = lines.iter().position(|line| line == "ICEs:").unwrap();
+
+    let stats_lines = &lines[start + 1..=end - 1];
+
+    stats_lines
+        .into_iter()
+        .map(|line| {
+            let mut spl = line.split(" ").into_iter();
+            (
+                spl.next().unwrap().to_string(),
+                spl.next().unwrap().parse::<usize>().unwrap(),
+            )
+        })
+        .collect::<HashMap<String, usize>>()
+}
+
+/// print how lint counts changed between runs
+fn print_stats(old_stats: HashMap<String, usize>, new_stats: HashMap<&String, usize>) {
+    let same_in_both_hashmaps = old_stats
+        .iter()
+        .filter(|(old_key, old_val)| new_stats.get::<&String>(&old_key) == Some(old_val))
+        .map(|(k, v)| (k.to_string(), *v))
+        .collect::<Vec<(String, usize)>>();
+
+    let mut old_stats_deduped = old_stats;
+    let mut new_stats_deduped = new_stats;
+
+    // remove duplicates from both hashmaps
+    same_in_both_hashmaps.iter().for_each(|(k, v)| {
+        assert!(old_stats_deduped.remove(k) == Some(*v));
+        assert!(new_stats_deduped.remove(k) == Some(*v));
+    });
+
+    println!("\nStats:");
+
+    // list all new counts (key is in new stats but not in old stats)
+    new_stats_deduped
+        .iter()
+        .filter(|(new_key, _)| old_stats_deduped.get::<str>(&new_key).is_none())
+        .for_each(|(new_key, new_value)| {
+            println!("{} 0 => {}", new_key, new_value);
+        });
+
+    // list all changed counts (key is in both maps but value differs)
+    new_stats_deduped
+        .iter()
+        .filter(|(new_key, _new_val)| old_stats_deduped.get::<str>(&new_key).is_some())
+        .for_each(|(new_key, new_val)| {
+            let old_val = old_stats_deduped.get::<str>(&new_key).unwrap();
+            println!("{} {} => {}", new_key, old_val, new_val);
+        });
+
+    // list all gone counts (key is in old status but not in new stats)
+    old_stats_deduped
+        .iter()
+        .filter(|(old_key, _)| new_stats_deduped.get::<&String>(&old_key).is_none())
+        .for_each(|(old_key, old_value)| {
+            println!("{} {} => 0", old_key, old_value);
+        });
 }
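
For reference, a minimal standalone sketch (not part of the commit) of the log layout that `read_stats_from_file` assumes and of the same split-on-space parsing; the lint names and counts here are invented for illustration:

use std::collections::HashMap;

fn main() {
    // Hypothetical excerpt of a lintcheck log in the shape the parser above expects:
    // a "Stats:" marker line, one "lint_name count" pair per line, then an "ICEs:" marker.
    let log = concat!(
        "Stats:\n",
        "clippy::some_lint 12\n",
        "clippy::another_lint 3\n",
        "ICEs:\n"
    );

    let lines: Vec<&str> = log.lines().collect();
    let start = lines.iter().position(|l| *l == "Stats:").unwrap();
    let end = lines.iter().position(|l| *l == "ICEs:").unwrap();

    // Same "name count" parsing as in the diff, just over &str instead of String.
    let stats: HashMap<String, usize> = lines[start + 1..end]
        .iter()
        .map(|line| {
            let mut spl = line.split(' ');
            (
                spl.next().unwrap().to_string(),
                spl.next().unwrap().parse::<usize>().unwrap(),
            )
        })
        .collect();

    assert_eq!(stats.get("clippy::some_lint"), Some(&12));
    println!("parsed {} lint counts", stats.len());
}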