@@ -11,9 +11,11 @@ use crate::clippy_project_root;
 
 use std::collections::HashMap;
 use std::process::Command;
+use std::sync::atomic::{AtomicUsize, Ordering};
 use std::{env, fmt, fs::write, path::PathBuf};
 
 use clap::ArgMatches;
+use rayon::prelude::*;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 
@@ -37,7 +39,7 @@ struct TomlCrate {
 
 /// Represents an archive we download from crates.io, or a git repo, or a local repo/folder
 /// Once processed (downloaded/extracted/cloned/copied...), this will be translated into a `Crate`
-#[derive(Debug, Serialize, Deserialize, Eq, Hash, PartialEq)]
+#[derive(Debug, Serialize, Deserialize, Eq, Hash, PartialEq, Ord, PartialOrd)]
 enum CrateSource {
     CratesIo {
         name: String,
@@ -215,11 +217,34 @@ impl CrateSource {
 impl Crate {
     /// Run `cargo clippy` on the `Crate` and collect and return all the lint warnings that clippy
     /// issued
-    fn run_clippy_lints(&self, cargo_clippy_path: &PathBuf) -> Vec<ClippyWarning> {
-        println!("Linting {} {}...", &self.name, &self.version);
+    fn run_clippy_lints(
+        &self,
+        cargo_clippy_path: &PathBuf,
+        target_dir_index: &AtomicUsize,
+        thread_limit: usize,
+        total_crates_to_lint: usize,
+    ) -> Vec<ClippyWarning> {
+        // advance the atomic index by one
+        let index = target_dir_index.fetch_add(1, Ordering::SeqCst);
+        // "loop" the index within 0..thread_limit
+        let target_dir_index = index % thread_limit;
+        let perc = ((index * 100) as f32 / total_crates_to_lint as f32) as u8;
+
+        if thread_limit == 1 {
+            println!(
+                "{}/{} {}% Linting {} {}",
+                index, total_crates_to_lint, perc, &self.name, &self.version
+            );
+        } else {
+            println!(
+                "{}/{} {}% Linting {} {} in target dir {:?}",
+                index, total_crates_to_lint, perc, &self.name, &self.version, target_dir_index
+            );
+        }
+
         let cargo_clippy_path = std::fs::canonicalize(cargo_clippy_path).unwrap();
 
-        let shared_target_dir = clippy_project_root().join("target/lintcheck/shared_target_dir/");
+        let shared_target_dir = clippy_project_root().join("target/lintcheck/shared_target_dir");
 
         let mut args = vec!["--", "--message-format=json", "--", "--cap-lints=warn"];
 
@@ -232,7 +257,11 @@ impl Crate {
         }
 
         let all_output = std::process::Command::new(&cargo_clippy_path)
-            .env("CARGO_TARGET_DIR", shared_target_dir)
+            // use the looping index to create individual target dirs
+            .env(
+                "CARGO_TARGET_DIR",
+                shared_target_dir.join(format!("_{:?}", target_dir_index)),
+            )
             // lint warnings will look like this:
             // src/cargo/ops/cargo_compile.rs:127:35: warning: usage of `FromIterator::from_iter`
             .args(&args)
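
For illustration, here is a minimal, self-contained sketch of the target-dir selection scheme used above; the helper name and the loop are illustrative and not part of the patch. Each lint job bumps a shared atomic counter, and `index % thread_limit` folds the job onto one of a small, reusable set of target directories:

    use std::sync::atomic::{AtomicUsize, Ordering};

    // Illustrative helper mirroring run_clippy_lints above: the shared counter
    // gives each job a unique index, and the modulo maps it into 0..thread_limit.
    fn pick_target_dir(counter: &AtomicUsize, thread_limit: usize) -> (usize, usize) {
        let index = counter.fetch_add(1, Ordering::SeqCst);
        (index, index % thread_limit)
    }

    fn main() {
        let counter = AtomicUsize::new(0);
        for _ in 0..5 {
            let (index, slot) = pick_target_dir(&counter, 2);
            println!("job {} -> target/lintcheck/shared_target_dir/_{}", index, slot);
        }
    }
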
@@ -283,13 +312,13 @@ fn filter_clippy_warnings(line: &str) -> bool {
 
 /// Builds clippy inside the repo to make sure we have a clippy executable we can use.
 fn build_clippy() {
-    let output = Command::new("cargo")
+    let status = Command::new("cargo")
         .arg("build")
-        .output()
+        .status()
         .expect("Failed to build clippy!");
-    if !output.status.success() {
-        eprintln!("Failed to compile Clippy");
-        eprintln!("stderr: {}", String::from_utf8_lossy(&output.stderr))
+    if !status.success() {
+        eprintln!("Error: Failed to compile Clippy!");
+        std::process::exit(1);
     }
 }
 
@@ -356,6 +385,9 @@ fn read_crates(toml_path: Option<&str>) -> (String, Vec<CrateSource>) {
             unreachable!("Failed to translate TomlCrate into CrateSource!");
         }
     });
+    // sort the crates
+    crate_sources.sort();
+
     (toml_filename, crate_sources)
 }
 
@@ -454,15 +486,46 @@ pub fn run(clap_config: &ArgMatches) {
             .into_iter()
             .map(|krate| krate.download_and_extract())
             .filter(|krate| krate.name == only_one_crate)
-            .map(|krate| krate.run_clippy_lints(&cargo_clippy_path))
+            .map(|krate| krate.run_clippy_lints(&cargo_clippy_path, &AtomicUsize::new(0), 1, 1))
             .flatten()
             .collect()
     } else {
+        let counter = std::sync::atomic::AtomicUsize::new(0);
+
+        // Ask rayon for thread count. Assume that half of that is the number of physical cores
+        // Use one target dir for each core so that we can run N clippys in parallel.
+        // We need to use different target dirs because cargo would lock them for a single build otherwise,
+        // killing the parallelism. However this also means that deps will only be reused half/a
+        // quarter of the time which might result in a longer wall clock runtime
+
+        // This helps when we check many small crates with dep-trees that don't have a lot of branches in
+        // order to achieve some kind of parallelism
+
+        // by default, use a single thread
+        let num_cpus = match clap_config.value_of("threads") {
+            Some(threads) => {
+                let threads: usize = threads
+                    .parse()
+                    .expect(&format!("Failed to parse '{}' to a digit", threads));
+                if threads == 0 {
+                    // automatic choice
+                    // Rayon seems to return thread count so half that for core count
+                    (rayon::current_num_threads() / 2) as usize
+                } else {
+                    threads
+                }
+            },
+            // no -j passed, use a single thread
+            None => 1,
+        };
+
+        let num_crates = crates.len();
+
         // check all crates (default)
         crates
-            .into_iter()
+            .into_par_iter()
             .map(|krate| krate.download_and_extract())
-            .map(|krate| krate.run_clippy_lints(&cargo_clippy_path))
+            .map(|krate| krate.run_clippy_lints(&cargo_clippy_path, &counter, num_cpus, num_crates))
             .flatten()
             .collect()
     };
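
As context for the else-branch, the following is a minimal sketch of the pattern it now follows, not part of the patch: a rayon parallel iterator drives the per-item work while a shared `AtomicUsize` hands each job its progress index. The crate names and output strings are made up, and `rayon` is assumed to be a dependency:

    use rayon::prelude::*;
    use std::sync::atomic::{AtomicUsize, Ordering};

    fn main() {
        let crates = vec!["cargo", "regex", "serde"];
        let total = crates.len();
        let counter = AtomicUsize::new(0);

        // Each closure runs on one of rayon's worker threads; the shared
        // counter gives every job a unique, monotonically increasing index.
        let lines: Vec<String> = crates
            .into_par_iter()
            .map(|krate| {
                let index = counter.fetch_add(1, Ordering::SeqCst);
                format!("{}/{} linting {}", index, total, krate)
            })
            .collect();

        lines.iter().for_each(|l| println!("{}", l));
    }
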